Skip to content

Commit

Permalink
Fix a bug where we previously didn't save the previous hook, hence causing…
Browse files Browse the repository at this point in the history
… issues when absorbing a new model.
  • Loading branch information
liuliu committed Feb 7, 2024
1 parent cef13b9 commit 5d5b124
Show file tree
Hide file tree
Showing 8 changed files with 52 additions and 34 deletions.
10 changes: 5 additions & 5 deletions lib/nnc/ccv_cnnp_model.c
Original file line number Diff line number Diff line change
Expand Up @@ -411,7 +411,7 @@ void ccv_cnnp_model_absorb(ccv_cnnp_model_t* const model, ccv_cnnp_model_t* cons
ccv_cnnp_compiled_data_t* const compiled_data = model->compiled_data;
init->graph = ccv_nnc_symbolic_graph_new();
ccv_array_t* const stack = ccv_array_new(sizeof(int), 0, 0);
ccv_nnc_graph_exec_symbol_new_hook(init->graph, _ccv_cnnp_graph_push_graph_exec_symbol, stack);
ccv_nnc_graph_exec_symbol_new_hook(init->graph, _ccv_cnnp_graph_push_graph_exec_symbol, stack, 0);
_ccv_cnnp_model_compile(init, inputs, input_size, compiled_data->loss);
init->parallel_count = model->parallel_count;
init->memory_compression = model->memory_compression;
Expand All @@ -420,7 +420,7 @@ void ccv_cnnp_model_absorb(ccv_cnnp_model_t* const model, ccv_cnnp_model_t* cons
init->compiled_data->minimize.max_saved_aux_size = model->compiled_data->minimize.max_saved_aux_size;
if (model->compiled_data->gradient_mode != CCV_CNNP_COMPILED_DATA_GRADIENT_NONE)
_ccv_cnnp_model_gradient_init(init, model->compiled_data->gradient_mode, model->compiled_data->disable_outgrad, 0, 0);
ccv_nnc_graph_exec_symbol_new_hook(init->graph, 0, 0);
ccv_nnc_graph_exec_symbol_new_hook(init->graph, 0, 0, 0);
ccv_nnc_symbolic_graph_tensor_auto(init->graph, TRAVERSE_FULL);
int i, j;
// Verify parameters, internals and saved_aux in both graph has the same dimensionality.
Expand Down Expand Up @@ -991,9 +991,9 @@ static void _ccv_cnnp_model_set_rewindables(ccv_cnnp_model_t* const model)
assert(compiled_data);
if (!compiled_data->rewindables)
compiled_data->rewindables = ccv_array_new(sizeof(ccv_cnnp_rewind_symbol_t), 0, 0);
ccv_nnc_tensor_symbol_new_hook(model->graph, _ccv_cnnp_model_tensor_symbol_new_hook, compiled_data->rewindables);
ccv_nnc_tensor_symbol_alias_new_hook(model->graph, _ccv_cnnp_model_tensor_symbol_alias_new_hook, compiled_data->rewindables);
ccv_nnc_graph_exec_symbol_new_hook(model->graph, _ccv_cnnp_model_graph_exec_symbol_new_hook, compiled_data->rewindables);
ccv_nnc_tensor_symbol_new_hook(model->graph, _ccv_cnnp_model_tensor_symbol_new_hook, compiled_data->rewindables, 0);
ccv_nnc_tensor_symbol_alias_new_hook(model->graph, _ccv_cnnp_model_tensor_symbol_alias_new_hook, compiled_data->rewindables, 0);
ccv_nnc_graph_exec_symbol_new_hook(model->graph, _ccv_cnnp_model_graph_exec_symbol_new_hook, compiled_data->rewindables, 0);
}

static void _ccv_cnnp_model_gradient_init(ccv_cnnp_model_t* const model, const int gradient_mode, const uint64_t disable_outgrad, ccv_nnc_tensor_t* const* const fits, const int fit_size)
Expand Down
2 changes: 1 addition & 1 deletion lib/nnc/ccv_cnnp_model_addons.c
Original file line number Diff line number Diff line change
Expand Up @@ -3568,7 +3568,7 @@ ccv_cnnp_model_t* ccv_cnnp_move(const char* const name)
{
ccv_cnnp_model_move_t* const model_move = (ccv_cnnp_model_move_t*)cccalloc(1, sizeof(ccv_cnnp_model_move_t));
model_move->super.isa = &ccv_cnnp_move_isa;
model_move->super.input_size = 0;
model_move->super.input_size = 2;
model_move->super.outputs = &model_move->output;
model_move->super.output_size = 1;
ccv_cnnp_model_copy_name(&model_move->super, name);
Expand Down
18 changes: 14 additions & 4 deletions lib/nnc/ccv_cnnp_model_core.c
Original file line number Diff line number Diff line change
Expand Up @@ -182,10 +182,18 @@ static void _ccv_cnnp_functional_model_deinit(ccv_cnnp_model_t* const super)

KHASH_MAP_INIT_INT64(io_node, ccv_array_t*)

/* Context for the graph-exec-symbol creation hook used during functional
 * model build. It records every newly created exec symbol into `nodes`
 * and, if another hook was installed before this one, chains the call
 * through to it so the earlier hook is not silently dropped. */
typedef struct {
ccv_array_t* nodes; /* Exec symbols created while this hook is active. */
ccv_nnc_graph_exec_symbol_new_hook_f previous_func; /* Previously installed hook to chain to; 0 if none. */
void* previous_context; /* Context to pass to previous_func. */
} ccv_functional_model_build_node_hook_t;

static void _ccv_cnnp_functional_model_build_node_new(void* context, const ccv_nnc_graph_exec_symbol_t symbol, const ccv_nnc_cmd_t cmd, const ccv_nnc_tensor_symbol_t* const inputs, const int input_size, const ccv_nnc_tensor_symbol_t* const outputs, const int output_size, const char* const name)
{
ccv_array_t* const nodes = (ccv_array_t*)context;
ccv_array_push(nodes, &symbol);
ccv_functional_model_build_node_hook_t* const hook = (ccv_functional_model_build_node_hook_t*)context;
ccv_array_push(hook->nodes, &symbol);
if (hook->previous_func)
hook->previous_func(hook->previous_context, symbol, cmd, inputs, input_size, outputs, output_size, name);
}

static void _ccv_cnnp_functional_model_build(ccv_cnnp_model_t* const super, ccv_nnc_symbolic_graph_t* const graph, const ccv_nnc_tensor_symbol_t* const inputs, const int input_size, ccv_nnc_tensor_symbol_t* const outputs, const int output_size)
Expand Down Expand Up @@ -238,6 +246,7 @@ static void _ccv_cnnp_functional_model_build(ccv_cnnp_model_t* const super, ccv_
}
// Go through each sub model to build the graph.
ccv_array_t* nodes;
ccv_functional_model_build_node_hook_t hook;
const ccv_array_t* const dependencies = self->sequence[i]->dependencies;
if ((dependencies && dependencies->rnum > 0) || self->sequence[i]->dependents > 0)
{
Expand All @@ -247,13 +256,14 @@ static void _ccv_cnnp_functional_model_build(ccv_cnnp_model_t* const super, ccv_
nodes = kh_val(io_node_map, k) = ccv_array_new(sizeof(ccv_nnc_graph_exec_symbol_t), 1, 0);
else
nodes = kh_val(io_node_map, k);
ccv_nnc_graph_exec_symbol_new_hook(graph, _ccv_cnnp_functional_model_build_node_new, nodes);
hook.nodes = nodes;
hook.previous_context = ccv_nnc_graph_exec_symbol_new_hook(graph, _ccv_cnnp_functional_model_build_node_new, &hook, &hook.previous_func);
}
sub_model->data = self->super.data;
ccv_cnnp_model_build(sub_model, graph, (ccv_nnc_tensor_symbol_t*)ccv_array_get(input_symbols, 0), input_symbols->rnum, self->sequence[i]->outputs, sub_model->output_size);
if ((dependencies && dependencies->rnum > 0) || self->sequence[i]->dependents > 0)
{
ccv_nnc_graph_exec_symbol_new_hook(graph, 0, 0);
ccv_nnc_graph_exec_symbol_new_hook(graph, hook.previous_func, hook.previous_context, 0);
if (dependencies)
for (j = 0; j < dependencies->rnum; j++)
{
Expand Down
8 changes: 5 additions & 3 deletions lib/nnc/ccv_nnc.h
Original file line number Diff line number Diff line change
Expand Up @@ -1991,8 +1991,9 @@ typedef void(*ccv_nnc_tensor_symbol_new_hook_f)(void* context, const ccv_nnc_ten
* @param graph The symbolic graph.
* @param hook The function to be called if a new tensor symbol created.
* @param context The context associated with the callback function.
* @param previous_hook Return the previous hook if provided.
*/
void* ccv_nnc_tensor_symbol_new_hook(ccv_nnc_symbolic_graph_t* const graph, ccv_nnc_tensor_symbol_new_hook_f hook, void* context);
void* ccv_nnc_tensor_symbol_new_hook(ccv_nnc_symbolic_graph_t* const graph, ccv_nnc_tensor_symbol_new_hook_f hook, void* context, ccv_nnc_tensor_symbol_new_hook_f* previous_hook);
/**
* Function prototype for tensor symbol alias creation callback.
*/
Expand All @@ -2002,8 +2003,9 @@ typedef void(*ccv_nnc_tensor_symbol_alias_new_hook_f)(void* context, const ccv_n
* @param graph The symbolic graph.
* @param hook The function to be called if a new tensor symbol alias created.
* @param context The context associated with the callback function.
* @param previous_hook Return the previous hook if provided.
*/
void* ccv_nnc_tensor_symbol_alias_new_hook(ccv_nnc_symbolic_graph_t* const graph, ccv_nnc_tensor_symbol_alias_new_hook_f hook, void* context);
void* ccv_nnc_tensor_symbol_alias_new_hook(ccv_nnc_symbolic_graph_t* const graph, ccv_nnc_tensor_symbol_alias_new_hook_f hook, void* context, ccv_nnc_tensor_symbol_alias_new_hook_f* previous_hook);
/**
* Set the pair reference for tensor symbols. Peer reference for tensor symbols has very specific meanings.
* For a backward pass involves sub-graphs. The commands in the sub-graph could reference to tensor symbols of
Expand All @@ -2024,7 +2026,7 @@ typedef void(*ccv_nnc_graph_exec_symbol_new_hook_f)(void* context, const ccv_nnc
* @param hook The function to be called if a new execution node symbol created.
* @param context The context associated with the callback function.
* @param previous_hook Return the previous hook if provided.
*/
void* ccv_nnc_graph_exec_symbol_new_hook(ccv_nnc_symbolic_graph_t* const graph, ccv_nnc_graph_exec_symbol_new_hook_f hook, void* context);
void* ccv_nnc_graph_exec_symbol_new_hook(ccv_nnc_symbolic_graph_t* const graph, ccv_nnc_graph_exec_symbol_new_hook_f hook, void* context, ccv_nnc_graph_exec_symbol_new_hook_f* previous_hook);
/**
* Set the pair reference for exec. This is very similar to the one for concrete graph. A pair reference
* of a backward pass execution node is its forward pass counterpart.
Expand Down
12 changes: 6 additions & 6 deletions lib/nnc/ccv_nnc_dynamic_graph_apply_gradients.c
Original file line number Diff line number Diff line change
Expand Up @@ -46,9 +46,9 @@ void ccv_nnc_dynamic_graph_apply_gradients(ccv_nnc_dynamic_graph_t* const dynami
ccv_nnc_graph_exec_symbol_t minimizes[parameter_size];
ccv_nnc_tensor_variable_t freeables[parameter_size + saved_aux_size];
ccv_array_t* const symbol_stack = ccv_array_new(sizeof(ccv_nnc_tape_symbol_t), 1, 0);
ccv_nnc_tensor_symbol_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_tensor_symbol, symbol_stack);
ccv_nnc_tensor_symbol_alias_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_tensor_symbol_alias, symbol_stack);
ccv_nnc_graph_exec_symbol_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_graph_exec_symbol, symbol_stack);
ccv_nnc_tensor_symbol_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_tensor_symbol, symbol_stack, 0);
ccv_nnc_tensor_symbol_alias_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_tensor_symbol_alias, symbol_stack, 0);
ccv_nnc_graph_exec_symbol_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_graph_exec_symbol, symbol_stack, 0);
ccv_array_t* const tensor_binds = ccv_array_new(sizeof(ccv_nnc_tensor_bind_t), parameter_size * 3 + saved_aux_size * 2, 0);
const int parallel_count = ccv_max(parallel, 1);
const int per_parameter_size = parameter_size / parallel_count;
Expand Down Expand Up @@ -194,9 +194,9 @@ void ccv_nnc_dynamic_graph_apply_gradients(ccv_nnc_dynamic_graph_t* const dynami
parallel_count > 1 ? allreduces : sources, parallel_count > 1 ? per_parameter_size : parameter_size,
minimizes, parameter_size,
&graph, &tensor_arena, &exec_arena);
ccv_nnc_tensor_symbol_new_hook(dynamic_graph->tape, 0, 0);
ccv_nnc_tensor_symbol_alias_new_hook(dynamic_graph->tape, 0, 0);
ccv_nnc_graph_exec_symbol_new_hook(dynamic_graph->tape, 0, 0);
ccv_nnc_tensor_symbol_new_hook(dynamic_graph->tape, 0, 0, 0);
ccv_nnc_tensor_symbol_alias_new_hook(dynamic_graph->tape, 0, 0, 0);
ccv_nnc_graph_exec_symbol_new_hook(dynamic_graph->tape, 0, 0, 0);
ccv_array_free(tensor_binds);
// Remove newly added symbols to restore the graph.
for (i = 0; i < symbol_stack->rnum; i++)
Expand Down
12 changes: 6 additions & 6 deletions lib/nnc/ccv_nnc_dynamic_graph_backward.c
Original file line number Diff line number Diff line change
Expand Up @@ -111,9 +111,9 @@ void ccv_nnc_dynamic_graph_backward(ccv_nnc_dynamic_graph_t* const dynamic_graph
for (i = 0; i < input_size; i++)
input_symbols[i] = inputs[i]->symbol;
ccv_array_t* const symbol_stack = ccv_array_new(sizeof(ccv_nnc_tape_symbol_t), 1, 0);
ccv_nnc_tensor_symbol_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_tensor_symbol, symbol_stack);
ccv_nnc_tensor_symbol_alias_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_tensor_symbol_alias, symbol_stack);
ccv_nnc_graph_exec_symbol_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_graph_exec_symbol, symbol_stack);
ccv_nnc_tensor_symbol_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_tensor_symbol, symbol_stack, 0);
ccv_nnc_tensor_symbol_alias_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_tensor_symbol_alias, symbol_stack, 0);
ccv_nnc_graph_exec_symbol_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_graph_exec_symbol, symbol_stack, 0);
ccv_nnc_symbolic_graph_backward(dynamic_graph->tape,
f_symbols, f_variable_size, input_symbols, input_size,
(ccv_nnc_graph_exec_symbol_t*)ccv_array_get(sources, 0), sources->rnum,
Expand Down Expand Up @@ -325,9 +325,9 @@ void ccv_nnc_dynamic_graph_backward(ccv_nnc_dynamic_graph_t* const dynamic_graph
ccv_array_push(destinations, &destination);
}
// Remove the hook only at this point.
ccv_nnc_tensor_symbol_new_hook(dynamic_graph->tape, 0, 0);
ccv_nnc_tensor_symbol_alias_new_hook(dynamic_graph->tape, 0, 0);
ccv_nnc_graph_exec_symbol_new_hook(dynamic_graph->tape, 0, 0);
ccv_nnc_tensor_symbol_new_hook(dynamic_graph->tape, 0, 0, 0);
ccv_nnc_tensor_symbol_alias_new_hook(dynamic_graph->tape, 0, 0, 0);
ccv_nnc_graph_exec_symbol_new_hook(dynamic_graph->tape, 0, 0, 0);
ccv_nnc_dy_xpu_alloc_t xpu_alloc = {
.xpu_alloc = &dynamic_graph->xpu_alloc,
.stream = stream_context
Expand Down
12 changes: 6 additions & 6 deletions lib/nnc/ccv_nnc_dynamic_graph_minimize.c
Original file line number Diff line number Diff line change
Expand Up @@ -81,9 +81,9 @@ void ccv_nnc_dynamic_graph_minimize(ccv_nnc_dynamic_graph_t* const dynamic_graph
for (i = 0; i < parameter_size; i++)
parameter_symbols[i] = parameters[i]->symbol;
ccv_array_t* const symbol_stack = ccv_array_new(sizeof(ccv_nnc_tape_symbol_t), 1, 0);
ccv_nnc_tensor_symbol_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_tensor_symbol, symbol_stack);
ccv_nnc_tensor_symbol_alias_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_tensor_symbol_alias, symbol_stack);
ccv_nnc_graph_exec_symbol_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_graph_exec_symbol, symbol_stack);
ccv_nnc_tensor_symbol_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_tensor_symbol, symbol_stack, 0);
ccv_nnc_tensor_symbol_alias_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_tensor_symbol_alias, symbol_stack, 0);
ccv_nnc_graph_exec_symbol_new_hook(dynamic_graph->tape, ccv_nnc_dynamic_graph_push_backward_graph_exec_symbol, symbol_stack, 0);
ccv_nnc_tensor_symbol_t updated_parameter_symbols[parameter_size];
const int saved_aux_size = parameter_size * ccv_nnc_minimizer_saved_aux_size(minimizer);
ccv_nnc_tensor_symbol_map_t saved_aux_symbols[saved_aux_size];
Expand Down Expand Up @@ -124,9 +124,9 @@ void ccv_nnc_dynamic_graph_minimize(ccv_nnc_dynamic_graph_t* const dynamic_graph
}
}
}
ccv_nnc_tensor_symbol_new_hook(dynamic_graph->tape, 0, 0);
ccv_nnc_tensor_symbol_alias_new_hook(dynamic_graph->tape, 0, 0);
ccv_nnc_graph_exec_symbol_new_hook(dynamic_graph->tape, 0, 0);
ccv_nnc_tensor_symbol_new_hook(dynamic_graph->tape, 0, 0, 0);
ccv_nnc_tensor_symbol_alias_new_hook(dynamic_graph->tape, 0, 0, 0);
ccv_nnc_graph_exec_symbol_new_hook(dynamic_graph->tape, 0, 0, 0);
// Bind generated tensors.
ccv_array_t* const tensor_binds = ccv_array_new(sizeof(ccv_nnc_tensor_bind_t), dynamic_graph->vars->rnum + 2, 0);
for (i = 0; i < dynamic_graph->vars->rnum; i++)
Expand Down
12 changes: 9 additions & 3 deletions lib/nnc/ccv_nnc_symbolic_graph.c
Original file line number Diff line number Diff line change
Expand Up @@ -192,8 +192,10 @@ ccv_nnc_tensor_symbol_t ccv_nnc_tensor_symbol_new(ccv_nnc_symbolic_graph_t* cons
return symbol;
}

void* ccv_nnc_tensor_symbol_new_hook(ccv_nnc_symbolic_graph_t* const graph, ccv_nnc_tensor_symbol_new_hook_f hook, void* context)
void* ccv_nnc_tensor_symbol_new_hook(ccv_nnc_symbolic_graph_t* const graph, ccv_nnc_tensor_symbol_new_hook_f hook, void* context, ccv_nnc_tensor_symbol_new_hook_f* previous_hook)
{
if (previous_hook)
*previous_hook = graph->hooks.tensor_symbol_new.func;
void* const prev = graph->hooks.tensor_symbol_new.context;
graph->hooks.tensor_symbol_new.func = hook;
graph->hooks.tensor_symbol_new.context = context;
Expand Down Expand Up @@ -253,8 +255,10 @@ ccv_nnc_tensor_symbol_t ccv_nnc_tensor_symbol_alias_new(ccv_nnc_symbolic_graph_t
return alias;
}

void* ccv_nnc_tensor_symbol_alias_new_hook(ccv_nnc_symbolic_graph_t* const graph, ccv_nnc_tensor_symbol_alias_new_hook_f hook, void* context)
void* ccv_nnc_tensor_symbol_alias_new_hook(ccv_nnc_symbolic_graph_t* const graph, ccv_nnc_tensor_symbol_alias_new_hook_f hook, void* context, ccv_nnc_tensor_symbol_alias_new_hook_f* previous_hook)
{
if (previous_hook)
*previous_hook = graph->hooks.tensor_symbol_alias_new.func;
void* const prev = graph->hooks.tensor_symbol_alias_new.context;
graph->hooks.tensor_symbol_alias_new.func = hook;
graph->hooks.tensor_symbol_alias_new.context = context;
Expand Down Expand Up @@ -895,8 +899,10 @@ ccv_nnc_graph_exec_symbol_t ccv_nnc_graph_exec_symbol_new(ccv_nnc_symbolic_graph
return symbol;
}

void* ccv_nnc_graph_exec_symbol_new_hook(ccv_nnc_symbolic_graph_t* const graph, ccv_nnc_graph_exec_symbol_new_hook_f hook, void* context)
void* ccv_nnc_graph_exec_symbol_new_hook(ccv_nnc_symbolic_graph_t* const graph, ccv_nnc_graph_exec_symbol_new_hook_f hook, void* context, ccv_nnc_graph_exec_symbol_new_hook_f* previous_hook)
{
if (previous_hook)
*previous_hook = graph->hooks.graph_exec_symbol_new.func;
void* const prev = graph->hooks.graph_exec_symbol_new.context;
graph->hooks.graph_exec_symbol_new.func = hook;
graph->hooks.graph_exec_symbol_new.context = context;
Expand Down

0 comments on commit 5d5b124

Please sign in to comment.