mirror of
https://github.com/FFmpeg/FFmpeg.git
synced 2025-01-03 05:10:03 +02:00
dnn/native: unify error return to DNN_ERROR
Unify all error returns as DNN_ERROR, so that model execution stops in ff_dnn_execute_model_native when a layer_funcs[].pf_exec call returns an error. Signed-off-by: Ting Fu <ting.fu@intel.com>
This commit is contained in:
parent
0f7a99e37a
commit
230cf9d185
@ -246,10 +246,12 @@ DNNReturnType ff_dnn_execute_model_native(const DNNModel *model, DNNData *output
|
|||||||
|
|
||||||
for (layer = 0; layer < native_model->layers_num; ++layer){
|
for (layer = 0; layer < native_model->layers_num; ++layer){
|
||||||
DNNLayerType layer_type = native_model->layers[layer].type;
|
DNNLayerType layer_type = native_model->layers[layer].type;
|
||||||
layer_funcs[layer_type].pf_exec(native_model->operands,
|
if (layer_funcs[layer_type].pf_exec(native_model->operands,
|
||||||
native_model->layers[layer].input_operand_indexes,
|
native_model->layers[layer].input_operand_indexes,
|
||||||
native_model->layers[layer].output_operand_index,
|
native_model->layers[layer].output_operand_index,
|
||||||
native_model->layers[layer].params);
|
native_model->layers[layer].params) == DNN_ERROR) {
|
||||||
|
return DNN_ERROR;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for (uint32_t i = 0; i < nb_output; ++i) {
|
for (uint32_t i = 0; i < nb_output; ++i) {
|
||||||
|
@ -109,7 +109,7 @@ int dnn_execute_layer_avg_pool(DnnOperand *operands, const int32_t *input_operan
|
|||||||
output_operand->length = calculate_operand_data_length(output_operand);
|
output_operand->length = calculate_operand_data_length(output_operand);
|
||||||
output_operand->data = av_realloc(output_operand->data, output_operand->length);
|
output_operand->data = av_realloc(output_operand->data, output_operand->length);
|
||||||
if (!output_operand->data)
|
if (!output_operand->data)
|
||||||
return -1;
|
return DNN_ERROR;
|
||||||
output = output_operand->data;
|
output = output_operand->data;
|
||||||
|
|
||||||
for (int y = 0; y < height_end; y += kernel_strides) {
|
for (int y = 0; y < height_end; y += kernel_strides) {
|
||||||
|
@ -114,10 +114,10 @@ int dnn_execute_layer_conv2d(DnnOperand *operands, const int32_t *input_operand_
|
|||||||
output_operand->data_type = operands[input_operand_index].data_type;
|
output_operand->data_type = operands[input_operand_index].data_type;
|
||||||
output_operand->length = calculate_operand_data_length(output_operand);
|
output_operand->length = calculate_operand_data_length(output_operand);
|
||||||
if (output_operand->length <= 0)
|
if (output_operand->length <= 0)
|
||||||
return -1;
|
return DNN_ERROR;
|
||||||
output_operand->data = av_realloc(output_operand->data, output_operand->length);
|
output_operand->data = av_realloc(output_operand->data, output_operand->length);
|
||||||
if (!output_operand->data)
|
if (!output_operand->data)
|
||||||
return -1;
|
return DNN_ERROR;
|
||||||
output = output_operand->data;
|
output = output_operand->data;
|
||||||
|
|
||||||
av_assert0(channel == conv_params->input_num);
|
av_assert0(channel == conv_params->input_num);
|
||||||
|
@ -76,10 +76,10 @@ int dnn_execute_layer_depth2space(DnnOperand *operands, const int32_t *input_ope
|
|||||||
output_operand->data_type = operands[input_operand_index].data_type;
|
output_operand->data_type = operands[input_operand_index].data_type;
|
||||||
output_operand->length = calculate_operand_data_length(output_operand);
|
output_operand->length = calculate_operand_data_length(output_operand);
|
||||||
if (output_operand->length <= 0)
|
if (output_operand->length <= 0)
|
||||||
return -1;
|
return DNN_ERROR;
|
||||||
output_operand->data = av_realloc(output_operand->data, output_operand->length);
|
output_operand->data = av_realloc(output_operand->data, output_operand->length);
|
||||||
if (!output_operand->data)
|
if (!output_operand->data)
|
||||||
return -1;
|
return DNN_ERROR;
|
||||||
output = output_operand->data;
|
output = output_operand->data;
|
||||||
|
|
||||||
for (y = 0; y < height; ++y){
|
for (y = 0; y < height; ++y){
|
||||||
|
@ -186,6 +186,6 @@ int dnn_execute_layer_math_binary(DnnOperand *operands, const int32_t *input_ope
|
|||||||
math_binary_not_commutative(floormod, params, input, output, operands, input_operand_indexes);
|
math_binary_not_commutative(floormod, params, input, output, operands, input_operand_indexes);
|
||||||
return 0;
|
return 0;
|
||||||
default:
|
default:
|
||||||
return -1;
|
return DNN_ERROR;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -143,6 +143,6 @@ int dnn_execute_layer_math_unary(DnnOperand *operands, const int32_t *input_oper
|
|||||||
dst[i] = round(src[i]);
|
dst[i] = round(src[i]);
|
||||||
return 0;
|
return 0;
|
||||||
default:
|
default:
|
||||||
return -1;
|
return DNN_ERROR;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -112,10 +112,10 @@ int dnn_execute_layer_pad(DnnOperand *operands, const int32_t *input_operand_ind
|
|||||||
output_operand->data_type = operands[input_operand_index].data_type;
|
output_operand->data_type = operands[input_operand_index].data_type;
|
||||||
output_operand->length = calculate_operand_data_length(output_operand);
|
output_operand->length = calculate_operand_data_length(output_operand);
|
||||||
if (output_operand->length <= 0)
|
if (output_operand->length <= 0)
|
||||||
return -1;
|
return DNN_ERROR;
|
||||||
output_operand->data = av_realloc(output_operand->data, output_operand->length);
|
output_operand->data = av_realloc(output_operand->data, output_operand->length);
|
||||||
if (!output_operand->data)
|
if (!output_operand->data)
|
||||||
return -1;
|
return DNN_ERROR;
|
||||||
output = output_operand->data;
|
output = output_operand->data;
|
||||||
|
|
||||||
// copy the original data
|
// copy the original data
|
||||||
|
Loading…
Reference in New Issue
Block a user