dnn/native: unify error return to DNN_ERROR

Unify all error returns as DNN_ERROR, so that ff_dnn_execute_model_native
stops executing the model when a layer's layer_funcs[layer_type].pf_exec
callback reports an error.

Signed-off-by: Ting Fu <ting.fu@intel.com>
commit 230cf9d185 (pull/350/head)
parent 0f7a99e37a
Author:    Ting Fu <ting.fu@intel.com>
Committer: Guo, Yejun
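---
A minimal standalone sketch of the return-value contract this patch relies
on, assuming DNNReturnType from libavfilter/dnn_interface.h defines
DNN_SUCCESS = 0 and DNN_ERROR = 1; the exec_layer_* functions below are
hypothetical stand-ins for the layer_funcs[].pf_exec callbacks, and only
illustrate why a bare -1 never compares equal to DNN_ERROR:

    #include <stdio.h>

    /* Mirrors the enum in libavfilter/dnn_interface.h:
     * DNN_SUCCESS == 0, DNN_ERROR == 1. */
    typedef enum { DNN_SUCCESS, DNN_ERROR } DNNReturnType;

    /* Hypothetical layer callbacks: the old style returned a bare -1,
     * the unified style returns DNN_ERROR. */
    static int exec_layer_old(void) { return -1; }
    static int exec_layer_new(void) { return DNN_ERROR; }

    int main(void)
    {
        /* -1 != DNN_ERROR, so an executor checking == DNN_ERROR would
         * miss the failure and keep running the remaining layers. */
        if (exec_layer_old() == DNN_ERROR)
            printf("old style: failure caught\n");
        else
            printf("old style: failure missed, execution continues\n");

        /* With the unified value, the check fires and execution stops. */
        if (exec_layer_new() == DNN_ERROR)
            printf("new style: failure caught, model execution stops\n");
        return 0;
    }

With every layer returning the unified value, ff_dnn_execute_model_native
can treat DNN_ERROR from any pf_exec call as fatal, as the first hunk
below does.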
 libavfilter/dnn/dnn_backend_native.c                   | 10 ++++++----
 libavfilter/dnn/dnn_backend_native_layer_avgpool.c     |  2 +-
 libavfilter/dnn/dnn_backend_native_layer_conv2d.c      |  4 ++--
 libavfilter/dnn/dnn_backend_native_layer_depth2space.c |  4 ++--
 libavfilter/dnn/dnn_backend_native_layer_mathbinary.c  |  2 +-
 libavfilter/dnn/dnn_backend_native_layer_mathunary.c   |  2 +-
 libavfilter/dnn/dnn_backend_native_layer_pad.c         |  4 ++--
 7 files changed, 15 insertions(+), 13 deletions(-)

--- a/libavfilter/dnn/dnn_backend_native.c
+++ b/libavfilter/dnn/dnn_backend_native.c
@@ -246,10 +246,12 @@ DNNReturnType ff_dnn_execute_model_native(const DNNModel *model, DNNData *output
 
     for (layer = 0; layer < native_model->layers_num; ++layer){
         DNNLayerType layer_type = native_model->layers[layer].type;
-        layer_funcs[layer_type].pf_exec(native_model->operands,
-                                        native_model->layers[layer].input_operand_indexes,
-                                        native_model->layers[layer].output_operand_index,
-                                        native_model->layers[layer].params);
+        if (layer_funcs[layer_type].pf_exec(native_model->operands,
+                                            native_model->layers[layer].input_operand_indexes,
+                                            native_model->layers[layer].output_operand_index,
+                                            native_model->layers[layer].params) == DNN_ERROR) {
+            return DNN_ERROR;
+        }
     }
 
     for (uint32_t i = 0; i < nb_output; ++i) {

--- a/libavfilter/dnn/dnn_backend_native_layer_avgpool.c
+++ b/libavfilter/dnn/dnn_backend_native_layer_avgpool.c
@@ -109,7 +109,7 @@ int dnn_execute_layer_avg_pool(DnnOperand *operands, const int32_t *input_operan
     output_operand->length = calculate_operand_data_length(output_operand);
     output_operand->data = av_realloc(output_operand->data, output_operand->length);
     if (!output_operand->data)
-        return -1;
+        return DNN_ERROR;
     output = output_operand->data;
 
     for (int y = 0; y < height_end; y += kernel_strides) {

--- a/libavfilter/dnn/dnn_backend_native_layer_conv2d.c
+++ b/libavfilter/dnn/dnn_backend_native_layer_conv2d.c
@@ -114,10 +114,10 @@ int dnn_execute_layer_conv2d(DnnOperand *operands, const int32_t *input_operand_
     output_operand->data_type = operands[input_operand_index].data_type;
     output_operand->length = calculate_operand_data_length(output_operand);
     if (output_operand->length <= 0)
-        return -1;
+        return DNN_ERROR;
     output_operand->data = av_realloc(output_operand->data, output_operand->length);
     if (!output_operand->data)
-        return -1;
+        return DNN_ERROR;
     output = output_operand->data;
 
     av_assert0(channel == conv_params->input_num);

--- a/libavfilter/dnn/dnn_backend_native_layer_depth2space.c
+++ b/libavfilter/dnn/dnn_backend_native_layer_depth2space.c
@@ -76,10 +76,10 @@ int dnn_execute_layer_depth2space(DnnOperand *operands, const int32_t *input_ope
     output_operand->data_type = operands[input_operand_index].data_type;
     output_operand->length = calculate_operand_data_length(output_operand);
     if (output_operand->length <= 0)
-        return -1;
+        return DNN_ERROR;
     output_operand->data = av_realloc(output_operand->data, output_operand->length);
     if (!output_operand->data)
-        return -1;
+        return DNN_ERROR;
     output = output_operand->data;
 
     for (y = 0; y < height; ++y){

--- a/libavfilter/dnn/dnn_backend_native_layer_mathbinary.c
+++ b/libavfilter/dnn/dnn_backend_native_layer_mathbinary.c
@@ -186,6 +186,6 @@ int dnn_execute_layer_math_binary(DnnOperand *operands, const int32_t *input_ope
         math_binary_not_commutative(floormod, params, input, output, operands, input_operand_indexes);
         return 0;
     default:
-        return -1;
+        return DNN_ERROR;
     }
 }

--- a/libavfilter/dnn/dnn_backend_native_layer_mathunary.c
+++ b/libavfilter/dnn/dnn_backend_native_layer_mathunary.c
@@ -143,6 +143,6 @@ int dnn_execute_layer_math_unary(DnnOperand *operands, const int32_t *input_oper
             dst[i] = round(src[i]);
         return 0;
     default:
-        return -1;
+        return DNN_ERROR;
     }
 }

--- a/libavfilter/dnn/dnn_backend_native_layer_pad.c
+++ b/libavfilter/dnn/dnn_backend_native_layer_pad.c
@@ -112,10 +112,10 @@ int dnn_execute_layer_pad(DnnOperand *operands, const int32_t *input_operand_ind
     output_operand->data_type = operands[input_operand_index].data_type;
     output_operand->length = calculate_operand_data_length(output_operand);
    if (output_operand->length <= 0)
-        return -1;
+        return DNN_ERROR;
     output_operand->data = av_realloc(output_operand->data, output_operand->length);
     if (!output_operand->data)
-        return -1;
+        return DNN_ERROR;
     output = output_operand->data;
 
     // copy the original data
