1
0
mirror of https://github.com/FFmpeg/FFmpeg.git synced 2025-08-10 06:10:52 +02:00

dnn: add backend options when loading the model

different backends might need different options for better performance,
so add the parameter into the dnn interface as a preparation.

Signed-off-by: Guo, Yejun <yejun.guo@intel.com>
This commit is contained in:
Guo, Yejun
2020-08-07 14:32:55 +08:00
parent 4ed6bca4ae
commit 0a51abe8ab
10 changed files with 17 additions and 10 deletions

View File

@@ -115,7 +115,7 @@ static DNNReturnType set_input_output_native(void *model, DNNData *input, const
// layers_num,layer_type,layer_parameterss,layer_type,layer_parameters... // layers_num,layer_type,layer_parameterss,layer_type,layer_parameters...
// For CONV layer: activation_function, input_num, output_num, kernel_size, kernel, biases // For CONV layer: activation_function, input_num, output_num, kernel_size, kernel, biases
// For DEPTH_TO_SPACE layer: block_size // For DEPTH_TO_SPACE layer: block_size
DNNModel *ff_dnn_load_model_native(const char *model_filename) DNNModel *ff_dnn_load_model_native(const char *model_filename, const char *options)
{ {
DNNModel *model = NULL; DNNModel *model = NULL;
char header_expected[] = "FFMPEGDNNNATIVE"; char header_expected[] = "FFMPEGDNNNATIVE";
@@ -245,6 +245,7 @@ DNNModel *ff_dnn_load_model_native(const char *model_filename)
model->set_input_output = &set_input_output_native; model->set_input_output = &set_input_output_native;
model->get_input = &get_input_native; model->get_input = &get_input_native;
model->options = options;
return model; return model;

View File

@@ -116,7 +116,7 @@ typedef struct ConvolutionalNetwork{
uint32_t nb_output; uint32_t nb_output;
} ConvolutionalNetwork; } ConvolutionalNetwork;
DNNModel *ff_dnn_load_model_native(const char *model_filename); DNNModel *ff_dnn_load_model_native(const char *model_filename, const char *options);
DNNReturnType ff_dnn_execute_model_native(const DNNModel *model, DNNData *outputs, uint32_t nb_output); DNNReturnType ff_dnn_execute_model_native(const DNNModel *model, DNNData *outputs, uint32_t nb_output);

View File

@@ -155,7 +155,7 @@ err:
return DNN_ERROR; return DNN_ERROR;
} }
DNNModel *ff_dnn_load_model_ov(const char *model_filename) DNNModel *ff_dnn_load_model_ov(const char *model_filename, const char *options)
{ {
DNNModel *model = NULL; DNNModel *model = NULL;
OVModel *ov_model = NULL; OVModel *ov_model = NULL;
@@ -186,6 +186,7 @@ DNNModel *ff_dnn_load_model_ov(const char *model_filename)
model->model = (void *)ov_model; model->model = (void *)ov_model;
model->set_input_output = &set_input_output_ov; model->set_input_output = &set_input_output_ov;
model->get_input = &get_input_ov; model->get_input = &get_input_ov;
model->options = options;
return model; return model;

View File

@@ -29,7 +29,7 @@
#include "../dnn_interface.h" #include "../dnn_interface.h"
DNNModel *ff_dnn_load_model_ov(const char *model_filename); DNNModel *ff_dnn_load_model_ov(const char *model_filename, const char *options);
DNNReturnType ff_dnn_execute_model_ov(const DNNModel *model, DNNData *outputs, uint32_t nb_output); DNNReturnType ff_dnn_execute_model_ov(const DNNModel *model, DNNData *outputs, uint32_t nb_output);

View File

@@ -572,7 +572,7 @@ static DNNReturnType load_native_model(TFModel *tf_model, const char *model_file
return DNN_SUCCESS; return DNN_SUCCESS;
} }
DNNModel *ff_dnn_load_model_tf(const char *model_filename) DNNModel *ff_dnn_load_model_tf(const char *model_filename, const char *options)
{ {
DNNModel *model = NULL; DNNModel *model = NULL;
TFModel *tf_model = NULL; TFModel *tf_model = NULL;
@@ -600,6 +600,7 @@ DNNModel *ff_dnn_load_model_tf(const char *model_filename)
model->model = (void *)tf_model; model->model = (void *)tf_model;
model->set_input_output = &set_input_output_tf; model->set_input_output = &set_input_output_tf;
model->get_input = &get_input_tf; model->get_input = &get_input_tf;
model->options = options;
return model; return model;
} }

View File

@@ -29,7 +29,7 @@
#include "../dnn_interface.h" #include "../dnn_interface.h"
DNNModel *ff_dnn_load_model_tf(const char *model_filename); DNNModel *ff_dnn_load_model_tf(const char *model_filename, const char *options);
DNNReturnType ff_dnn_execute_model_tf(const DNNModel *model, DNNData *outputs, uint32_t nb_output); DNNReturnType ff_dnn_execute_model_tf(const DNNModel *model, DNNData *outputs, uint32_t nb_output);

View File

@@ -43,6 +43,8 @@ typedef struct DNNData{
typedef struct DNNModel{ typedef struct DNNModel{
// Stores model that can be different for different backends. // Stores model that can be different for different backends.
void *model; void *model;
// Stores options when the model is executed by the backend
const char *options;
// Gets model input information // Gets model input information
// Just reuse struct DNNData here, actually the DNNData.data field is not needed. // Just reuse struct DNNData here, actually the DNNData.data field is not needed.
DNNReturnType (*get_input)(void *model, DNNData *input, const char *input_name); DNNReturnType (*get_input)(void *model, DNNData *input, const char *input_name);
@@ -54,7 +56,7 @@ typedef struct DNNModel{
// Stores pointers to functions for loading, executing, freeing DNN models for one of the backends. // Stores pointers to functions for loading, executing, freeing DNN models for one of the backends.
typedef struct DNNModule{ typedef struct DNNModule{
// Loads model and parameters from given file. Returns NULL if it is not possible. // Loads model and parameters from given file. Returns NULL if it is not possible.
DNNModel *(*load_model)(const char *model_filename); DNNModel *(*load_model)(const char *model_filename, const char *options);
// Executes model with specified input and output. Returns DNN_ERROR otherwise. // Executes model with specified input and output. Returns DNN_ERROR otherwise.
DNNReturnType (*execute_model)(const DNNModel *model, DNNData *outputs, uint32_t nb_output); DNNReturnType (*execute_model)(const DNNModel *model, DNNData *outputs, uint32_t nb_output);
// Frees memory allocated for model. // Frees memory allocated for model.

View File

@@ -161,7 +161,7 @@ static av_cold int init(AVFilterContext *ctx)
return AVERROR(EINVAL); return AVERROR(EINVAL);
} }
dr_context->model = (dr_context->dnn_module->load_model)(dr_context->model_filename); dr_context->model = (dr_context->dnn_module->load_model)(dr_context->model_filename, NULL);
if (!dr_context->model) { if (!dr_context->model) {
av_log(ctx, AV_LOG_ERROR, "could not load DNN model\n"); av_log(ctx, AV_LOG_ERROR, "could not load DNN model\n");
return AVERROR(EINVAL); return AVERROR(EINVAL);

View File

@@ -41,6 +41,7 @@ typedef struct DnnProcessingContext {
DNNBackendType backend_type; DNNBackendType backend_type;
char *model_inputname; char *model_inputname;
char *model_outputname; char *model_outputname;
char *backend_options;
DNNModule *dnn_module; DNNModule *dnn_module;
DNNModel *model; DNNModel *model;
@@ -69,6 +70,7 @@ static const AVOption dnn_processing_options[] = {
{ "model", "path to model file", OFFSET(model_filename), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS }, { "model", "path to model file", OFFSET(model_filename), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS },
{ "input", "input name of the model", OFFSET(model_inputname), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS }, { "input", "input name of the model", OFFSET(model_inputname), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS },
{ "output", "output name of the model", OFFSET(model_outputname), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS }, { "output", "output name of the model", OFFSET(model_outputname), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS },
{ "options", "backend options", OFFSET(backend_options), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS },
{ NULL } { NULL }
}; };
@@ -101,7 +103,7 @@ static av_cold int init(AVFilterContext *context)
return AVERROR(EINVAL); return AVERROR(EINVAL);
} }
ctx->model = (ctx->dnn_module->load_model)(ctx->model_filename); ctx->model = (ctx->dnn_module->load_model)(ctx->model_filename, ctx->backend_options);
if (!ctx->model) { if (!ctx->model) {
av_log(ctx, AV_LOG_ERROR, "could not load DNN model\n"); av_log(ctx, AV_LOG_ERROR, "could not load DNN model\n");
return AVERROR(EINVAL); return AVERROR(EINVAL);

View File

@@ -81,7 +81,7 @@ static av_cold int init(AVFilterContext *context)
av_log(context, AV_LOG_ERROR, "load_model for network was not specified\n"); av_log(context, AV_LOG_ERROR, "load_model for network was not specified\n");
return AVERROR(EIO); return AVERROR(EIO);
} }
sr_context->model = (sr_context->dnn_module->load_model)(sr_context->model_filename); sr_context->model = (sr_context->dnn_module->load_model)(sr_context->model_filename, NULL);
if (!sr_context->model){ if (!sr_context->model){
av_log(context, AV_LOG_ERROR, "could not load DNN model\n"); av_log(context, AV_LOG_ERROR, "could not load DNN model\n");
return AVERROR(EIO); return AVERROR(EIO);