author     Shubhanshu Saxena <shubhanshu.e01@gmail.com>    2021-06-05 23:38:04 +0530
committer  Guo Yejun <yejun.guo@intel.com>                 2021-06-12 15:18:58 +0800
commit     9675ebbb91891c826eeef065fd8a87d732f73ed0 (patch)
tree       8e1eca0da490f1313fdd87f8f5dbb8bdba3d259c /libavfilter/dnn
parent     446b4f77c106add0f6db4c0ffad1642d0920d6aa (diff)
download   ffmpeg-9675ebbb91891c826eeef065fd8a87d732f73ed0.tar.gz
lavfi/dnn: Add nb_output to TaskItem
Add nb_output property to TaskItem for use in the TensorFlow and Native backends.

Signed-off-by: Shubhanshu Saxena <shubhanshu.e01@gmail.com>
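For illustration only, a minimal, self-contained C sketch of the access pattern this field enables: a backend can walk task->output_names up to task->nb_output when collecting inference results. The trimmed-down TaskItem and the fetch_output() helper below are stand-ins invented for the sketch, not FFmpeg's actual structures or API.

#include <stdint.h>
#include <stdio.h>

/* Trimmed-down stand-in for TaskItem; only the fields the sketch needs. */
typedef struct TaskItem {
    const char **output_names;
    uint32_t nb_output;        /* the field added by this patch */
} TaskItem;

/* Hypothetical helper standing in for a backend's per-tensor fetch. */
static void fetch_output(const char *name)
{
    printf("fetching output tensor \"%s\"\n", name);
}

int main(void)
{
    const char *names[] = { "conv_out", "prob_out" };   /* example output names */
    TaskItem task = { .output_names = names, .nb_output = 2 };

    /* Instead of assuming a single output, a backend can loop over all
     * requested outputs. */
    for (uint32_t i = 0; i < task.nb_output; i++)
        fetch_output(task.output_names[i]);
    return 0;
}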
Diffstat (limited to 'libavfilter/dnn')
-rw-r--r--  libavfilter/dnn/dnn_backend_common.h   | 1 +
-rw-r--r--  libavfilter/dnn/dnn_backend_openvino.c | 3 +++
2 files changed, 4 insertions, 0 deletions
diff --git a/libavfilter/dnn/dnn_backend_common.h b/libavfilter/dnn/dnn_backend_common.h
index f76a05026d..704cf921f1 100644
--- a/libavfilter/dnn/dnn_backend_common.h
+++ b/libavfilter/dnn/dnn_backend_common.h
@@ -35,6 +35,7 @@ typedef struct TaskItem {
     const char **output_names;
     int async;
     int do_ioproc;
+    uint32_t nb_output;
     uint32_t inference_todo;
     uint32_t inference_done;
 } TaskItem;
diff --git a/libavfilter/dnn/dnn_backend_openvino.c b/libavfilter/dnn/dnn_backend_openvino.c
index 0f3b235820..c2487c35be 100644
--- a/libavfilter/dnn/dnn_backend_openvino.c
+++ b/libavfilter/dnn/dnn_backend_openvino.c
@@ -678,6 +678,7 @@ static DNNReturnType get_output_ov(void *model, const char *input_name, int inpu
     task.in_frame = in_frame;
     task.output_names = &output_name;
     task.out_frame = out_frame;
+    task.nb_output = 1;
     task.model = ov_model;
 
     if (extract_inference_from_task(ov_model->model->func_type, &task, ov_model->inference_queue, NULL) != DNN_SUCCESS) {
@@ -798,6 +799,7 @@ DNNReturnType ff_dnn_execute_model_ov(const DNNModel *model, DNNExecBaseParams *
     task.in_frame = exec_params->in_frame;
     task.output_names = &exec_params->output_names[0];
     task.out_frame = exec_params->out_frame ? exec_params->out_frame : exec_params->in_frame;
+    task.nb_output = exec_params->nb_output;
     task.model = ov_model;
 
     if (extract_inference_from_task(ov_model->model->func_type, &task, ov_model->inference_queue, exec_params) != DNN_SUCCESS) {
@@ -845,6 +847,7 @@ DNNReturnType ff_dnn_execute_model_async_ov(const DNNModel *model, DNNExecBasePa
     task->in_frame = exec_params->in_frame;
     task->output_names = &exec_params->output_names[0];
     task->out_frame = exec_params->out_frame ? exec_params->out_frame : exec_params->in_frame;
+    task->nb_output = exec_params->nb_output;
     task->model = ov_model;
     if (ff_queue_push_back(ov_model->task_queue, task) < 0) {
         av_freep(&task);
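Note on the three call sites above: get_output_ov() hard-codes task.nb_output = 1 because it probes a single named output to discover its dimensions, while both the synchronous and asynchronous execute paths propagate the caller-supplied exec_params->nb_output, so multi-output requests are described the same way across backends.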