From: Zhao Zhili <quinkblack@foxmail.com>
To: ffmpeg-devel@ffmpeg.org
Cc: Zhao Zhili <quinkblack@foxmail.com>
Date: Sun, 30 Apr 2023 23:38:53 +0800
X-OQ-MSGID: <20230430153902.48405-1-quinkblack@foxmail.com>
Subject: [FFmpeg-devel] [PATCH 01/10] avfilter/dnn: define each backend as a DNNModule

To avoid exporting multiple functions for each backend implementation.
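The calling convention for filters is unchanged. For illustration only (not
part of this patch), the consumer side now looks roughly like the sketch
below; names other than the DNNModule/ff_get_dnn_module API are hypothetical,
error handling is simplified, and the usual dnn_interface.h and libavutil
headers are assumed to be included:

    /* Hypothetical usage sketch: a filter drives a backend purely through
     * the DNNModule function pointers instead of per-backend symbols. */
    static int run_model_example(AVFilterContext *filter_ctx,
                                 const char *model_filename)
    {
        /* ff_get_dnn_module() returns a heap copy of ff_dnn_backend_tf */
        DNNModule *dnn_module = ff_get_dnn_module(DNN_TF);
        DNNModel *model;

        if (!dnn_module)
            return AVERROR(ENOSYS);
        /* DFT_PROCESS_FRAME is the DNNFunctionType from dnn_interface.h */
        model = dnn_module->load_model(model_filename, DFT_PROCESS_FRAME,
                                       "", filter_ctx);
        if (!model) {
            av_freep(&dnn_module);
            return AVERROR(EINVAL);
        }
        /* ... dnn_module->execute_model(), ->get_result(), ->flush() ... */
        dnn_module->free_model(&model);
        av_freep(&dnn_module);
        return 0;
    }

With this, the per-backend ff_dnn_*_tf/_ov entry points and their headers can
be dropped, and each backend only has to export one const DNNModule.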
Signed-off-by: Zhao Zhili <quinkblack@foxmail.com>
---
 libavfilter/dnn/dnn_backend_openvino.c |  98 +++++++++++++-----------
 libavfilter/dnn/dnn_backend_openvino.h |  40 ----------
 libavfilter/dnn/dnn_backend_tf.c       | 101 +++++++++++++------------
 libavfilter/dnn/dnn_backend_tf.h       |  40 ----------
 libavfilter/dnn/dnn_interface.c        |  17 ++---
 5 files changed, 112 insertions(+), 184 deletions(-)
 delete mode 100644 libavfilter/dnn/dnn_backend_openvino.h
 delete mode 100644 libavfilter/dnn/dnn_backend_tf.h

diff --git a/libavfilter/dnn/dnn_backend_openvino.c b/libavfilter/dnn/dnn_backend_openvino.c
index b67f288336..7db2e7a10f 100644
--- a/libavfilter/dnn/dnn_backend_openvino.c
+++ b/libavfilter/dnn/dnn_backend_openvino.c
@@ -23,7 +23,6 @@
  * DNN OpenVINO backend implementation.
  */
 
-#include "dnn_backend_openvino.h"
 #include "dnn_io_proc.h"
 #include "libavformat/avio.h"
 #include "libavutil/avassert.h"
@@ -293,6 +292,46 @@ static void infer_completion_callback(void *args)
     }
 }
 
+static void dnn_free_model_ov(DNNModel **model)
+{
+    if (*model){
+        OVModel *ov_model = (*model)->model;
+        while (ff_safe_queue_size(ov_model->request_queue) != 0) {
+            OVRequestItem *item = ff_safe_queue_pop_front(ov_model->request_queue);
+            if (item && item->infer_request) {
+                ie_infer_request_free(&item->infer_request);
+            }
+            av_freep(&item->lltasks);
+            av_freep(&item);
+        }
+        ff_safe_queue_destroy(ov_model->request_queue);
+
+        while (ff_queue_size(ov_model->lltask_queue) != 0) {
+            LastLevelTaskItem *item = ff_queue_pop_front(ov_model->lltask_queue);
+            av_freep(&item);
+        }
+        ff_queue_destroy(ov_model->lltask_queue);
+
+        while (ff_queue_size(ov_model->task_queue) != 0) {
+            TaskItem *item = ff_queue_pop_front(ov_model->task_queue);
+            av_frame_free(&item->in_frame);
+            av_frame_free(&item->out_frame);
+            av_freep(&item);
+        }
+        ff_queue_destroy(ov_model->task_queue);
+
+        if (ov_model->exe_network)
+            ie_exec_network_free(&ov_model->exe_network);
+        if (ov_model->network)
+            ie_network_free(&ov_model->network);
+        if (ov_model->core)
+            ie_core_free(&ov_model->core);
+        av_freep(&ov_model);
+        av_freep(model);
+    }
+}
+
+
 static int init_model_ov(OVModel *ov_model, const char *input_name, const char *output_name)
 {
     int ret = 0;
@@ -438,7 +477,7 @@ static int init_model_ov(OVModel *ov_model, const char *input_name, const char *
 
     return 0;
 err:
-    ff_dnn_free_model_ov(&ov_model->model);
+    dnn_free_model_ov(&ov_model->model);
     return ret;
 }
 
@@ -721,7 +760,7 @@ err:
     return ret;
 }
 
-DNNModel *ff_dnn_load_model_ov(const char *model_filename, DNNFunctionType func_type, const char *options, AVFilterContext *filter_ctx)
+static DNNModel *dnn_load_model_ov(const char *model_filename, DNNFunctionType func_type, const char *options, AVFilterContext *filter_ctx)
 {
     DNNModel *model = NULL;
     OVModel *ov_model = NULL;
@@ -806,11 +845,11 @@ DNNModel *ff_dnn_load_model_ov(const char *model_filename, DNNFunctionType func_
 
     return model;
 err:
-    ff_dnn_free_model_ov(&model);
+    dnn_free_model_ov(&model);
     return NULL;
 }
 
-int ff_dnn_execute_model_ov(const DNNModel *model, DNNExecBaseParams *exec_params)
+static int dnn_execute_model_ov(const DNNModel *model, DNNExecBaseParams *exec_params)
 {
     OVModel *ov_model = model->model;
     OVContext *ctx = &ov_model->ctx;
@@ -893,13 +932,13 @@ int ff_dnn_execute_model_ov(const DNNModel *model, DNNExecBaseParams *exec_param
     }
 }
 
-DNNAsyncStatusType ff_dnn_get_result_ov(const DNNModel *model, AVFrame **in, AVFrame **out)
+static DNNAsyncStatusType dnn_get_result_ov(const DNNModel *model, AVFrame **in, AVFrame **out)
 {
     OVModel *ov_model = model->model;
     return ff_dnn_get_result_common(ov_model->task_queue, in, out);
 }
 
-int ff_dnn_flush_ov(const DNNModel *model)
+static int dnn_flush_ov(const DNNModel *model)
 {
     OVModel *ov_model = model->model;
     OVContext *ctx = &ov_model->ctx;
@@ -937,41 +976,10 @@ int ff_dnn_flush_ov(const DNNModel *model)
     return 0;
 }
 
-void ff_dnn_free_model_ov(DNNModel **model)
-{
-    if (*model){
-        OVModel *ov_model = (*model)->model;
-        while (ff_safe_queue_size(ov_model->request_queue) != 0) {
-            OVRequestItem *item = ff_safe_queue_pop_front(ov_model->request_queue);
-            if (item && item->infer_request) {
-                ie_infer_request_free(&item->infer_request);
-            }
-            av_freep(&item->lltasks);
-            av_freep(&item);
-        }
-        ff_safe_queue_destroy(ov_model->request_queue);
-
-        while (ff_queue_size(ov_model->lltask_queue) != 0) {
-            LastLevelTaskItem *item = ff_queue_pop_front(ov_model->lltask_queue);
-            av_freep(&item);
-        }
-        ff_queue_destroy(ov_model->lltask_queue);
-
-        while (ff_queue_size(ov_model->task_queue) != 0) {
-            TaskItem *item = ff_queue_pop_front(ov_model->task_queue);
-            av_frame_free(&item->in_frame);
-            av_frame_free(&item->out_frame);
-            av_freep(&item);
-        }
-        ff_queue_destroy(ov_model->task_queue);
-
-        if (ov_model->exe_network)
-            ie_exec_network_free(&ov_model->exe_network);
-        if (ov_model->network)
-            ie_network_free(&ov_model->network);
-        if (ov_model->core)
-            ie_core_free(&ov_model->core);
-        av_freep(&ov_model);
-        av_freep(model);
-    }
-}
+const DNNModule ff_dnn_backend_openvino = {
+    .load_model = dnn_load_model_ov,
+    .execute_model = dnn_execute_model_ov,
+    .get_result = dnn_get_result_ov,
+    .flush = dnn_flush_ov,
+    .free_model = dnn_free_model_ov,
+};
diff --git a/libavfilter/dnn/dnn_backend_openvino.h b/libavfilter/dnn/dnn_backend_openvino.h
deleted file mode 100644
index 304bc96b99..0000000000
--- a/libavfilter/dnn/dnn_backend_openvino.h
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (c) 2020
- *
- * This file is part of FFmpeg.
- *
- * FFmpeg is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 2.1 of the License, or (at your option) any later version.
- *
- * FFmpeg is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with FFmpeg; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- */
-
-/**
- * @file
- * DNN inference functions interface for OpenVINO backend.
- */
-
-
-#ifndef AVFILTER_DNN_DNN_BACKEND_OPENVINO_H
-#define AVFILTER_DNN_DNN_BACKEND_OPENVINO_H
-
-#include "../dnn_interface.h"
-
-DNNModel *ff_dnn_load_model_ov(const char *model_filename, DNNFunctionType func_type, const char *options, AVFilterContext *filter_ctx);
-
-int ff_dnn_execute_model_ov(const DNNModel *model, DNNExecBaseParams *exec_params);
-DNNAsyncStatusType ff_dnn_get_result_ov(const DNNModel *model, AVFrame **in, AVFrame **out);
-int ff_dnn_flush_ov(const DNNModel *model);
-
-void ff_dnn_free_model_ov(DNNModel **model);
-
-#endif
diff --git a/libavfilter/dnn/dnn_backend_tf.c b/libavfilter/dnn/dnn_backend_tf.c
index 486d2405b8..e6ebd17595 100644
--- a/libavfilter/dnn/dnn_backend_tf.c
+++ b/libavfilter/dnn/dnn_backend_tf.c
@@ -23,7 +23,6 @@
  * DNN tensorflow backend implementation.
  */
 
-#include "dnn_backend_tf.h"
 #include "libavformat/avio.h"
 #include "libavutil/avassert.h"
 #include "libavutil/avstring.h"
@@ -477,7 +476,48 @@ static int load_tf_model(TFModel *tf_model, const char *model_filename)
 
 #define NAME_BUFFER_SIZE 256
 
-DNNModel *ff_dnn_load_model_tf(const char *model_filename, DNNFunctionType func_type, const char *options, AVFilterContext *filter_ctx)
+static void dnn_free_model_tf(DNNModel **model)
+{
+    TFModel *tf_model;
+
+    if (*model){
+        tf_model = (*model)->model;
+        while (ff_safe_queue_size(tf_model->request_queue) != 0) {
+            TFRequestItem *item = ff_safe_queue_pop_front(tf_model->request_queue);
+            destroy_request_item(&item);
+        }
+        ff_safe_queue_destroy(tf_model->request_queue);
+
+        while (ff_queue_size(tf_model->lltask_queue) != 0) {
+            LastLevelTaskItem *item = ff_queue_pop_front(tf_model->lltask_queue);
+            av_freep(&item);
+        }
+        ff_queue_destroy(tf_model->lltask_queue);
+
+        while (ff_queue_size(tf_model->task_queue) != 0) {
+            TaskItem *item = ff_queue_pop_front(tf_model->task_queue);
+            av_frame_free(&item->in_frame);
+            av_frame_free(&item->out_frame);
+            av_freep(&item);
+        }
+        ff_queue_destroy(tf_model->task_queue);
+
+        if (tf_model->graph){
+            TF_DeleteGraph(tf_model->graph);
+        }
+        if (tf_model->session){
+            TF_CloseSession(tf_model->session, tf_model->status);
+            TF_DeleteSession(tf_model->session, tf_model->status);
+        }
+        if (tf_model->status){
+            TF_DeleteStatus(tf_model->status);
+        }
+        av_freep(&tf_model);
+        av_freep(model);
+    }
+}
+
+static DNNModel *dnn_load_model_tf(const char *model_filename, DNNFunctionType func_type, const char *options, AVFilterContext *filter_ctx)
 {
     DNNModel *model = NULL;
     TFModel *tf_model = NULL;
@@ -567,7 +607,7 @@ DNNModel *ff_dnn_load_model_tf(const char *model_filename, DNNFunctionType func_
 
     return model;
 err:
-    ff_dnn_free_model_tf(&model);
+    dnn_free_model_tf(&model);
     return NULL;
 }
 
@@ -765,11 +805,11 @@ err:
     if (ff_safe_queue_push_back(tf_model->request_queue, request) < 0) {
         destroy_request_item(&request);
     }
-    ff_dnn_free_model_tf(&tf_model->model);
+    dnn_free_model_tf(&tf_model->model);
     return ret;
 }
 
-int ff_dnn_execute_model_tf(const DNNModel *model, DNNExecBaseParams *exec_params)
+static int dnn_execute_model_tf(const DNNModel *model, DNNExecBaseParams *exec_params)
 {
     TFModel *tf_model = model->model;
     TFContext *ctx = &tf_model->ctx;
@@ -817,13 +857,13 @@ int ff_dnn_execute_model_tf(const DNNModel *model, DNNExecBaseParams *exec_param
     return execute_model_tf(request, tf_model->lltask_queue);
 }
 
-DNNAsyncStatusType ff_dnn_get_result_tf(const DNNModel *model, AVFrame **in, AVFrame **out)
+static DNNAsyncStatusType dnn_get_result_tf(const DNNModel *model, AVFrame **in, AVFrame **out)
 {
     TFModel *tf_model = model->model;
     return ff_dnn_get_result_common(tf_model->task_queue, in, out);
 }
 
-int ff_dnn_flush_tf(const DNNModel *model)
+static int dnn_flush_tf(const DNNModel *model)
 {
     TFModel *tf_model = model->model;
     TFContext *ctx = &tf_model->ctx;
@@ -853,43 +893,10 @@ int ff_dnn_flush_tf(const DNNModel *model)
     return ff_dnn_start_inference_async(ctx, &request->exec_module);
 }
 
-void ff_dnn_free_model_tf(DNNModel **model)
-{
-    TFModel *tf_model;
-
-    if (*model){
-        tf_model = (*model)->model;
-        while (ff_safe_queue_size(tf_model->request_queue) != 0) {
-            TFRequestItem *item = ff_safe_queue_pop_front(tf_model->request_queue);
-            destroy_request_item(&item);
-        }
-        ff_safe_queue_destroy(tf_model->request_queue);
-
-        while (ff_queue_size(tf_model->lltask_queue) != 0) {
-            LastLevelTaskItem *item = ff_queue_pop_front(tf_model->lltask_queue);
-            av_freep(&item);
-        }
-        ff_queue_destroy(tf_model->lltask_queue);
-
-        while (ff_queue_size(tf_model->task_queue) != 0) {
-            TaskItem *item = ff_queue_pop_front(tf_model->task_queue);
-            av_frame_free(&item->in_frame);
-            av_frame_free(&item->out_frame);
-            av_freep(&item);
-        }
-        ff_queue_destroy(tf_model->task_queue);
-
-        if (tf_model->graph){
-            TF_DeleteGraph(tf_model->graph);
-        }
-        if (tf_model->session){
-            TF_CloseSession(tf_model->session, tf_model->status);
-            TF_DeleteSession(tf_model->session, tf_model->status);
-        }
-        if (tf_model->status){
-            TF_DeleteStatus(tf_model->status);
-        }
-        av_freep(&tf_model);
-        av_freep(model);
-    }
-}
+const DNNModule ff_dnn_backend_tf = {
+    .load_model = dnn_load_model_tf,
+    .execute_model = dnn_execute_model_tf,
+    .get_result = dnn_get_result_tf,
+    .flush = dnn_flush_tf,
+    .free_model = dnn_free_model_tf,
+};
diff --git a/libavfilter/dnn/dnn_backend_tf.h b/libavfilter/dnn/dnn_backend_tf.h
deleted file mode 100644
index 0b63a4b6d2..0000000000
--- a/libavfilter/dnn/dnn_backend_tf.h
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (c) 2018 Sergey Lavrushkin
- *
- * This file is part of FFmpeg.
- *
- * FFmpeg is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 2.1 of the License, or (at your option) any later version.
- *
- * FFmpeg is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with FFmpeg; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- */
-
-/**
- * @file
- * DNN inference functions interface for TensorFlow backend.
- */
-
-
-#ifndef AVFILTER_DNN_DNN_BACKEND_TF_H
-#define AVFILTER_DNN_DNN_BACKEND_TF_H
-
-#include "../dnn_interface.h"
-
-DNNModel *ff_dnn_load_model_tf(const char *model_filename, DNNFunctionType func_type, const char *options, AVFilterContext *filter_ctx);
-
-int ff_dnn_execute_model_tf(const DNNModel *model, DNNExecBaseParams *exec_params);
-DNNAsyncStatusType ff_dnn_get_result_tf(const DNNModel *model, AVFrame **in, AVFrame **out);
-int ff_dnn_flush_tf(const DNNModel *model);
-
-void ff_dnn_free_model_tf(DNNModel **model);
-
-#endif
diff --git a/libavfilter/dnn/dnn_interface.c b/libavfilter/dnn/dnn_interface.c
index 5b1695a1dd..4f78f35474 100644
--- a/libavfilter/dnn/dnn_interface.c
+++ b/libavfilter/dnn/dnn_interface.c
@@ -24,10 +24,11 @@
  */
 
 #include "../dnn_interface.h"
-#include "dnn_backend_tf.h"
-#include "dnn_backend_openvino.h"
 #include "libavutil/mem.h"
 
+extern const DNNModule ff_dnn_backend_openvino;
+extern const DNNModule ff_dnn_backend_tf;
+
 DNNModule *ff_get_dnn_module(DNNBackendType backend_type)
 {
     DNNModule *dnn_module;
@@ -40,11 +41,7 @@ DNNModule *ff_get_dnn_module(DNNBackendType backend_type)
     switch(backend_type){
     case DNN_TF:
     #if (CONFIG_LIBTENSORFLOW == 1)
-        dnn_module->load_model = &ff_dnn_load_model_tf;
-        dnn_module->execute_model = &ff_dnn_execute_model_tf;
-        dnn_module->get_result = &ff_dnn_get_result_tf;
-        dnn_module->flush = &ff_dnn_flush_tf;
-        dnn_module->free_model = &ff_dnn_free_model_tf;
+        *dnn_module = ff_dnn_backend_tf;
     #else
         av_freep(&dnn_module);
         return NULL;
@@ -52,11 +49,7 @@ DNNModule *ff_get_dnn_module(DNNBackendType backend_type)
         break;
     case DNN_OV:
     #if (CONFIG_LIBOPENVINO == 1)
-        dnn_module->load_model = &ff_dnn_load_model_ov;
-        dnn_module->execute_model = &ff_dnn_execute_model_ov;
-        dnn_module->get_result = &ff_dnn_get_result_ov;
-        dnn_module->flush = &ff_dnn_flush_ov;
-        dnn_module->free_model = &ff_dnn_free_model_ov;
+        *dnn_module = ff_dnn_backend_openvino;
     #else
         av_freep(&dnn_module);
         return NULL;
-- 
2.40.1