FFmpeg
dnn_backend_common.h
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * DNN common functions for different backends.
 */

#ifndef AVFILTER_DNN_DNN_BACKEND_COMMON_H
#define AVFILTER_DNN_DNN_BACKEND_COMMON_H

#include "queue.h"
#include "../dnn_interface.h"
#include "libavutil/thread.h"

#define DNN_BACKEND_COMMON_OPTIONS \
    { "nireq",  "number of requests",      OFFSET(options.nireq),  AV_OPT_TYPE_INT,  { .i64 = 0 }, 0, INT_MAX, FLAGS }, \
    { "async",  "use DNN async inference", OFFSET(options.async),  AV_OPT_TYPE_BOOL, { .i64 = 1 }, 0, 1,       FLAGS },

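/**
 * A minimal usage sketch (not part of the original header): a backend is
 * expected to expand DNN_BACKEND_COMMON_OPTIONS inside its AVOption table,
 * so its context must contain an options struct with 'nireq' and 'async'
 * members for the OFFSET() entries above to resolve. The context name
 * MyDNNContext, the 'device' option and the 'options.device' member below
 * are hypothetical; OFFSET and FLAGS are assumed to be defined by the
 * backend as usual for AVOptions.
 *
 * @code
 * #define OFFSET(x) offsetof(MyDNNContext, x)
 * #define FLAGS AV_OPT_FLAG_FILTERING_PARAM
 * static const AVOption dnn_mybackend_options[] = {
 *     { "device", "device to run the model on", OFFSET(options.device),
 *       AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS },
 *     DNN_BACKEND_COMMON_OPTIONS
 *     { NULL }
 * };
 * @endcode
 */
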
// one task for one function call from dnn interface
typedef struct TaskItem {
    void *model; // model for the backend
    AVFrame *in_frame;
    AVFrame *out_frame;
    const char *input_name;
    const char **output_names;
    uint8_t async;
    uint8_t do_ioproc;
    uint32_t nb_output;
    uint32_t inference_todo;
    uint32_t inference_done;
} TaskItem;

// one task might have multiple inferences
typedef struct LastLevelTaskItem {
    TaskItem *task;
    uint32_t bbox_index;
} LastLevelTaskItem;

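/**
 * Illustrative sketch (assumed workflow, not part of the original header):
 * a backend typically fans one TaskItem out into several LastLevelTaskItems,
 * one per inference, and uses inference_todo/inference_done to decide when
 * the task as a whole is finished. The lltask_queue name is hypothetical;
 * inference_todo is assumed to have been set by the backend beforehand.
 *
 * @code
 * // create one last-level item per inference the task needs
 * for (uint32_t i = 0; i < task->inference_todo; i++) {
 *     LastLevelTaskItem *lltask = av_malloc(sizeof(*lltask));
 *     if (!lltask)
 *         return DNN_ERROR;
 *     lltask->task = task;
 *     lltask->bbox_index = i;
 *     if (ff_queue_push_back(lltask_queue, lltask) < 0) {
 *         av_freep(&lltask);
 *         return DNN_ERROR;
 *     }
 * }
 * // the backend's completion callback later does: task->inference_done++;
 * @endcode
 */
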
/**
 * Common Async Execution Mechanism for the DNN Backends.
 */
typedef struct DNNAsyncExecModule {
    /**
     * Synchronous inference function for the backend
     * with corresponding request item as the argument.
     */
    DNNReturnType (*start_inference)(void *request);

    /**
     * Completion Callback for the backend.
     * Expected argument type of callback must match that
     * of the inference function.
     */
    void (*callback)(void *args);

    /**
     * Argument for the execution functions.
     * i.e. Request item for the backend.
     */
    void *args;
#if HAVE_PTHREAD_CANCEL
    pthread_t thread_id;
    pthread_attr_t thread_attr;
#endif
} DNNAsyncExecModule;

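/**
 * A sketch of how a backend is assumed to wire up DNNAsyncExecModule
 * (MyRequestItem and the two static functions below are hypothetical):
 * start_inference runs one request synchronously, the callback finishes it,
 * and args points back to the request item so both receive the same object.
 *
 * @code
 * typedef struct MyRequestItem {
 *     TaskItem *task;
 *     DNNAsyncExecModule exec_module;
 *     // backend-specific request/tensor handles would live here
 * } MyRequestItem;
 *
 * static DNNReturnType my_start_inference(void *args)
 * {
 *     MyRequestItem *request = args;
 *     // run the backend's blocking inference for request->task here
 *     return DNN_SUCCESS;
 * }
 *
 * static void my_infer_completion_callback(void *args)
 * {
 *     MyRequestItem *request = args;
 *     // copy output data into request->task->out_frame here, then:
 *     request->task->inference_done++;
 * }
 *
 * // during request creation:
 * // request->exec_module.start_inference = &my_start_inference;
 * // request->exec_module.callback        = &my_infer_completion_callback;
 * // request->exec_module.args            = request;
 * @endcode
 */
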
int ff_check_exec_params(void *ctx, DNNBackendType backend, DNNFunctionType func_type, DNNExecBaseParams *exec_params);

/**
 * Fill the Task for Backend Execution. It should be called after
 * checking execution parameters using ff_check_exec_params.
 *
 * @param task pointer to the allocated task
 * @param exec_params pointer to execution parameters
 * @param backend_model void pointer to the backend model
 * @param async flag for async execution. Must be 0 or 1
 * @param do_ioproc flag for IO processing. Must be 0 or 1
 *
 * @retval DNN_SUCCESS if successful
 * @retval DNN_ERROR if flags are invalid or any parameter is NULL
 */
DNNReturnType ff_dnn_fill_task(TaskItem *task, DNNExecBaseParams *exec_params, void *backend_model, int async, int do_ioproc);

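/**
 * Usage sketch (assumed calling pattern): after validating the parameters
 * with ff_check_exec_params(), a backend's execute-model entry point fills a
 * heap-allocated task and queues it. ctx, my_model and my_model->task_queue
 * are hypothetical backend members.
 *
 * @code
 * TaskItem *task = av_malloc(sizeof(*task));
 * if (!task) {
 *     av_log(ctx, AV_LOG_ERROR, "unable to create task.\n");
 *     return DNN_ERROR;
 * }
 * // async = 1, do_ioproc = 1 here; real backends take both from their options
 * if (ff_dnn_fill_task(task, exec_params, my_model, 1, 1) != DNN_SUCCESS) {
 *     av_freep(&task);
 *     return DNN_ERROR;
 * }
 * if (ff_queue_push_back(my_model->task_queue, task) < 0) {
 *     av_freep(&task);
 *     return DNN_ERROR;
 * }
 * @endcode
 */
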
/**
 * Join the Async Execution thread and set module pointers to NULL.
 *
 * @param async_module pointer to DNNAsyncExecModule module
 *
 * @retval DNN_SUCCESS if successful
 * @retval DNN_ERROR if async_module is NULL
 */
DNNReturnType ff_dnn_async_module_cleanup(DNNAsyncExecModule *async_module);

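/**
 * Typically called from the backend's free/uninit path for every pending
 * request item before the item itself is released. A sketch, reusing the
 * hypothetical MyRequestItem and a hypothetical request_queue:
 *
 * @code
 * while (ff_queue_size(request_queue) != 0) {
 *     MyRequestItem *request = ff_queue_pop_front(request_queue);
 *     if (request) {
 *         ff_dnn_async_module_cleanup(&request->exec_module);
 *         av_freep(&request);
 *     }
 * }
 * ff_queue_destroy(request_queue);
 * @endcode
 */
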
/**
 * Start asynchronous inference routine for the TensorFlow
 * model on a detached thread. It calls the completion callback
 * after the inference completes. Completion callback and inference
 * function must be set before calling this function.
 *
 * If POSIX threads aren't supported, the execution rolls back to
 * synchronous mode, calling the completion callback after the
 * inference completes.
 *
 * @param ctx pointer to the backend context
 * @param async_module pointer to DNNAsyncExecModule module
 *
 * @retval DNN_SUCCESS on the start of async inference.
 * @retval DNN_ERROR in case async inference cannot be started
 */
DNNReturnType ff_dnn_start_inference_async(void *ctx, DNNAsyncExecModule *async_module);

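/**
 * Usage sketch (assumed calling pattern, not prescribed by this header):
 * once a request item has its task data filled in, the backend hands it to
 * the async runner, or runs it in place for the synchronous path.
 *
 * @code
 * if (task->async) {
 *     if (ff_dnn_start_inference_async(ctx, &request->exec_module) != DNN_SUCCESS)
 *         return DNN_ERROR;
 * } else {
 *     // synchronous path: run and complete in place
 *     if (request->exec_module.start_inference(request) != DNN_SUCCESS)
 *         return DNN_ERROR;
 *     request->exec_module.callback(request);
 * }
 * @endcode
 */
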
/**
 * Extract input and output frame from the Task Queue after
 * asynchronous inference.
 *
 * @param task_queue pointer to the task queue of the backend
 * @param in double pointer to the input frame
 * @param out double pointer to the output frame
 *
 * @retval DAST_EMPTY_QUEUE if task queue is empty
 * @retval DAST_NOT_READY if inference not completed yet
 * @retval DAST_SUCCESS if result successfully extracted
 */
DNNAsyncStatusType ff_dnn_get_result_common(Queue *task_queue, AVFrame **in, AVFrame **out);

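/**
 * A backend's get-result callback can usually forward straight to this
 * helper, with task_queue being the backend's queue of TaskItems; the
 * caller then polls the status, e.g.:
 *
 * @code
 * AVFrame *in = NULL, *out = NULL;
 * DNNAsyncStatusType status = ff_dnn_get_result_common(task_queue, &in, &out);
 * if (status == DAST_SUCCESS) {
 *     // in/out now belong to the caller: push out downstream, free in if unused
 * } else if (status == DAST_NOT_READY || status == DAST_EMPTY_QUEUE) {
 *     // nothing finished yet; try again later
 * }
 * @endcode
 */
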
/**
 * Allocate input and output frames and fill the Task
 * with execution parameters.
 *
 * @param task pointer to the allocated task
 * @param exec_params pointer to execution parameters
 * @param backend_model void pointer to the backend model
 * @param input_height height of input frame
 * @param input_width width of input frame
 * @param ctx pointer to the backend context
 *
 * @retval DNN_SUCCESS if successful
 * @retval DNN_ERROR if allocation fails
 */
DNNReturnType ff_dnn_fill_gettingoutput_task(TaskItem *task, DNNExecBaseParams *exec_params, void *backend_model, int input_height, int input_width, void *ctx);

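/**
 * Usage sketch (assumed pattern, mirroring how a backend might probe its
 * output shape): a temporary task is run on an input-sized dummy frame just
 * to learn the output dimensions. input_name, output_name, input_height,
 * input_width, my_model and ctx are assumed to be in scope in the backend.
 *
 * @code
 * TaskItem task;
 * DNNExecBaseParams exec_params = {
 *     .input_name   = input_name,
 *     .output_names = &output_name,
 *     .nb_output    = 1,
 *     .in_frame     = NULL,
 *     .out_frame    = NULL,
 * };
 * if (ff_dnn_fill_gettingoutput_task(&task, &exec_params, my_model,
 *                                    input_height, input_width, ctx) != DNN_SUCCESS)
 *     return DNN_ERROR;
 * // ... execute the task synchronously, then read:
 * // *output_width  = task.out_frame->width;
 * // *output_height = task.out_frame->height;
 * av_frame_free(&task.out_frame);
 * av_frame_free(&task.in_frame);
 * @endcode
 */
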
#endif