dnn_backend_torch.cpp File Reference
#include <torch/torch.h>
#include <torch/script.h>
#include "../internal.h"
#include "dnn_io_proc.h"
#include "dnn_backend_common.h"
#include "libavutil/opt.h"
#include "libavutil/mem.h"
#include "queue.h"
#include "safe_queue.h"


Data Structures

struct  THOptions
 
struct  THContext
 
struct  THModel
 
struct  THInferRequest
 
struct  THRequestItem
 

Macros

#define OFFSET(x)   offsetof(THContext, x)
 
#define FLAGS   AV_OPT_FLAG_FILTERING_PARAM
 

Functions

 AVFILTER_DEFINE_CLASS (dnn_th)
 
static int extract_lltask_from_task (TaskItem *task, Queue *lltask_queue)
 
static void th_free_request (THInferRequest *request)
 
static void destroy_request_item (THRequestItem **arg)
 
static void dnn_free_model_th (DNNModel **model)
 
static int get_input_th (void *model, DNNData *input, const char *input_name)
 
static void deleter (void *arg)
 
static int fill_model_input_th (THModel *th_model, THRequestItem *request)
 
static int th_start_inference (void *args)
 
static void infer_completion_callback (void *args)
 
static int execute_model_th (THRequestItem *request, Queue *lltask_queue)
 
static int get_output_th (void *model, const char *input_name, int input_width, int input_height, const char *output_name, int *output_width, int *output_height)
 
static THInferRequest *th_create_inference_request (void)
 
static DNNModel *dnn_load_model_th (const char *model_filename, DNNFunctionType func_type, const char *options, AVFilterContext *filter_ctx)
 
static int dnn_execute_model_th (const DNNModel *model, DNNExecBaseParams *exec_params)
 
static DNNAsyncStatusType dnn_get_result_th (const DNNModel *model, AVFrame **in, AVFrame **out)
 
static int dnn_flush_th (const DNNModel *model)
 

Variables

static const AVOption dnn_th_options []
 
const DNNModule ff_dnn_backend_torch
 

Detailed Description

DNN Torch backend implementation.

Definition in file dnn_backend_torch.cpp.

Macro Definition Documentation

◆ OFFSET

#define OFFSET(x)   offsetof(THContext, x)

Definition at line 70 of file dnn_backend_torch.cpp.

◆ FLAGS

#define FLAGS   AV_OPT_FLAG_FILTERING_PARAM

Definition at line 71 of file dnn_backend_torch.cpp.

Function Documentation

◆ AVFILTER_DEFINE_CLASS()

AVFILTER_DEFINE_CLASS ( dnn_th  )

◆ extract_lltask_from_task()

static int extract_lltask_from_task ( TaskItem *  task,
Queue *  lltask_queue 
)
static

Definition at line 80 of file dnn_backend_torch.cpp.

Referenced by dnn_execute_model_th(), and get_output_th().

◆ th_free_request()

static void th_free_request ( THInferRequest *  request)
static

◆ destroy_request_item()

static void destroy_request_item ( THRequestItem **  arg)
inline static

◆ dnn_free_model_th()

static void dnn_free_model_th ( DNNModel **  model)
static

Definition at line 129 of file dnn_backend_torch.cpp.

Referenced by dnn_load_model_th().

◆ get_input_th()

static int get_input_th ( void *  model,
DNNData *  input,
const char *  input_name 
)
static

Definition at line 161 of file dnn_backend_torch.cpp.

Referenced by dnn_load_model_th(), and fill_model_input_th().

◆ deleter()

static void deleter ( void *  arg)
static

Definition at line 173 of file dnn_backend_torch.cpp.

Referenced by fill_model_input_th().

◆ fill_model_input_th()

static int fill_model_input_th ( THModel *  th_model,
THRequestItem *  request 
)
static

Definition at line 178 of file dnn_backend_torch.cpp.

Referenced by execute_model_th().

◆ th_start_inference()

static int th_start_inference ( void *  args)
static

Definition at line 237 of file dnn_backend_torch.cpp.

Referenced by dnn_load_model_th(), and execute_model_th().

◆ infer_completion_callback()

static void infer_completion_callback ( void *  args)
static

Definition at line 274 of file dnn_backend_torch.cpp.

Referenced by dnn_load_model_th(), and execute_model_th().

◆ execute_model_th()

static int execute_model_th ( THRequestItem *  request,
Queue *  lltask_queue 
)
static

Definition at line 329 of file dnn_backend_torch.cpp.

Referenced by dnn_execute_model_th(), dnn_flush_th(), and get_output_th().

◆ get_output_th()

static int get_output_th ( void *  model,
const char *  input_name,
int  input_width,
int  input_height,
const char *  output_name,
int *  output_width,
int *  output_height 
)
static

Definition at line 373 of file dnn_backend_torch.cpp.

Referenced by dnn_load_model_th().

◆ th_create_inference_request()

static THInferRequest* th_create_inference_request ( void  )
static

Definition at line 416 of file dnn_backend_torch.cpp.

Referenced by dnn_load_model_th().

◆ dnn_load_model_th()

static DNNModel* dnn_load_model_th ( const char *  model_filename,
DNNFunctionType  func_type,
const char *  options,
AVFilterContext *  filter_ctx 
)
static

Definition at line 427 of file dnn_backend_torch.cpp.

◆ dnn_execute_model_th()

static int dnn_execute_model_th ( const DNNModel *  model,
DNNExecBaseParams *  exec_params 
)
static

Definition at line 519 of file dnn_backend_torch.cpp.

◆ dnn_get_result_th()

static DNNAsyncStatusType dnn_get_result_th ( const DNNModel *  model,
AVFrame **  in,
AVFrame **  out 
)
static

Definition at line 568 of file dnn_backend_torch.cpp.

◆ dnn_flush_th()

static int dnn_flush_th ( const DNNModel *  model)
static

Definition at line 574 of file dnn_backend_torch.cpp.

Variable Documentation

◆ dnn_th_options

const AVOption dnn_th_options[]
static
Initial value:
= {
{ "device", "device to run model", OFFSET(options.device_name), AV_OPT_TYPE_STRING, { .str = "cpu" }, 0, 0, FLAGS },
{ "optimize", "turn on graph executor optimization", OFFSET(options.optimize), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, FLAGS},
{ NULL }
}

Definition at line 72 of file dnn_backend_torch.cpp.
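
These options arrive as the backend options string passed to dnn_load_model_th() (typically from a dnn filter's backend_configs parameter). A minimal sketch of how such a string could be applied against this table with the libavutil option API, assuming THContext's first member is the AVClass pointer for the dnn_th class and that '&' and '=' are the pair and key/value separators; the helper name apply_th_options is illustrative and not part of this file (libavutil/opt.h is already included above):

static int apply_th_options(THContext *ctx, const char *options)
{
    av_opt_set_defaults(ctx);   /* start from the .str/.i64 defaults declared above */
    /* e.g. options = "device=cuda&optimize=1" */
    return av_opt_set_from_string(ctx, options, NULL, "=", "&");
}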

◆ ff_dnn_backend_torch

const DNNModule ff_dnn_backend_torch
Initial value:
= {
.load_model = dnn_load_model_th,
.execute_model = dnn_execute_model_th,
.get_result = dnn_get_result_th,
.flush = dnn_flush_th,
.free_model = dnn_free_model_th,
}
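
The backend is consumed through this vtable rather than by calling the static functions above directly. A minimal sketch of the load / execute / poll / free cycle, assuming the DNNModel, DNNExecBaseParams and DNNAsyncStatusType definitions from dnn_interface.h; the model path, option string and tensor names are illustrative only:

static int run_torch_model(AVFilterContext *filter_ctx, AVFrame *in, AVFrame *out)
{
    const DNNModule *dnn = &ff_dnn_backend_torch;
    const char *output_names[] = { "y" };      /* illustrative output name */
    DNNModel *model;
    AVFrame *done_in, *done_out;
    int ret;

    model = dnn->load_model("model.pt", DFT_PROCESS_FRAME, "device=cpu", filter_ctx);
    if (!model)
        return AVERROR(EINVAL);

    DNNExecBaseParams params = {
        .input_name   = "x",                   /* illustrative input name */
        .output_names = output_names,
        .nb_output    = 1,
        .in_frame     = in,
        .out_frame    = out,
    };
    ret = dnn->execute_model(model, &params);

    /* drain completed requests; get_result reports DAST_SUCCESS once an
     * inference has finished and done_out holds the processed frame */
    while (dnn->get_result(model, &done_in, &done_out) == DAST_SUCCESS)
        ;

    dnn->flush(model);
    dnn->free_model(&model);
    return ret;
}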