vf_avgblur_vulkan.c
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/opt.h"
#include "vulkan.h"
#include "internal.h"

#define CGS 32

typedef struct AvgBlurVulkanContext {
    VulkanFilterContext vkctx;

    int initialized;
    FFVkExecContext *exec;
    VulkanPipeline *pl_hor;
    VulkanPipeline *pl_ver;

    /* Shader updaters, must be in the main filter struct */
    VkDescriptorImageInfo input_images[3];
    VkDescriptorImageInfo tmp_images[3];
    VkDescriptorImageInfo output_images[3];

    int size_x;
    int size_y;
    int planes;
} AvgBlurVulkanContext;

static const char blur_kernel[] = {
    C(0, shared vec4 cache[DIR(gl_WorkGroupSize) + FILTER_RADIUS*2 + 1]; )
    C(0, )
    C(0, void distort(const ivec2 pos, const int idx) )
    C(0, { )
    C(1, const uint cp = DIR(gl_LocalInvocationID) + FILTER_RADIUS; )
    C(0, )
    C(1, cache[cp] = texture(input_img[idx], pos); )
    C(0, )
    C(1, const ivec2 loc_l = pos - INC(FILTER_RADIUS); )
    C(1, cache[cp - FILTER_RADIUS] = texture(input_img[idx], loc_l); )
    C(0, )
    C(1, const ivec2 loc_h = pos + INC(DIR(gl_WorkGroupSize)); )
    C(1, cache[cp + DIR(gl_WorkGroupSize)] = texture(input_img[idx], loc_h); )
    C(0, )
    C(1, barrier(); )
    C(0, )
    C(1, vec4 sum = vec4(0); )
    C(1, for (int p = -FILTER_RADIUS; p <= FILTER_RADIUS; p++) )
    C(2, sum += cache[cp + p]; )
    C(0, )
    C(1, sum /= vec4(FILTER_RADIUS*2 + 1); )
    C(1, imageStore(output_img[idx], pos, sum); )
    C(0, } )
};
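
/*
 * The C() rows above are assembled into GLSL at run time. For the horizontal
 * pass, init_filter() below defines FILTER_RADIUS as (size_x - 1), INC(x) as
 * ivec2(x, 0) and DIR(var) as var.x, so distort() roughly expands to:
 *
 *     shared vec4 cache[gl_WorkGroupSize.x + FILTER_RADIUS*2 + 1];
 *
 *     void distort(const ivec2 pos, const int idx)
 *     {
 *         const uint cp = gl_LocalInvocationID.x + FILTER_RADIUS;
 *
 *         cache[cp] = texture(input_img[idx], pos);
 *         cache[cp - FILTER_RADIUS] =
 *             texture(input_img[idx], pos - ivec2(FILTER_RADIUS, 0));
 *         cache[cp + gl_WorkGroupSize.x] =
 *             texture(input_img[idx], pos + ivec2(gl_WorkGroupSize.x, 0));
 *
 *         barrier();
 *
 *         vec4 sum = vec4(0);
 *         for (int p = -FILTER_RADIUS; p <= FILTER_RADIUS; p++)
 *             sum += cache[cp + p];
 *
 *         imageStore(output_img[idx], pos, sum / vec4(FILTER_RADIUS*2 + 1));
 *     }
 *
 * i.e. the workgroup stages the texels it needs in shared memory, then each
 * invocation averages the (2*FILTER_RADIUS + 1)-texel window centered on its
 * own pixel. The vertical pass uses the same kernel with the .y/ivec2(0, x)
 * variants of the macros.
 */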

static av_cold int init_filter(AVFilterContext *ctx, AVFrame *in)
{
    int err;
    SPIRVShader *shd;
    AvgBlurVulkanContext *s = ctx->priv;
    int planes = av_pix_fmt_count_planes(s->vkctx.output_format);
    VkSampler *sampler = ff_vk_init_sampler(ctx, 1, VK_FILTER_LINEAR);

    VulkanDescriptorSetBinding desc_i[2] = {
        {
            .name       = "input_img",
            .type       = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
            .dimensions = 2,
            .elems      = planes,
            .stages     = VK_SHADER_STAGE_COMPUTE_BIT,
            .samplers   = DUP_SAMPLER_ARRAY4(*sampler),
        },
        {
            .name       = "output_img",
            .type       = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
            .mem_layout = ff_vk_shader_rep_fmt(s->vkctx.output_format),
            .mem_quali  = "writeonly",
            .dimensions = 2,
            .elems      = planes,
            .stages     = VK_SHADER_STAGE_COMPUTE_BIT,
        },
    };

    if (!sampler)
        return AVERROR_EXTERNAL;

    { /* Create shader for the horizontal pass */
        desc_i[0].updater = s->input_images;
        desc_i[1].updater = s->tmp_images;

        s->pl_hor = ff_vk_create_pipeline(ctx);
        if (!s->pl_hor)
            return AVERROR(ENOMEM);

        shd = ff_vk_init_shader(ctx, s->pl_hor, "avgblur_compute_hor",
                                VK_SHADER_STAGE_COMPUTE_BIT);

        ff_vk_set_compute_shader_sizes(ctx, shd, (int [3]){ CGS, 1, 1 });

        RET(ff_vk_add_descriptor_set(ctx, s->pl_hor, shd, desc_i, 2, 0));

        GLSLF(0, #define FILTER_RADIUS (%i) ,s->size_x - 1);
        GLSLC(0, #define INC(x) (ivec2(x, 0)) );
        GLSLC(0, #define DIR(var) (var.x) );
        GLSLD(   blur_kernel );
        GLSLC(0, void main() );
        GLSLC(0, { );
        GLSLC(1, ivec2 size; );
        GLSLC(1, const ivec2 pos = ivec2(gl_GlobalInvocationID.xy); );
        for (int i = 0; i < planes; i++) {
            GLSLC(0, );
            GLSLF(1, size = imageSize(output_img[%i]); ,i);
            GLSLC(1, if (IS_WITHIN(pos, size)) { );
            if (s->planes & (1 << i)) {
                GLSLF(2, distort(pos, %i); ,i);
            } else {
                GLSLF(2, vec4 res = texture(input_img[%i], pos); ,i);
                GLSLF(2, imageStore(output_img[%i], pos, res); ,i);
            }
            GLSLC(1, } );
        }
        GLSLC(0, } );

        RET(ff_vk_compile_shader(ctx, shd, "main"));

        RET(ff_vk_init_pipeline_layout(ctx, s->pl_hor));
        RET(ff_vk_init_compute_pipeline(ctx, s->pl_hor));
    }

    { /* Create shader for the vertical pass */
        desc_i[0].updater = s->tmp_images;
        desc_i[1].updater = s->output_images;

        s->pl_ver = ff_vk_create_pipeline(ctx);
        if (!s->pl_ver)
            return AVERROR(ENOMEM);

        shd = ff_vk_init_shader(ctx, s->pl_ver, "avgblur_compute_ver",
                                VK_SHADER_STAGE_COMPUTE_BIT);

        ff_vk_set_compute_shader_sizes(ctx, shd, (int [3]){ 1, CGS, 1 });

        RET(ff_vk_add_descriptor_set(ctx, s->pl_ver, shd, desc_i, 2, 0));

        GLSLF(0, #define FILTER_RADIUS (%i) ,s->size_y - 1);
        GLSLC(0, #define INC(x) (ivec2(0, x)) );
        GLSLC(0, #define DIR(var) (var.y) );
        GLSLD(   blur_kernel );
        GLSLC(0, void main() );
        GLSLC(0, { );
        GLSLC(1, ivec2 size; );
        GLSLC(1, const ivec2 pos = ivec2(gl_GlobalInvocationID.xy); );
        for (int i = 0; i < planes; i++) {
            GLSLC(0, );
            GLSLF(1, size = imageSize(output_img[%i]); ,i);
            GLSLC(1, if (IS_WITHIN(pos, size)) { );
            if (s->planes & (1 << i)) {
                GLSLF(2, distort(pos, %i); ,i);
            } else {
                GLSLF(2, vec4 res = texture(input_img[%i], pos); ,i);
                GLSLF(2, imageStore(output_img[%i], pos, res); ,i);
            }
            GLSLC(1, } );
        }
        GLSLC(0, } );

        RET(ff_vk_compile_shader(ctx, shd, "main"));

        RET(ff_vk_init_pipeline_layout(ctx, s->pl_ver));
        RET(ff_vk_init_compute_pipeline(ctx, s->pl_ver));
    }

    /* Execution context */
    RET(ff_vk_create_exec_ctx(ctx, &s->exec,
                              s->vkctx.hwctx->queue_family_comp_index));

    s->initialized = 1;

    return 0;

fail:
    return err;
}

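/*
 * Records and submits both blur passes on a single command buffer: the three
 * frames are transitioned to the layouts the shaders expect, the horizontal
 * pipeline reads in_f and writes tmp_f, and the vertical pipeline reads tmp_f
 * and writes out_f.
 */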
static int process_frames(AVFilterContext *avctx, AVFrame *out_f, AVFrame *tmp_f, AVFrame *in_f)
{
    int err;
    AvgBlurVulkanContext *s = avctx->priv;
    AVVkFrame *in  = (AVVkFrame *)in_f->data[0];
    AVVkFrame *tmp = (AVVkFrame *)tmp_f->data[0];
    AVVkFrame *out = (AVVkFrame *)out_f->data[0];
    int planes = av_pix_fmt_count_planes(s->vkctx.output_format);

    for (int i = 0; i < planes; i++) {
        RET(ff_vk_create_imageview(avctx, &s->input_images[i].imageView, in->img[i],
                                   av_vkfmt_from_pixfmt(s->vkctx.input_format)[i],
                                   ff_comp_identity_map));

        RET(ff_vk_create_imageview(avctx, &s->tmp_images[i].imageView, tmp->img[i],
                                   av_vkfmt_from_pixfmt(s->vkctx.output_format)[i],
                                   ff_comp_identity_map));

        RET(ff_vk_create_imageview(avctx, &s->output_images[i].imageView, out->img[i],
                                   av_vkfmt_from_pixfmt(s->vkctx.output_format)[i],
                                   ff_comp_identity_map));

        s->input_images[i].imageLayout  = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
        s->tmp_images[i].imageLayout    = VK_IMAGE_LAYOUT_GENERAL;
        s->output_images[i].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
    }

    ff_vk_update_descriptor_set(avctx, s->pl_hor, 0);
    ff_vk_update_descriptor_set(avctx, s->pl_ver, 0);

    ff_vk_start_exec_recording(avctx, s->exec);

    for (int i = 0; i < planes; i++) {
        VkImageMemoryBarrier bar[] = {
            {
                .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
                .srcAccessMask = 0,
                .dstAccessMask = VK_ACCESS_SHADER_READ_BIT,
                .oldLayout = in->layout[i],
                .newLayout = s->input_images[i].imageLayout,
                .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
                .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
                .image = in->img[i],
                .subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                .subresourceRange.levelCount = 1,
                .subresourceRange.layerCount = 1,
            },
            {
                .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
                .srcAccessMask = 0,
                .dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_SHADER_READ_BIT,
                .oldLayout = tmp->layout[i],
                .newLayout = s->tmp_images[i].imageLayout,
                .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
                .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
                .image = tmp->img[i],
                .subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                .subresourceRange.levelCount = 1,
                .subresourceRange.layerCount = 1,
            },
            {
                .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
                .srcAccessMask = 0,
                .dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT,
                .oldLayout = out->layout[i],
                .newLayout = s->output_images[i].imageLayout,
                .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
                .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
                .image = out->img[i],
                .subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                .subresourceRange.levelCount = 1,
                .subresourceRange.layerCount = 1,
            },
        };

        vkCmdPipelineBarrier(s->exec->buf, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                             VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0,
                             0, NULL, 0, NULL, FF_ARRAY_ELEMS(bar), bar);

        in->layout[i]  = bar[0].newLayout;
        in->access[i]  = bar[0].dstAccessMask;

        tmp->layout[i] = bar[1].newLayout;
        tmp->access[i] = bar[1].dstAccessMask;

        out->layout[i] = bar[2].newLayout;
        out->access[i] = bar[2].dstAccessMask;
    }

    ff_vk_bind_pipeline_exec(avctx, s->exec, s->pl_hor);

    /* Horizontal pass: CGSx1 workgroups, one group per CGS-wide row segment */
    vkCmdDispatch(s->exec->buf, FFALIGN(s->vkctx.output_width, CGS)/CGS,
                  s->vkctx.output_height, 1);

    ff_vk_bind_pipeline_exec(avctx, s->exec, s->pl_ver);

    /* Vertical pass: 1xCGS workgroups, one group per CGS-tall column segment */
    vkCmdDispatch(s->exec->buf, s->vkctx.output_width,
                  FFALIGN(s->vkctx.output_height, CGS)/CGS, 1);

    ff_vk_add_exec_dep(avctx, s->exec, in_f, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
    ff_vk_add_exec_dep(avctx, s->exec, out_f, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);

    err = ff_vk_submit_exec_queue(avctx, s->exec);
    if (err)
        return err;

fail:

    for (int i = 0; i < planes; i++) {
        ff_vk_destroy_imageview(avctx, &s->input_images[i].imageView);
        ff_vk_destroy_imageview(avctx, &s->tmp_images[i].imageView);
        ff_vk_destroy_imageview(avctx, &s->output_images[i].imageView);
    }

    return err;
}

static int avgblur_vulkan_filter_frame(AVFilterLink *link, AVFrame *in)
{
    int err;
    AVFrame *tmp = NULL, *out = NULL;
    AVFilterContext *ctx = link->dst;
    AvgBlurVulkanContext *s = ctx->priv;
    AVFilterLink *outlink = ctx->outputs[0];

    out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!out) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    tmp = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!tmp) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (!s->initialized)
        RET(init_filter(ctx, in));

    RET(process_frames(ctx, out, tmp, in));

    err = av_frame_copy_props(out, in);
    if (err < 0)
        goto fail;

    av_frame_free(&in);
    av_frame_free(&tmp);

    return ff_filter_frame(outlink, out);

fail:
    av_frame_free(&in);
    av_frame_free(&tmp);
    av_frame_free(&out);
    return err;
}

static void avgblur_vulkan_uninit(AVFilterContext *avctx)
{
    AvgBlurVulkanContext *s = avctx->priv;

    ff_vk_filter_uninit(avctx);

    s->initialized = 0;
}

#define OFFSET(x) offsetof(AvgBlurVulkanContext, x)
#define FLAGS (AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)
static const AVOption avgblur_vulkan_options[] = {
    { "sizeX",  "Set horizontal radius",          OFFSET(size_x), AV_OPT_TYPE_INT, { .i64 = 3   }, 1, 32,  .flags = FLAGS },
    { "planes", "Set planes to filter (bitmask)", OFFSET(planes), AV_OPT_TYPE_INT, { .i64 = 0xF }, 0, 0xF, .flags = FLAGS },
    { "sizeY",  "Set vertical radius",            OFFSET(size_y), AV_OPT_TYPE_INT, { .i64 = 3   }, 1, 32,  .flags = FLAGS },
    { NULL },
};

AVFILTER_DEFINE_CLASS(avgblur_vulkan);

static const AVFilterPad avgblur_vulkan_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = &avgblur_vulkan_filter_frame,
        .config_props = &ff_vk_filter_config_input,
    },
    { NULL }
};

static const AVFilterPad avgblur_vulkan_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = &ff_vk_filter_config_output,
    },
    { NULL }
};

AVFilter ff_vf_avgblur_vulkan = {
    .name           = "avgblur_vulkan",
    .description    = NULL_IF_CONFIG_SMALL("Apply avgblur mask to input video"),
    .priv_size      = sizeof(AvgBlurVulkanContext),
    .init           = &ff_vk_filter_init,
    .uninit         = &avgblur_vulkan_uninit,
    .query_formats  = &ff_vk_filter_query_formats,
    .inputs         = avgblur_vulkan_inputs,
    .outputs        = avgblur_vulkan_outputs,
    .priv_class     = &avgblur_vulkan_class,
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};
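
/*
 * Example usage (illustrative only; assumes an FFmpeg build with the Vulkan
 * filters enabled and a Vulkan-capable device):
 *
 *     ffmpeg -init_hw_device vulkan -i input.mp4 \
 *            -vf "format=nv12,hwupload,avgblur_vulkan=sizeX=5:sizeY=5:planes=0xF,hwdownload,format=nv12" \
 *            output.mp4
 */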