FFmpeg
vsrc_ddagrab.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "config.h"
20 
21 #if !defined(_WIN32_WINNT) || _WIN32_WINNT < 0x0A00
22 #undef _WIN32_WINNT
23 #define _WIN32_WINNT 0x0A00
24 #endif
25 
26 #include <windows.h>
27 
28 #define COBJMACROS
29 
30 #include <initguid.h>
31 #include <d3d11.h>
32 #include <dxgi1_2.h>
33 #if HAVE_IDXGIOUTPUT5
34 #include <dxgi1_5.h>
35 #endif
36 
37 #include "libavutil/mem.h"
38 #include "libavutil/opt.h"
39 #include "libavutil/time.h"
40 #include "libavutil/avstring.h"
41 #include "libavutil/avassert.h"
42 #include "libavutil/hwcontext.h"
43 #include "libavutil/hwcontext_d3d11va.h"
44 #include "compat/w32dlfcn.h"
45 #include "avfilter.h"
46 #include "filters.h"
47 #include "video.h"
48 
49 #include "vsrc_ddagrab_shaders.h"
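// Precompiled shader bytecode (vertex_shader_bytes, pixel_shader_bytes) used
// below to composite the mouse pointer onto captured frames.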
50 
51 // avutil/time.h takes and returns time in microseconds
52 #define TIMER_RES 1000000
53 #define TIMER_RES64 INT64_C(1000000)
54 
55 typedef struct DdagrabContext {
56  const AVClass *class;
57 
58  AVBufferRef *device_ref;
59  AVHWDeviceContext *device_ctx;
60  AVD3D11VADeviceContext *device_hwctx;
61 
62  AVBufferRef *frames_ref;
63  AVHWFramesContext *frames_ctx;
64  AVD3D11VAFramesContext *frames_hwctx;
65 
66  DXGI_OUTPUT_DESC output_desc;
67  IDXGIOutputDuplication *dxgi_outdupl;
68  AVFrame *last_frame;
69 
70  int mouse_x, mouse_y;
71  ID3D11Texture2D *mouse_texture;
72  ID3D11ShaderResourceView* mouse_resource_view;
73  ID3D11Texture2D *mouse_xor_texture;
74  ID3D11ShaderResourceView* mouse_xor_resource_view;
75 
76  AVRational time_base;
77  int64_t time_frame;
78  int64_t time_timeout;
79  int64_t first_pts;
80 
81  DXGI_FORMAT raw_format;
82  int raw_width;
83  int raw_height;
84 
85  ID3D11Texture2D *probed_texture;
86  ID3D11Texture2D *buffer_texture;
87 
88  ID3D11VertexShader *vertex_shader;
89  ID3D11InputLayout *input_layout;
90  ID3D11PixelShader *pixel_shader;
91  ID3D11Buffer *const_buffer;
92  ID3D11SamplerState *sampler_state;
93  ID3D11BlendState *blend_state;
94  ID3D11BlendState *blend_state_xor;
95 
96  int output_idx;
97  int draw_mouse;
98  AVRational framerate;
99  int width;
100  int height;
101  int offset_x;
102  int offset_y;
103  int out_fmt;
104  int allow_fallback;
105  int force_fmt;
106  int dup_frames;
107 } DdagrabContext;
108 
109 #define OFFSET(x) offsetof(DdagrabContext, x)
110 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
111 static const AVOption ddagrab_options[] = {
112  { "output_idx", "dda output index to capture", OFFSET(output_idx), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, FLAGS },
113  { "draw_mouse", "draw the mouse pointer", OFFSET(draw_mouse), AV_OPT_TYPE_BOOL, { .i64 = 1 }, 0, 1, FLAGS },
114  { "framerate", "set video frame rate", OFFSET(framerate), AV_OPT_TYPE_VIDEO_RATE, { .str = "30" }, 0, INT_MAX, FLAGS },
115  { "video_size", "set video frame size", OFFSET(width), AV_OPT_TYPE_IMAGE_SIZE, { .str = NULL }, 0, 0, FLAGS },
116  { "offset_x", "capture area x offset", OFFSET(offset_x), AV_OPT_TYPE_INT, { .i64 = 0 }, INT_MIN, INT_MAX, FLAGS },
117  { "offset_y", "capture area y offset", OFFSET(offset_y), AV_OPT_TYPE_INT, { .i64 = 0 }, INT_MIN, INT_MAX, FLAGS },
118  { "output_fmt", "desired output format", OFFSET(out_fmt), AV_OPT_TYPE_INT, { .i64 = DXGI_FORMAT_B8G8R8A8_UNORM }, 0, INT_MAX, FLAGS, .unit = "output_fmt" },
119  { "auto", "let dda pick its preferred format", 0, AV_OPT_TYPE_CONST, { .i64 = 0 }, 0, INT_MAX, FLAGS, .unit = "output_fmt" },
120  { "8bit", "only output default 8 Bit format", 0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_B8G8R8A8_UNORM }, 0, INT_MAX, FLAGS, .unit = "output_fmt" },
121  { "bgra", "only output 8 Bit BGRA", 0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_B8G8R8A8_UNORM }, 0, INT_MAX, FLAGS, .unit = "output_fmt" },
122  { "10bit", "only output default 10 Bit format", 0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_R10G10B10A2_UNORM }, 0, INT_MAX, FLAGS, .unit = "output_fmt" },
123  { "x2bgr10", "only output 10 Bit X2BGR10", 0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_R10G10B10A2_UNORM }, 0, INT_MAX, FLAGS, .unit = "output_fmt" },
124  { "16bit", "only output default 16 Bit format", 0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_R16G16B16A16_FLOAT },0, INT_MAX, FLAGS, .unit = "output_fmt" },
125  { "rgbaf16", "only output 16 Bit RGBAF16", 0, AV_OPT_TYPE_CONST, { .i64 = DXGI_FORMAT_R16G16B16A16_FLOAT },0, INT_MAX, FLAGS, .unit = "output_fmt" },
126  { "allow_fallback", "don't error on fallback to default 8 Bit format",
127  OFFSET(allow_fallback), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
128  { "force_fmt", "exclude BGRA from format list (experimental, discouraged by Microsoft)",
129  OFFSET(force_fmt), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
130  { "dup_frames", "duplicate frames to maintain framerate",
131  OFFSET(dup_frames), AV_OPT_TYPE_BOOL, { .i64 = 1 }, 0, 1, FLAGS },
132  { NULL }
133 };
134 
135 AVFILTER_DEFINE_CLASS(ddagrab);
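/*
 * Illustrative usage only (an assumed typical command line, not part of this file):
 *   ffmpeg -f lavfi -i ddagrab=output_idx=0:framerate=60,hwdownload,format=bgra out.mkv
 * The filter emits AV_PIX_FMT_D3D11 hardware frames, so they either need to be
 * downloaded (hwdownload) or consumed by a D3D11-capable filter/encoder downstream.
 */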
136 
137 static inline void release_resource(void *resource)
138 {
139  IUnknown **resp = (IUnknown**)resource;
140  if (*resp) {
141  IUnknown_Release(*resp);
142  *resp = NULL;
143  }
144 }
145 
146 static av_cold void ddagrab_uninit(AVFilterContext *avctx)
147 {
148  DdagrabContext *dda = avctx->priv;
149 
149 
150  release_resource(&dda->blend_state);
151  release_resource(&dda->blend_state_xor);
152  release_resource(&dda->sampler_state);
153  release_resource(&dda->const_buffer);
154  release_resource(&dda->pixel_shader);
155  release_resource(&dda->input_layout);
156  release_resource(&dda->vertex_shader);
157 
158  release_resource(&dda->mouse_resource_view);
159  release_resource(&dda->mouse_texture);
160  release_resource(&dda->mouse_xor_resource_view);
161  release_resource(&dda->mouse_xor_texture);
162 
163  release_resource(&dda->dxgi_outdupl);
164  release_resource(&dda->probed_texture);
165  release_resource(&dda->buffer_texture);
166 
167  av_frame_free(&dda->last_frame);
168  av_buffer_unref(&dda->frames_ref);
169  av_buffer_unref(&dda->device_ref);
170 }
171 
172 static av_cold int init_dxgi_dda(AVFilterContext *avctx)
173 {
174  DdagrabContext *dda = avctx->priv;
175  IDXGIDevice *dxgi_device = NULL;
176  IDXGIAdapter *dxgi_adapter = NULL;
177  IDXGIOutput *dxgi_output = NULL;
178  IDXGIOutput1 *dxgi_output1 = NULL;
179 #if HAVE_IDXGIOUTPUT5 && HAVE_DPI_AWARENESS_CONTEXT
180  IDXGIOutput5 *dxgi_output5 = NULL;
181 
182  typedef DPI_AWARENESS_CONTEXT (*set_thread_dpi_t)(DPI_AWARENESS_CONTEXT);
183  set_thread_dpi_t set_thread_dpi;
184  HMODULE user32_module;
185 #endif
186  int w, h;
187  HRESULT hr;
188 
189  hr = ID3D11Device_QueryInterface(dda->device_hwctx->device, &IID_IDXGIDevice, (void**)&dxgi_device);
190  if (FAILED(hr)) {
191  av_log(avctx, AV_LOG_ERROR, "Failed querying IDXGIDevice\n");
192  return AVERROR_EXTERNAL;
193  }
194 
195  hr = IDXGIDevice_GetParent(dxgi_device, &IID_IDXGIAdapter, (void**)&dxgi_adapter);
196  IDXGIDevice_Release(dxgi_device);
197  dxgi_device = NULL;
198  if (FAILED(hr)) {
199  av_log(avctx, AV_LOG_ERROR, "Failed getting parent IDXGIAdapter\n");
200  return AVERROR_EXTERNAL;
201  }
202 
203  hr = IDXGIAdapter_EnumOutputs(dxgi_adapter, dda->output_idx, &dxgi_output);
204  IDXGIAdapter_Release(dxgi_adapter);
205  dxgi_adapter = NULL;
206  if (FAILED(hr)) {
207  av_log(avctx, AV_LOG_ERROR, "Failed to enumerate DXGI output %d\n", dda->output_idx);
208  return AVERROR_EXTERNAL;
209  }
210 
211  hr = IDXGIOutput_GetDesc(dxgi_output, &dda->output_desc);
212  if (FAILED(hr)) {
213  IDXGIOutput_Release(dxgi_output);
214  av_log(avctx, AV_LOG_ERROR, "Failed getting output description\n");
215  return AVERROR_EXTERNAL;
216  }
217 
218 #if HAVE_IDXGIOUTPUT5 && HAVE_DPI_AWARENESS_CONTEXT
219  user32_module = dlopen("user32.dll", 0);
220  if (!user32_module) {
221  av_log(avctx, AV_LOG_ERROR, "Failed loading user32.dll\n");
222  return AVERROR_EXTERNAL;
223  }
224 
225  set_thread_dpi = (set_thread_dpi_t)dlsym(user32_module, "SetThreadDpiAwarenessContext");
226 
227  if (set_thread_dpi)
228  hr = IDXGIOutput_QueryInterface(dxgi_output, &IID_IDXGIOutput5, (void**)&dxgi_output5);
229 
230  if (set_thread_dpi && SUCCEEDED(hr)) {
231  DPI_AWARENESS_CONTEXT prev_dpi_ctx;
232  DXGI_FORMAT formats[] = {
233  DXGI_FORMAT_B8G8R8A8_UNORM,
234  DXGI_FORMAT_R10G10B10A2_UNORM,
235  DXGI_FORMAT_R16G16B16A16_FLOAT,
236  };
237  int nb_formats = FF_ARRAY_ELEMS(formats);
238 
239  if (dda->out_fmt == DXGI_FORMAT_B8G8R8A8_UNORM) {
240  // the requested format is already the first list entry, no others needed
241  nb_formats = 1;
242  } else if (dda->out_fmt) {
243  formats[0] = dda->out_fmt;
244  formats[1] = DXGI_FORMAT_B8G8R8A8_UNORM;
245  nb_formats = dda->force_fmt ? 1 : 2;
246  }
247 
248  IDXGIOutput_Release(dxgi_output);
249  dxgi_output = NULL;
250 
251  prev_dpi_ctx = set_thread_dpi(DPI_AWARENESS_CONTEXT_PER_MONITOR_AWARE_V2);
252  if (!prev_dpi_ctx)
253  av_log(avctx, AV_LOG_WARNING, "Failed enabling DPI awareness for DDA\n");
254 
255  hr = IDXGIOutput5_DuplicateOutput1(dxgi_output5,
256  (IUnknown*)dda->device_hwctx->device,
257  0,
258  nb_formats,
259  formats,
260  &dda->dxgi_outdupl);
261  IDXGIOutput5_Release(dxgi_output5);
262  dxgi_output5 = NULL;
263 
264  if (prev_dpi_ctx)
265  set_thread_dpi(prev_dpi_ctx);
266 
267  dlclose(user32_module);
268  user32_module = NULL;
269  set_thread_dpi = NULL;
270 
271  av_log(avctx, AV_LOG_DEBUG, "Using IDXGIOutput5 interface\n");
272  } else {
273  dlclose(user32_module);
274  user32_module = NULL;
275  set_thread_dpi = NULL;
276 
277  av_log(avctx, AV_LOG_DEBUG, "Falling back to IDXGIOutput1\n");
278 #else
279  {
280 #endif
281  if (dda->out_fmt && dda->out_fmt != DXGI_FORMAT_B8G8R8A8_UNORM && (!dda->allow_fallback || dda->force_fmt)) {
282  av_log(avctx, AV_LOG_ERROR, "Only 8 bit output supported with legacy API\n");
283  return AVERROR(ENOTSUP);
284  }
285 
286  hr = IDXGIOutput_QueryInterface(dxgi_output, &IID_IDXGIOutput1, (void**)&dxgi_output1);
287  IDXGIOutput_Release(dxgi_output);
288  dxgi_output = NULL;
289  if (FAILED(hr)) {
290  av_log(avctx, AV_LOG_ERROR, "Failed querying IDXGIOutput1\n");
291  return AVERROR_EXTERNAL;
292  }
293 
294  hr = IDXGIOutput1_DuplicateOutput(dxgi_output1,
295  (IUnknown*)dda->device_hwctx->device,
296  &dda->dxgi_outdupl);
297  IDXGIOutput1_Release(dxgi_output1);
298  dxgi_output1 = NULL;
299  }
300 
301  if (hr == DXGI_ERROR_NOT_CURRENTLY_AVAILABLE) {
302  av_log(avctx, AV_LOG_ERROR, "Too many open duplication sessions\n");
303  return AVERROR(EBUSY);
304  } else if (hr == DXGI_ERROR_UNSUPPORTED) {
305  av_log(avctx, AV_LOG_ERROR, "Selected output not supported\n");
306  return AVERROR_EXTERNAL;
307  } else if (hr == E_INVALIDARG) {
308  av_log(avctx, AV_LOG_ERROR, "Invalid output duplication argument\n");
309  return AVERROR(EINVAL);
310  } else if (hr == E_ACCESSDENIED) {
311  av_log(avctx, AV_LOG_ERROR, "Desktop duplication access denied\n");
312  return AVERROR(EPERM);
313  } else if (FAILED(hr)) {
314  av_log(avctx, AV_LOG_ERROR, "Failed duplicating output\n");
315  return AVERROR_EXTERNAL;
316  }
317 
318  w = dda->output_desc.DesktopCoordinates.right - dda->output_desc.DesktopCoordinates.left;
319  h = dda->output_desc.DesktopCoordinates.bottom - dda->output_desc.DesktopCoordinates.top;
320  av_log(avctx, AV_LOG_VERBOSE, "Opened dxgi output %d with dimensions %dx%d\n", dda->output_idx, w, h);
321 
322  return 0;
323 }
324 
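// The constant buffer bound to the vertex shader carries the destination frame
// dimensions; the trailing padding keeps the struct at 16 bytes, since D3D11
// requires constant buffer sizes to be multiples of 16.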
325 typedef struct ConstBufferData
326 {
327  float width;
328  float height;
329 
330  uint64_t padding;
331 } ConstBufferData;
332 
333 static const D3D11_INPUT_ELEMENT_DESC vertex_shader_input_layout[] =
334 {
335  { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
336  { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 }
337 };
338 
339 static av_cold int init_render_resources(AVFilterContext *avctx)
340 {
341  DdagrabContext *dda = avctx->priv;
342  ID3D11Device *dev = dda->device_hwctx->device;
343  D3D11_SAMPLER_DESC sampler_desc = { 0 };
344  D3D11_BLEND_DESC blend_desc = { 0 };
345  D3D11_BUFFER_DESC buffer_desc = { 0 };
346  D3D11_SUBRESOURCE_DATA buffer_data = { 0 };
347  ConstBufferData const_data = { 0 };
348  HRESULT hr;
349 
350  hr = ID3D11Device_CreateVertexShader(dev,
351  vertex_shader_bytes,
352  sizeof(vertex_shader_bytes),
353  NULL,
354  &dda->vertex_shader);
355  if (FAILED(hr)) {
356  av_log(avctx, AV_LOG_ERROR, "CreateVertexShader failed: %lx\n", hr);
357  return AVERROR_EXTERNAL;
358  }
359 
360  hr = ID3D11Device_CreateInputLayout(dev,
361  vertex_shader_input_layout,
362  FF_ARRAY_ELEMS(vertex_shader_input_layout),
363  vertex_shader_bytes,
364  sizeof(vertex_shader_bytes),
365  &dda->input_layout);
366  if (FAILED(hr)) {
367  av_log(avctx, AV_LOG_ERROR, "CreateInputLayout failed: %lx\n", hr);
368  return AVERROR_EXTERNAL;
369  }
370 
371  hr = ID3D11Device_CreatePixelShader(dev,
372  pixel_shader_bytes,
373  sizeof(pixel_shader_bytes),
374  NULL,
375  &dda->pixel_shader);
376  if (FAILED(hr)) {
377  av_log(avctx, AV_LOG_ERROR, "CreatePixelShader failed: %lx\n", hr);
378  return AVERROR_EXTERNAL;
379  }
380 
381  const_data = (ConstBufferData){ dda->width, dda->height };
382 
383  buffer_data.pSysMem = &const_data;
384  buffer_desc.ByteWidth = sizeof(const_data);
385  buffer_desc.Usage = D3D11_USAGE_IMMUTABLE;
386  buffer_desc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
387  hr = ID3D11Device_CreateBuffer(dev,
388  &buffer_desc,
389  &buffer_data,
390  &dda->const_buffer);
391  if (FAILED(hr)) {
392  av_log(avctx, AV_LOG_ERROR, "CreateBuffer const buffer failed: %lx\n", hr);
393  return AVERROR_EXTERNAL;
394  }
395 
396  sampler_desc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
397  sampler_desc.AddressU = D3D11_TEXTURE_ADDRESS_CLAMP;
398  sampler_desc.AddressV = D3D11_TEXTURE_ADDRESS_CLAMP;
399  sampler_desc.AddressW = D3D11_TEXTURE_ADDRESS_CLAMP;
400  sampler_desc.ComparisonFunc = D3D11_COMPARISON_NEVER;
401  hr = ID3D11Device_CreateSamplerState(dev,
402  &sampler_desc,
403  &dda->sampler_state);
404  if (FAILED(hr)) {
405  av_log(avctx, AV_LOG_ERROR, "CreateSamplerState failed: %lx\n", hr);
406  return AVERROR_EXTERNAL;
407  }
408 
409  blend_desc.AlphaToCoverageEnable = FALSE;
410  blend_desc.IndependentBlendEnable = FALSE;
411  blend_desc.RenderTarget[0].BlendEnable = TRUE;
412  blend_desc.RenderTarget[0].SrcBlend = D3D11_BLEND_SRC_ALPHA;
413  blend_desc.RenderTarget[0].DestBlend = D3D11_BLEND_INV_SRC_ALPHA;
414  blend_desc.RenderTarget[0].BlendOp = D3D11_BLEND_OP_ADD;
415  blend_desc.RenderTarget[0].SrcBlendAlpha = D3D11_BLEND_ONE;
416  blend_desc.RenderTarget[0].DestBlendAlpha = D3D11_BLEND_ZERO;
417  blend_desc.RenderTarget[0].BlendOpAlpha = D3D11_BLEND_OP_ADD;
418  blend_desc.RenderTarget[0].RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL;
419  hr = ID3D11Device_CreateBlendState(dev,
420  &blend_desc,
421  &dda->blend_state);
422  if (FAILED(hr)) {
423  av_log(avctx, AV_LOG_ERROR, "CreateBlendState failed: %lx\n", hr);
424  return AVERROR_EXTERNAL;
425  }
426 
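// Same descriptor with inverted colour factors: with SrcBlend = INV_DEST_COLOR
// and DestBlend = INV_SRC_COLOR the result is src*(1-dst) + dst*(1-src), so
// white pixels of the xor layer invert the underlying pixel and black pixels
// leave it unchanged.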
427  blend_desc.RenderTarget[0].SrcBlend = D3D11_BLEND_INV_DEST_COLOR;
428  blend_desc.RenderTarget[0].DestBlend = D3D11_BLEND_INV_SRC_COLOR;
429  hr = ID3D11Device_CreateBlendState(dev,
430  &blend_desc,
431  &dda->blend_state_xor);
432  if (FAILED(hr)) {
433  av_log(avctx, AV_LOG_ERROR, "CreateBlendState (xor) failed: %lx\n", hr);
434  return AVERROR_EXTERNAL;
435  }
436 
437  return 0;
438 }
439 
440 static av_cold int ddagrab_init(AVFilterContext *avctx)
441 {
442  DdagrabContext *dda = avctx->priv;
443 
444  dda->last_frame = av_frame_alloc();
445  if (!dda->last_frame)
446  return AVERROR(ENOMEM);
447 
448  dda->mouse_x = -1;
449  dda->mouse_y = -1;
450 
451  return 0;
452 }
453 
454 static int create_d3d11_pointer_tex(AVFilterContext *avctx,
455  uint8_t *buf,
456  DXGI_OUTDUPL_POINTER_SHAPE_INFO *shape_info,
457  ID3D11Texture2D **out_tex,
458  ID3D11ShaderResourceView **res_view)
459 {
460  DdagrabContext *dda = avctx->priv;
461  D3D11_TEXTURE2D_DESC desc = { 0 };
462  D3D11_SUBRESOURCE_DATA init_data = { 0 };
463  D3D11_SHADER_RESOURCE_VIEW_DESC resource_desc = { 0 };
464  HRESULT hr;
465 
466  desc.MipLevels = 1;
467  desc.ArraySize = 1;
468  desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
469  desc.SampleDesc.Count = 1;
470  desc.SampleDesc.Quality = 0;
471  desc.Usage = D3D11_USAGE_IMMUTABLE;
472  desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
473 
474  desc.Width = shape_info->Width;
475  desc.Height = shape_info->Height;
476 
477  init_data.pSysMem = buf;
478  init_data.SysMemPitch = shape_info->Pitch;
479 
480  resource_desc.Format = desc.Format;
481  resource_desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
482  resource_desc.Texture2D.MostDetailedMip = 0;
483  resource_desc.Texture2D.MipLevels = 1;
484 
485  hr = ID3D11Device_CreateTexture2D(dda->device_hwctx->device,
486  &desc,
487  &init_data,
488  out_tex);
489  if (FAILED(hr)) {
490  av_log(avctx, AV_LOG_ERROR, "Failed creating pointer texture\n");
491  return AVERROR_EXTERNAL;
492  }
493 
494  hr = ID3D11Device_CreateShaderResourceView(dda->device_hwctx->device,
495  (ID3D11Resource*)*out_tex,
496  &resource_desc,
497  res_view);
498  if (FAILED(hr)) {
499  release_resource(out_tex);
500  av_log(avctx, AV_LOG_ERROR, "CreateShaderResourceView for mouse failed: %lx\n", hr);
501  return AVERROR_EXTERNAL;
502  }
503 
504  return 0;
505 }
506 
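/*
 * Monochrome pointer shapes arrive as two stacked 1 bpp bitmaps (MSB first):
 * the top half is the AND mask, the bottom half the XOR mask, which is why the
 * reported height is halved below. They are expanded into two BGRA buffers,
 * one drawn with regular alpha blending and one with the inverting blend state.
 */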
507 static int convert_mono_buffer(uint8_t *input, uint8_t **rgba_out, uint8_t **xor_out, int *_width, int *_height, int *_pitch)
508 {
509  int width = *_width, height = *_height, pitch = *_pitch;
510  int real_height = height / 2;
511  int size = real_height * pitch;
512 
513  uint8_t *output = av_malloc(real_height * width * 4);
514  uint8_t *output_xor = av_malloc(real_height * width * 4);
515 
516  int y, x;
517 
518  if (!output || !output_xor) {
519  av_free(output);
520  av_free(output_xor);
521  return AVERROR(ENOMEM);
522  }
523 
524  for (y = 0; y < real_height; y++) {
525  for (x = 0; x < width; x++) {
526  int in_pos = (y * pitch) + (x / 8);
527  int out_pos = 4 * ((y * width) + x);
528  int and_val = (input[in_pos] >> (7 - (x % 8))) & 1;
529  int xor_val = (input[in_pos + size] >> (7 - (x % 8))) & 1;
530 
531  if (!and_val && !xor_val) {
532  // solid black
533  memset(&output[out_pos], 0, 4);
534  output[out_pos + 3] = 0xFF;
535 
536  // transparent
537  memset(&output_xor[out_pos], 0, 4);
538  } else if (and_val && !xor_val) {
539  // transparent
540  memset(&output[out_pos], 0, 4);
541 
542  // transparent
543  memset(&output_xor[out_pos], 0, 4);
544  } else if (!and_val && xor_val) {
545  // solid white
546  memset(&output[out_pos], 0xFF, 4);
547 
548  // transparent
549  memset(&output_xor[out_pos], 0, 4);
550  } else if (and_val && xor_val) {
551  // transparent
552  memset(&output[out_pos], 0, 4);
553 
554  // solid white -> invert color
555  memset(&output_xor[out_pos], 0xFF, 4);
556  }
557  }
558  }
559 
560  *_pitch = width * 4;
561  *_height = real_height;
562  *rgba_out = output;
563  *xor_out = output_xor;
564 
565  return 0;
566 }
567 
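/*
 * For masked-colour pointer shapes the alpha byte acts as a mask: pixels with
 * mask 0 are drawn normally, pixels with a non-zero mask invert the screen
 * content. Split the input accordingly into a regular layer and an xor layer.
 */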
568 static int fixup_color_mask(uint8_t *input, uint8_t **rgba_out, uint8_t **xor_out, int width, int height, int pitch)
569 {
570  int size = height * pitch;
571  uint8_t *output = av_malloc(size);
572  uint8_t *output_xor = av_malloc(size);
573  int x, y;
574 
575  if (!output || !output_xor) {
576  av_free(output);
577  av_free(output_xor);
578  return AVERROR(ENOMEM);
579  }
580 
581  memcpy(output, input, size);
582  memcpy(output_xor, input, size);
583 
584  for (y = 0; y < height; y++) {
585  for (x = 0; x < width; x++) {
586  int pos = (y*pitch) + (4*x) + 3;
587  output[pos] = input[pos] ? 0 : 0xFF;
588  output_xor[pos] = input[pos] ? 0xFF : 0;
589  }
590  }
591 
592  *rgba_out = output;
593  *xor_out = output_xor;
594 
595  return 0;
596 }
597 
598 static int update_mouse_pointer(AVFilterContext *avctx, DXGI_OUTDUPL_FRAME_INFO *frame_info)
599 {
600  DdagrabContext *dda = avctx->priv;
601  HRESULT hr;
602  int ret, ret2;
603 
604  if (frame_info->LastMouseUpdateTime.QuadPart == 0)
605  return 0;
606 
607  if (frame_info->PointerPosition.Visible) {
608  switch (dda->output_desc.Rotation) {
609  case DXGI_MODE_ROTATION_ROTATE90:
610  dda->mouse_x = frame_info->PointerPosition.Position.y;
611  dda->mouse_y = dda->output_desc.DesktopCoordinates.right - dda->output_desc.DesktopCoordinates.left - frame_info->PointerPosition.Position.x - 1;
612  break;
613  case DXGI_MODE_ROTATION_ROTATE180:
614  dda->mouse_x = dda->output_desc.DesktopCoordinates.right - dda->output_desc.DesktopCoordinates.left - frame_info->PointerPosition.Position.x - 1;
615  dda->mouse_y = dda->output_desc.DesktopCoordinates.bottom - dda->output_desc.DesktopCoordinates.top - frame_info->PointerPosition.Position.y - 1;
616  break;
617  case DXGI_MODE_ROTATION_ROTATE270:
618  dda->mouse_x = dda->output_desc.DesktopCoordinates.bottom - dda->output_desc.DesktopCoordinates.top - frame_info->PointerPosition.Position.y - 1;
619  dda->mouse_y = frame_info->PointerPosition.Position.x;
620  break;
621  default:
622  dda->mouse_x = frame_info->PointerPosition.Position.x;
623  dda->mouse_y = frame_info->PointerPosition.Position.y;
624  }
625  } else {
626  dda->mouse_x = dda->mouse_y = -1;
627  }
628 
629  if (frame_info->PointerShapeBufferSize) {
630  UINT size = frame_info->PointerShapeBufferSize;
631  DXGI_OUTDUPL_POINTER_SHAPE_INFO shape_info;
632  uint8_t *rgba_buf = NULL, *rgb_xor_buf = NULL;
633  uint8_t *buf = av_malloc(size);
634  if (!buf)
635  return AVERROR(ENOMEM);
636 
637  hr = IDXGIOutputDuplication_GetFramePointerShape(dda->dxgi_outdupl,
638  size,
639  buf,
640  &size,
641  &shape_info);
642  if (FAILED(hr)) {
643  av_free(buf);
644  av_log(avctx, AV_LOG_ERROR, "Failed getting pointer shape: %lx\n", hr);
645  return AVERROR_EXTERNAL;
646  }
647 
648  if (shape_info.Type == DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MONOCHROME) {
649  ret = convert_mono_buffer(buf, &rgba_buf, &rgb_xor_buf, &shape_info.Width, &shape_info.Height, &shape_info.Pitch);
650  av_freep(&buf);
651  if (ret < 0)
652  return ret;
653  } else if (shape_info.Type == DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MASKED_COLOR) {
654  ret = fixup_color_mask(buf, &rgba_buf, &rgb_xor_buf, shape_info.Width, shape_info.Height, shape_info.Pitch);
655  av_freep(&buf);
656  if (ret < 0)
657  return ret;
658  } else if (shape_info.Type == DXGI_OUTDUPL_POINTER_SHAPE_TYPE_COLOR) {
659  rgba_buf = buf;
660  buf = NULL;
661  } else {
662  av_log(avctx, AV_LOG_WARNING, "Unsupported pointer shape type: %d\n", (int)shape_info.Type);
663  av_freep(&buf);
664  return 0;
665  }
666 
667  release_resource(&dda->mouse_resource_view);
668  release_resource(&dda->mouse_texture);
669  release_resource(&dda->mouse_xor_resource_view);
670  release_resource(&dda->mouse_xor_texture);
671 
672  ret = create_d3d11_pointer_tex(avctx, rgba_buf, &shape_info, &dda->mouse_texture, &dda->mouse_resource_view);
673  ret2 = rgb_xor_buf ? create_d3d11_pointer_tex(avctx, rgb_xor_buf, &shape_info, &dda->mouse_xor_texture, &dda->mouse_xor_resource_view) : 0;
674  av_freep(&rgba_buf);
675  av_freep(&rgb_xor_buf);
676  if (ret < 0)
677  return ret;
678  if (ret2 < 0)
679  return ret2;
680 
681  av_log(avctx, AV_LOG_VERBOSE, "Updated pointer shape texture\n");
682  }
683 
684  return 0;
685 }
686 
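/*
 * Acquires the next duplicated desktop texture. With need_frame set (initial
 * format probing) a timeout or an unchanged desktop is reported as EAGAIN
 * instead of handing back the cached buffer_texture, since a real frame is
 * needed to learn the negotiated format and size.
 */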
687 static int next_frame_internal(AVFilterContext *avctx, ID3D11Texture2D **desktop_texture, int need_frame)
688 {
689  DXGI_OUTDUPL_FRAME_INFO frame_info;
690  DdagrabContext *dda = avctx->priv;
691  IDXGIResource *desktop_resource = NULL;
692  HRESULT hr;
693  int ret;
694 
695  hr = IDXGIOutputDuplication_AcquireNextFrame(
696  dda->dxgi_outdupl,
697  dda->time_timeout,
698  &frame_info,
699  &desktop_resource);
700  if (hr == DXGI_ERROR_WAIT_TIMEOUT) {
701  return AVERROR(EAGAIN);
702  } else if (FAILED(hr)) {
703  av_log(avctx, AV_LOG_ERROR, "AcquireNextFrame failed: %lx\n", hr);
704  return AVERROR_EXTERNAL;
705  }
706 
707  if (dda->draw_mouse) {
708  ret = update_mouse_pointer(avctx, &frame_info);
709  if (ret < 0)
710  goto error;
711  }
712 
713  if (!frame_info.LastPresentTime.QuadPart || !frame_info.AccumulatedFrames) {
714  if (need_frame) {
715  ret = AVERROR(EAGAIN);
716  goto error;
717  }
718 
719  // Unfortunately, we can't rely on the desktop_resource's format in this case.
720  // The API might even return it in with a format that was not in the initial
721  // list of supported formats, and it can change/flicker randomly.
722  // To work around this, return an internal copy of the last valid texture we got.
723  release_resource(&desktop_resource);
724 
725  // The initial probing should make this impossible.
726  if (!dda->buffer_texture) {
727  av_log(avctx, AV_LOG_ERROR, "No buffer texture while operating!\n");
728  ret = AVERROR_BUG;
729  goto error;
730  }
731 
732  av_log(avctx, AV_LOG_TRACE, "Returning internal buffer for a frame!\n");
733  ID3D11Texture2D_AddRef(dda->buffer_texture);
734  *desktop_texture = dda->buffer_texture;
735  return 0;
736  }
737 
738  hr = IDXGIResource_QueryInterface(desktop_resource, &IID_ID3D11Texture2D, (void**)desktop_texture);
739  release_resource(&desktop_resource);
740  if (FAILED(hr)) {
741  av_log(avctx, AV_LOG_ERROR, "DXGIResource QueryInterface failed\n");
742  ret = AVERROR_EXTERNAL;
743  goto error;
744  }
745 
746  if (!dda->buffer_texture) {
747  D3D11_TEXTURE2D_DESC desc;
748  ID3D11Texture2D_GetDesc(*desktop_texture, &desc);
749  desc.Usage = D3D11_USAGE_DEFAULT;
750  desc.BindFlags = 0;
751  desc.CPUAccessFlags = 0;
752  desc.MiscFlags = 0;
753 
754  hr = ID3D11Device_CreateTexture2D(dda->device_hwctx->device, &desc, NULL, &dda->buffer_texture);
755  if (FAILED(hr)) {
756  release_resource(desktop_texture);
757  av_log(avctx, AV_LOG_ERROR, "Failed creating internal buffer texture.\n");
758  ret = AVERROR(ENOMEM);
759  goto error;
760  }
761  }
762 
763  ID3D11DeviceContext_CopyResource(dda->device_hwctx->device_context,
764  (ID3D11Resource*)dda->buffer_texture,
765  (ID3D11Resource*)*desktop_texture);
766 
767  return 0;
768 
769 error:
770  release_resource(&desktop_resource);
771 
772  hr = IDXGIOutputDuplication_ReleaseFrame(dda->dxgi_outdupl);
773  if (FAILED(hr))
774  av_log(avctx, AV_LOG_ERROR, "DDA error ReleaseFrame failed!\n");
775 
776  return ret;
777 }
778 
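/*
 * Grab one frame up front: the duplication is free to pick any of the offered
 * formats, and the true desktop texture format and dimensions are only known
 * once a frame has actually been captured.
 */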
779 static int probe_output_format(AVFilterContext *avctx)
780 {
781  DdagrabContext *dda = avctx->priv;
782  D3D11_TEXTURE2D_DESC desc;
783  int ret;
784 
785  av_assert1(!dda->probed_texture);
786 
787  do {
788  ret = next_frame_internal(avctx, &dda->probed_texture, 1);
789  } while(ret == AVERROR(EAGAIN));
790  if (ret < 0)
791  return ret;
792 
793  ID3D11Texture2D_GetDesc(dda->probed_texture, &desc);
794 
795  dda->raw_format = desc.Format;
796  dda->raw_width = desc.Width;
797  dda->raw_height = desc.Height;
798 
799  if (dda->width <= 0)
800  dda->width = dda->raw_width;
801  if (dda->height <= 0)
802  dda->height = dda->raw_height;
803 
804  return 0;
805 }
806 
807 static av_cold int init_hwframes_ctx(AVFilterContext *avctx)
808 {
809  DdagrabContext *dda = avctx->priv;
810  int ret = 0;
811 
812  dda->frames_ref = av_hwframe_ctx_alloc(dda->device_ref);
813  if (!dda->frames_ref)
814  return AVERROR(ENOMEM);
815  dda->frames_ctx = (AVHWFramesContext*)dda->frames_ref->data;
816  dda->frames_hwctx = (AVD3D11VAFramesContext*)dda->frames_ctx->hwctx;
817 
818  dda->frames_ctx->format = AV_PIX_FMT_D3D11;
819  dda->frames_ctx->width = dda->width;
820  dda->frames_ctx->height = dda->height;
821  if (avctx->extra_hw_frames > 0)
822  dda->frames_ctx->initial_pool_size = 8 + avctx->extra_hw_frames;
823 
824  switch (dda->raw_format) {
825  case DXGI_FORMAT_B8G8R8A8_UNORM:
826  av_log(avctx, AV_LOG_VERBOSE, "Probed 8 bit RGB frame format\n");
827  dda->frames_ctx->sw_format = AV_PIX_FMT_BGRA;
828  break;
829  case DXGI_FORMAT_R10G10B10A2_UNORM:
830  av_log(avctx, AV_LOG_VERBOSE, "Probed 10 bit RGB frame format\n");
831  dda->frames_ctx->sw_format = AV_PIX_FMT_X2BGR10;
832  break;
833  case DXGI_FORMAT_R16G16B16A16_FLOAT:
834  av_log(avctx, AV_LOG_VERBOSE, "Probed 16 bit float RGB frame format\n");
835  dda->frames_ctx->sw_format = AV_PIX_FMT_RGBAF16;
836  break;
837  default:
838  av_log(avctx, AV_LOG_ERROR, "Unexpected texture output format!\n");
839  return AVERROR_BUG;
840  }
841 
842  if (dda->draw_mouse)
843  dda->frames_hwctx->BindFlags |= D3D11_BIND_RENDER_TARGET;
844 
845  ret = av_hwframe_ctx_init(dda->frames_ref);
846  if (ret < 0) {
847  av_log(avctx, AV_LOG_ERROR, "Failed to initialise hardware frames context: %d.\n", ret);
848  goto fail;
849  }
850 
851  return 0;
852 fail:
853  av_buffer_unref(&dda->frames_ref);
854  return ret;
855 }
856 
857 static int ddagrab_config_props(AVFilterLink *outlink)
858 {
859  FilterLink *l = ff_filter_link(outlink);
860  AVFilterContext *avctx = outlink->src;
861  DdagrabContext *dda = avctx->priv;
862  int ret;
863 
864  if (avctx->hw_device_ctx) {
865  dda->device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
866 
867  if (dda->device_ctx->type != AV_HWDEVICE_TYPE_D3D11VA) {
868  av_log(avctx, AV_LOG_ERROR, "Non-D3D11VA input hw_device_ctx\n");
869  return AVERROR(EINVAL);
870  }
871 
872  dda->device_ref = av_buffer_ref(avctx->hw_device_ctx);
873  if (!dda->device_ref)
874  return AVERROR(ENOMEM);
875 
876  av_log(avctx, AV_LOG_VERBOSE, "Using provided hw_device_ctx\n");
877  } else {
878  ret = av_hwdevice_ctx_create(&dda->device_ref, AV_HWDEVICE_TYPE_D3D11VA, NULL, NULL, 0);
879  if (ret < 0) {
880  av_log(avctx, AV_LOG_ERROR, "Failed to create D3D11VA device.\n");
881  return ret;
882  }
883 
884  dda->device_ctx = (AVHWDeviceContext*)dda->device_ref->data;
885 
886  av_log(avctx, AV_LOG_VERBOSE, "Created internal hw_device_ctx\n");
887  }
888 
889  dda->device_hwctx = (AVD3D11VADeviceContext*)dda->device_ctx->hwctx;
890 
891  ret = init_dxgi_dda(avctx);
892  if (ret < 0)
893  return ret;
894 
895  ret = probe_output_format(avctx);
896  if (ret < 0)
897  return ret;
898 
899  if (dda->out_fmt && dda->raw_format != dda->out_fmt && (!dda->allow_fallback || dda->force_fmt)) {
900  av_log(avctx, AV_LOG_ERROR, "Requested output format unavailable.\n");
901  return AVERROR(ENOTSUP);
902  }
903 
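// Clamp the requested capture size so that offset + size never exceeds the
// probed desktop texture dimensions.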
904  dda->width -= FFMAX(dda->width - dda->raw_width + dda->offset_x, 0);
905  dda->height -= FFMAX(dda->height - dda->raw_height + dda->offset_y, 0);
906 
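// time_base is the frame duration; time_timeout is roughly half a frame in
// milliseconds (e.g. ~16 ms at framerate=30) and is used as the
// AcquireNextFrame timeout.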
907  dda->time_base = av_inv_q(dda->framerate);
908  dda->time_frame = av_gettime_relative() / av_q2d(dda->time_base);
909  dda->time_timeout = av_rescale_q(1, dda->time_base, (AVRational) { 1, 1000 }) / 2;
910 
911  if (dda->draw_mouse) {
912  ret = init_render_resources(avctx);
913  if (ret < 0)
914  return ret;
915  }
916 
917  ret = init_hwframes_ctx(avctx);
918  if (ret < 0)
919  return ret;
920 
921  l->hw_frames_ctx = av_buffer_ref(dda->frames_ref);
922  if (!l->hw_frames_ctx)
923  return AVERROR(ENOMEM);
924 
925  outlink->w = dda->width;
926  outlink->h = dda->height;
927  outlink->time_base = (AVRational){1, TIMER_RES};
928  l->frame_rate = dda->framerate;
929 
930  return 0;
931 }
932 
933 static int draw_mouse_pointer(AVFilterContext *avctx, AVFrame *frame)
934 {
935  DdagrabContext *dda = avctx->priv;
936  ID3D11DeviceContext *devctx = dda->device_hwctx->device_context;
937  ID3D11Texture2D *frame_tex = (ID3D11Texture2D*)frame->data[0];
938  D3D11_RENDER_TARGET_VIEW_DESC target_desc = { 0 };
939  ID3D11RenderTargetView* target_view = NULL;
940  ID3D11Buffer *mouse_vertex_buffer = NULL;
941  D3D11_TEXTURE2D_DESC tex_desc, frame_desc;
942  int num_vertices = 0;
943  int x, y;
944  HRESULT hr;
945  int ret = 0;
946 
947  if (!dda->mouse_texture || dda->mouse_x < 0 || dda->mouse_y < 0)
948  return 0;
949 
950  ID3D11Texture2D_GetDesc(dda->mouse_texture, &tex_desc);
951  ID3D11Texture2D_GetDesc(frame_tex, &frame_desc);
952 
953  x = dda->mouse_x - dda->offset_x;
954  y = dda->mouse_y - dda->offset_y;
955 
956  if (x >= dda->width || y >= dda->height ||
957  -x >= (int)tex_desc.Width || -y >= (int)tex_desc.Height)
958  return 0;
959 
960  target_desc.Format = frame_desc.Format;
961 
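// AV_PIX_FMT_D3D11 frames store the ID3D11Texture2D in data[0] and the array
// slice index in data[1]; array textures therefore need a TEXTURE2DARRAY
// render target view aimed at that slice.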
962  if (frame_desc.ArraySize > 1) {
963  target_desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2DARRAY;
964  target_desc.Texture2DArray.ArraySize = 1;
965  target_desc.Texture2DArray.FirstArraySlice = (uintptr_t)frame->data[1];
966  target_desc.Texture2DArray.MipSlice = 0;
967  } else {
968  target_desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
969  target_desc.Texture2D.MipSlice = 0;
970  }
971 
972  hr = ID3D11Device_CreateRenderTargetView(dda->device_hwctx->device,
973  (ID3D11Resource*)frame_tex,
974  &target_desc,
975  &target_view);
976  if (FAILED(hr)) {
977  av_log(avctx, AV_LOG_ERROR, "CreateRenderTargetView failed: %lx\n", hr);
978  ret = AVERROR_EXTERNAL;
979  goto end;
980  }
981 
982  ID3D11DeviceContext_ClearState(devctx);
983 
984  {
985  D3D11_VIEWPORT viewport = { 0 };
986  viewport.Width = dda->width;
987  viewport.Height = dda->height;
988  viewport.MinDepth = 0.0f;
989  viewport.MaxDepth = 1.0f;
990 
991  ID3D11DeviceContext_RSSetViewports(devctx, 1, &viewport);
992  }
993 
994  {
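// Four vertices for a triangle strip covering the pointer quad; each vertex is
// x, y, z plus u, v in desktop pixel units. The vertex shader (presumably via
// the width/height in const_buffer) maps these pixel coordinates to clip space.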
995  FLOAT vertices[] = {
996  // x, y, z, u, v
997  x , y + tex_desc.Height, 0.0f, 0.0f, 1.0f,
998  x , y , 0.0f, 0.0f, 0.0f,
999  x + tex_desc.Width, y + tex_desc.Height, 0.0f, 1.0f, 1.0f,
1000  x + tex_desc.Width, y , 0.0f, 1.0f, 0.0f,
1001  };
1002  UINT stride = sizeof(FLOAT) * 5;
1003  UINT offset = 0;
1004 
1005  D3D11_SUBRESOURCE_DATA init_data = { 0 };
1006  D3D11_BUFFER_DESC buf_desc = { 0 };
1007 
1008  switch (dda->output_desc.Rotation) {
1009  case DXGI_MODE_ROTATION_ROTATE90:
1010  vertices[ 0] = x; vertices[ 1] = y;
1011  vertices[ 5] = x; vertices[ 6] = y - tex_desc.Width;
1012  vertices[10] = x + tex_desc.Height; vertices[11] = y;
1013  vertices[15] = x + tex_desc.Height; vertices[16] = y - tex_desc.Width;
1014  vertices[ 3] = 0.0f; vertices[ 4] = 0.0f;
1015  vertices[ 8] = 1.0f; vertices[ 9] = 0.0f;
1016  vertices[13] = 0.0f; vertices[14] = 1.0f;
1017  vertices[18] = 1.0f; vertices[19] = 1.0f;
1018  break;
1019  case DXGI_MODE_ROTATION_ROTATE180:
1020  vertices[ 0] = x - tex_desc.Width; vertices[ 1] = y;
1021  vertices[ 5] = x - tex_desc.Width; vertices[ 6] = y - tex_desc.Height;
1022  vertices[10] = x; vertices[11] = y;
1023  vertices[15] = x; vertices[16] = y - tex_desc.Height;
1024  vertices[ 3] = 1.0f; vertices[ 4] = 0.0f;
1025  vertices[ 8] = 1.0f; vertices[ 9] = 1.0f;
1026  vertices[13] = 0.0f; vertices[14] = 0.0f;
1027  vertices[18] = 0.0f; vertices[19] = 1.0f;
1028  break;
1029  case DXGI_MODE_ROTATION_ROTATE270:
1030  vertices[ 0] = x - tex_desc.Height; vertices[ 1] = y + tex_desc.Width;
1031  vertices[ 5] = x - tex_desc.Height; vertices[ 6] = y;
1032  vertices[10] = x; vertices[11] = y + tex_desc.Width;
1033  vertices[15] = x; vertices[16] = y;
1034  vertices[ 3] = 1.0f; vertices[ 4] = 1.0f;
1035  vertices[ 8] = 0.0f; vertices[ 9] = 1.0f;
1036  vertices[13] = 1.0f; vertices[14] = 0.0f;
1037  vertices[18] = 0.0f; vertices[19] = 0.0f;
1038  break;
1039  default:
1040  break;
1041  }
1042 
1043  num_vertices = sizeof(vertices) / (sizeof(FLOAT) * 5);
1044 
1045  buf_desc.Usage = D3D11_USAGE_DEFAULT;
1046  buf_desc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
1047  buf_desc.ByteWidth = sizeof(vertices);
1048  init_data.pSysMem = vertices;
1049 
1050  hr = ID3D11Device_CreateBuffer(dda->device_hwctx->device,
1051  &buf_desc,
1052  &init_data,
1053  &mouse_vertex_buffer);
1054  if (FAILED(hr)) {
1055  av_log(avctx, AV_LOG_ERROR, "CreateBuffer failed: %lx\n", hr);
1056  ret = AVERROR_EXTERNAL;
1057  goto end;
1058  }
1059 
1060  ID3D11DeviceContext_IASetVertexBuffers(devctx, 0, 1, &mouse_vertex_buffer, &stride, &offset);
1061  ID3D11DeviceContext_IASetInputLayout(devctx, dda->input_layout);
1062  ID3D11DeviceContext_IASetPrimitiveTopology(devctx, D3D11_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP);
1063  }
1064 
1065  ID3D11DeviceContext_VSSetShader(devctx, dda->vertex_shader, NULL, 0);
1066  ID3D11DeviceContext_VSSetConstantBuffers(devctx, 0, 1, &dda->const_buffer);
1067  ID3D11DeviceContext_PSSetSamplers(devctx, 0, 1, &dda->sampler_state);
1068  ID3D11DeviceContext_PSSetShaderResources(devctx, 0, 1, &dda->mouse_resource_view);
1069  ID3D11DeviceContext_PSSetShader(devctx, dda->pixel_shader, NULL, 0);
1070 
1071  ID3D11DeviceContext_OMSetBlendState(devctx, dda->blend_state, NULL, 0xFFFFFFFF);
1072  ID3D11DeviceContext_OMSetRenderTargets(devctx, 1, &target_view, NULL);
1073 
1074  ID3D11DeviceContext_Draw(devctx, num_vertices, 0);
1075 
1076  if (dda->mouse_xor_resource_view) {
1077  ID3D11DeviceContext_PSSetShaderResources(devctx, 0, 1, &dda->mouse_xor_resource_view);
1078  ID3D11DeviceContext_OMSetBlendState(devctx, dda->blend_state_xor, NULL, 0xFFFFFFFF);
1079 
1080  ID3D11DeviceContext_Draw(devctx, num_vertices, 0);
1081  }
1082 
1083 end:
1084  release_resource(&mouse_vertex_buffer);
1085  release_resource(&target_view);
1086 
1087  return ret;
1088 }
1089 
1090 static int ddagrab_request_frame(AVFilterLink *outlink)
1091 {
1092  AVFilterContext *avctx = outlink->src;
1093  DdagrabContext *dda = avctx->priv;
1094 
1095  ID3D11Texture2D *cur_texture = NULL;
1096  D3D11_TEXTURE2D_DESC desc = { 0 };
1097  D3D11_BOX box = { 0 };
1098 
1099  int64_t time_frame = dda->time_frame;
1100  int64_t now, delay;
1101  AVFrame *frame = NULL;
1102  HRESULT hr;
1103  int ret;
1104 
1105  /* time_frame is in units of microseconds divided by the time_base.
1106  * This means that adding a clean 1M to it is the equivalent of adding
1107  * 1M*time_base microseconds to it, except it avoids all rounding error.
1108  * The only time rounding error occurs is when multiplying to calculate
1109  * the delay. So any rounding error there corrects itself over time.
1110  */
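// Worked example: at framerate=30, time_base = 1/30, so each TIMER_RES64
// increment advances the wall-clock deadline by 1000000 * (1/30) = ~33333 us.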
1111  time_frame += TIMER_RES64;
1112  for (;;) {
1113  now = av_gettime_relative();
1114  delay = time_frame * av_q2d(dda->time_base) - now;
1115  if (delay <= 0) {
1116  if (delay < -TIMER_RES64 * av_q2d(dda->time_base)) {
1117  time_frame += TIMER_RES64;
1118  }
1119  break;
1120  }
1121  av_usleep(delay);
1122  }
1123 
1124  if (!dda->first_pts)
1125  dda->first_pts = now;
1126  now -= dda->first_pts;
1127 
1128  if (!dda->probed_texture) {
1129  do {
1130  ret = next_frame_internal(avctx, &cur_texture, 0);
1131  } while (ret == AVERROR(EAGAIN) && !dda->dup_frames);
1132  } else {
1133  cur_texture = dda->probed_texture;
1134  dda->probed_texture = NULL;
1135  ret = 0;
1136  }
1137 
1138  if (ret == AVERROR(EAGAIN) && dda->last_frame->buf[0]) {
1139  frame = av_frame_alloc();
1140  if (!frame)
1141  return AVERROR(ENOMEM);
1142 
1143  ret = av_frame_ref(frame, dda->last_frame);
1144  if (ret < 0) {
1145  av_frame_free(&frame);
1146  return ret;
1147  }
1148 
1149  av_log(avctx, AV_LOG_DEBUG, "Duplicated output frame\n");
1150 
1151  goto frame_done;
1152  } else if (ret == AVERROR(EAGAIN)) {
1153  av_log(avctx, AV_LOG_VERBOSE, "Initial DDA AcquireNextFrame timeout!\n");
1154  return AVERROR(EAGAIN);
1155  } else if (ret < 0) {
1156  return ret;
1157  }
1158 
1159  // AcquireNextFrame sometimes has bursts of delay.
1160  // This increases accuracy of the timestamp, but might upset consumers due to more jittery framerate?
1161  now = av_gettime_relative() - dda->first_pts;
1162 
1163  ID3D11Texture2D_GetDesc(cur_texture, &desc);
1164  if (desc.Format != dda->raw_format ||
1165  (int)desc.Width != dda->raw_width ||
1166  (int)desc.Height != dda->raw_height) {
1167  av_log(avctx, AV_LOG_ERROR, "Output parameters changed!\n");
1168  ret = AVERROR_OUTPUT_CHANGED;
1169  goto fail;
1170  }
1171 
1172  frame = ff_get_video_buffer(outlink, dda->width, dda->height);
1173  if (!frame) {
1174  ret = AVERROR(ENOMEM);
1175  goto fail;
1176  }
1177 
1178  box.left = dda->offset_x;
1179  box.top = dda->offset_y;
1180  box.right = box.left + dda->width;
1181  box.bottom = box.top + dda->height;
1182  box.front = 0;
1183  box.back = 1;
1184 
1185  ID3D11DeviceContext_CopySubresourceRegion(
1186  dda->device_hwctx->device_context,
1187  (ID3D11Resource*)frame->data[0], (UINT)(intptr_t)frame->data[1],
1188  0, 0, 0,
1189  (ID3D11Resource*)cur_texture, 0,
1190  &box);
1191 
1192  release_resource(&cur_texture);
1193 
1194  hr = IDXGIOutputDuplication_ReleaseFrame(dda->dxgi_outdupl);
1195  if (FAILED(hr)) {
1196  av_log(avctx, AV_LOG_ERROR, "DDA ReleaseFrame failed!\n");
1197  ret = AVERROR_EXTERNAL;
1198  goto fail;
1199  }
1200 
1201  if (dda->draw_mouse) {
1202  ret = draw_mouse_pointer(avctx, frame);
1203  if (ret < 0)
1204  goto fail;
1205  }
1206 
1207  frame->sample_aspect_ratio = (AVRational){1, 1};
1208 
1209  if (desc.Format == DXGI_FORMAT_B8G8R8A8_UNORM ||
1210  desc.Format == DXGI_FORMAT_R10G10B10A2_UNORM) {
1211  // According to MSDN, all integer formats contain sRGB image data
1212  frame->color_range = AVCOL_RANGE_JPEG;
1213  frame->color_primaries = AVCOL_PRI_BT709;
1214  frame->color_trc = AVCOL_TRC_IEC61966_2_1;
1215  frame->colorspace = AVCOL_SPC_RGB;
1216  } else if(desc.Format == DXGI_FORMAT_R16G16B16A16_FLOAT) {
1217  // According to MSDN, all floating point formats contain sRGB image data with linear 1.0 gamma.
1218  frame->color_range = AVCOL_RANGE_JPEG;
1219  frame->color_primaries = AVCOL_PRI_BT709;
1220  frame->color_trc = AVCOL_TRC_LINEAR;
1221  frame->colorspace = AVCOL_SPC_RGB;
1222  } else {
1223  ret = AVERROR_BUG;
1224  goto fail;
1225  }
1226 
1227  ret = av_frame_replace(dda->last_frame, frame);
1228  if (ret < 0)
1229  return ret;
1230 
1231 frame_done:
1232  frame->pts = now;
1233  dda->time_frame = time_frame;
1234 
1235  return ff_filter_frame(outlink, frame);
1236 
1237 fail:
1238  if (frame)
1239  av_frame_free(&frame);
1240 
1241  if (cur_texture)
1242  IDXGIOutputDuplication_ReleaseFrame(dda->dxgi_outdupl);
1243 
1244  release_resource(&cur_texture);
1245  return ret;
1246 }
1247 
1248 static const AVFilterPad ddagrab_outputs[] = {
1249  {
1250  .name = "default",
1251  .type = AVMEDIA_TYPE_VIDEO,
1252  .request_frame = ddagrab_request_frame,
1253  .config_props = ddagrab_config_props,
1254  },
1255 };
1256 
1257 const FFFilter ff_vsrc_ddagrab = {
1258  .p.name = "ddagrab",
1259  .p.description = NULL_IF_CONFIG_SMALL("Grab Windows Desktop images using Desktop Duplication API"),
1260  .p.priv_class = &ddagrab_class,
1261  .p.inputs = NULL,
1262  .p.flags = AVFILTER_FLAG_HWDEVICE,
1263  .priv_size = sizeof(DdagrabContext),
1264  .init = ddagrab_init,
1265  .uninit = ddagrab_uninit,
1266  FILTER_OUTPUTS(ddagrab_outputs),
1267  FILTER_SINGLE_PIXFMT(AV_PIX_FMT_D3D11),
1268  .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
1269 };