FFmpeg
avfoundation.m
Go to the documentation of this file.
1 /*
2  * AVFoundation input device
3  * Copyright (c) 2014 Thilo Borgmann <thilo.borgmann@mail.de>
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * AVFoundation input device
25  * @author Thilo Borgmann <thilo.borgmann@mail.de>
26  */
27 
28 #import <AVFoundation/AVFoundation.h>
29 #include <pthread.h>
30 
32 #include "libavutil/pixdesc.h"
33 #include "libavutil/opt.h"
34 #include "libavutil/avstring.h"
35 #include "libavformat/internal.h"
36 #include "libavutil/internal.h"
37 #include "libavutil/parseutils.h"
38 #include "libavutil/time.h"
39 #include "libavutil/imgutils.h"
40 #include "avdevice.h"
41 
/* Internal timebase used for all timestamps produced by this device: microseconds. */
static const int avf_time_base = 1000000;

/* avf_time_base expressed as an AVRational, for timestamp rescaling. */
static const AVRational avf_time_base_q = {
    .num = 1,
    .den = avf_time_base
};
48 
51  OSType avf_id;
52 };
53 
/* Mapping between FFmpeg pixel formats and the CoreVideo pixel format
 * identifiers understood by AVFoundation.  Terminated by an
 * AV_PIX_FMT_NONE sentinel entry; order matters only in that the first
 * mappable device format is used as a fallback when the requested
 * format is unavailable. */
static const struct AVFPixelFormatSpec avf_pixel_formats[] = {
    { AV_PIX_FMT_MONOBLACK,    kCVPixelFormatType_1Monochrome },
    { AV_PIX_FMT_RGB555BE,     kCVPixelFormatType_16BE555 },
    { AV_PIX_FMT_RGB555LE,     kCVPixelFormatType_16LE555 },
    { AV_PIX_FMT_RGB565BE,     kCVPixelFormatType_16BE565 },
    { AV_PIX_FMT_RGB565LE,     kCVPixelFormatType_16LE565 },
    { AV_PIX_FMT_RGB24,        kCVPixelFormatType_24RGB },
    { AV_PIX_FMT_BGR24,        kCVPixelFormatType_24BGR },
    { AV_PIX_FMT_0RGB,         kCVPixelFormatType_32ARGB },
    { AV_PIX_FMT_BGR0,         kCVPixelFormatType_32BGRA },
    { AV_PIX_FMT_0BGR,         kCVPixelFormatType_32ABGR },
    { AV_PIX_FMT_RGB0,         kCVPixelFormatType_32RGBA },
    { AV_PIX_FMT_BGR48BE,      kCVPixelFormatType_48RGB },
    { AV_PIX_FMT_UYVY422,      kCVPixelFormatType_422YpCbCr8 },
    { AV_PIX_FMT_YUVA444P,     kCVPixelFormatType_4444YpCbCrA8R },
    { AV_PIX_FMT_YUVA444P16LE, kCVPixelFormatType_4444AYpCbCr16 },
    { AV_PIX_FMT_YUV444P,      kCVPixelFormatType_444YpCbCr8 },
    { AV_PIX_FMT_YUV422P16,    kCVPixelFormatType_422YpCbCr16 },
    { AV_PIX_FMT_YUV422P10,    kCVPixelFormatType_422YpCbCr10 },
    { AV_PIX_FMT_YUV444P10,    kCVPixelFormatType_444YpCbCr10 },
    { AV_PIX_FMT_YUV420P,      kCVPixelFormatType_420YpCbCr8Planar },
    { AV_PIX_FMT_NV12,         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange },
    { AV_PIX_FMT_YUYV422,      kCVPixelFormatType_422YpCbCr8_yuvs },
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    /* kCVPixelFormatType_OneComponent8 only exists on OS X >= 10.8 */
    { AV_PIX_FMT_GRAY8,        kCVPixelFormatType_OneComponent8 },
#endif
    { AV_PIX_FMT_NONE, 0 }
};
82 
83 typedef struct
84 {
85  AVClass* class;
86 
92 
94  int width, height;
95 
102 
108 
109  char *url;
112 
114 
118  int audio_be;
122 
125 
126  enum AVPixelFormat pixel_format;
127 
128  AVCaptureSession *capture_session;
129  AVCaptureVideoDataOutput *video_output;
130  AVCaptureAudioDataOutput *audio_output;
131  CMSampleBufferRef current_frame;
132  CMSampleBufferRef current_audio_frame;
133 
134  AVCaptureDevice *observed_device;
135 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
136  AVCaptureDeviceTransportControlsPlaybackMode observed_mode;
137 #endif
139 } AVFContext;
140 
142 {
143  pthread_mutex_lock(&ctx->frame_lock);
144 }
145 
147 {
148  pthread_mutex_unlock(&ctx->frame_lock);
149 }
150 
/** FrameReceiver class - delegate for AVCaptureSession
 */
153 @interface AVFFrameReceiver : NSObject
154 {
156 }
157 
158 - (id)initWithContext:(AVFContext*)context;
159 
160 - (void) captureOutput:(AVCaptureOutput *)captureOutput
161  didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
162  fromConnection:(AVCaptureConnection *)connection;
163 
164 @end
165 
166 @implementation AVFFrameReceiver
167 
// Designated initializer: remember the owning AVFContext and, where
// transport controls exist (OS X >= 10.7, non-iOS), start KVO-observing the
// device's playback mode so tape-style devices can signal end of playback.
- (id)initWithContext:(AVFContext*)context
{
    self = [super init];
    if (!self)
        return nil;

    _context = context;

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    // Observe only if a device was configured for it; the context pointer
    // doubles as the KVO context so notifications can be disambiguated.
    if (_context->observed_device) {
        NSString *observedKeyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
        [_context->observed_device addObserver: self
                                    forKeyPath: observedKeyPath
                                       options: NSKeyValueObservingOptionNew
                                       context: _context];
    }
#endif

    return self;
}
188 
- (void)dealloc {
    // Unregister the KVO observation installed in initWithContext:; the
    // removeObserver: call must mirror the original addObserver: key path.
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    if (_context->observed_device) {
        NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
        [_context->observed_device removeObserver: self forKeyPath: keyPath];
    }
#endif
    // This file is built with manual reference counting (no ARC), hence the
    // explicit [super dealloc].
    [super dealloc];
}
199 
// KVO callback for transportControlsPlaybackMode changes on the observed
// device.  Notifications carrying our own context pointer are handled here;
// anything else is forwarded to super, as the KVO contract requires.
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
    if (context == _context) {
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
        AVCaptureDeviceTransportControlsPlaybackMode mode =
            [change[NSKeyValueChangeNewKey] integerValue];

        // On a transition into "not playing", flag observed_quit so the
        // capture loop can terminate; always remember the latest mode.
        if (mode != _context->observed_mode) {
            if (mode == AVCaptureDeviceTransportControlsNotPlayingMode) {
                _context->observed_quit = 1;
            }
            _context->observed_mode = mode;
        }
#endif
    } else {
        [super observeValueForKeyPath: keyPath
                             ofObject: object
                               change: change
                              context: context];
    }
}
223 
// AVCaptureVideoDataOutput delegate callback: keep only the most recent
// video frame.  The previous buffer (if any) is released and the new one
// retained for the demuxer thread to consume.
- (void) captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
         fromConnection:(AVCaptureConnection *)connection
{
    if (_context->current_frame != nil) {
        CFRelease(_context->current_frame);
    }

    _context->current_frame = (CMSampleBufferRef)CFRetain(videoFrame);

    // NOTE(review): current_frame is also read from the demuxer thread; the
    // lock_frames()/unlock_frames() guards (and the frames_captured update)
    // appear to be missing from this view of the file -- confirm against the
    // complete source.
}
240 
241 @end
242 
/** AudioReceiver class - delegate for AVCaptureSession
 */
245 @interface AVFAudioReceiver : NSObject
246 {
248 }
249 
250 - (id)initWithContext:(AVFContext*)context;
251 
252 - (void) captureOutput:(AVCaptureOutput *)captureOutput
253  didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
254  fromConnection:(AVCaptureConnection *)connection;
255 
256 @end
257 
258 @implementation AVFAudioReceiver
259 
// Designated initializer: store the owning AVFContext for later use by the
// sample-buffer delegate callback.
- (id)initWithContext:(AVFContext*)context
{
    self = [super init];
    if (!self)
        return nil;

    _context = context;
    return self;
}
267 
// AVCaptureAudioDataOutput delegate callback: keep only the most recent
// audio sample buffer.  The previous buffer (if any) is released and the
// new one retained for the demuxer thread to consume.
- (void) captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
         fromConnection:(AVCaptureConnection *)connection
{
    if (_context->current_audio_frame != nil) {
        CFRelease(_context->current_audio_frame);
    }

    _context->current_audio_frame = (CMSampleBufferRef)CFRetain(audioFrame);

    // NOTE(review): current_audio_frame is also read from the demuxer
    // thread; the lock_frames()/unlock_frames() guards (and the
    // audio_frames_captured update) appear to be missing from this view of
    // the file -- confirm against the complete source.
}
284 
285 @end
286 
288 {
289  [ctx->capture_session stopRunning];
290 
291  [ctx->capture_session release];
292  [ctx->video_output release];
293  [ctx->audio_output release];
294  [ctx->avf_delegate release];
295  [ctx->avf_audio_delegate release];
296 
297  ctx->capture_session = NULL;
298  ctx->video_output = NULL;
299  ctx->audio_output = NULL;
300  ctx->avf_delegate = NULL;
301  ctx->avf_audio_delegate = NULL;
302 
303  av_freep(&ctx->url);
304  av_freep(&ctx->audio_buffer);
305 
306  pthread_mutex_destroy(&ctx->frame_lock);
307 
308  if (ctx->current_frame) {
309  CFRelease(ctx->current_frame);
310  }
311 }
312 
314 {
315  AVFContext *ctx = (AVFContext*)s->priv_data;
316  char *save;
317 
318  ctx->url = av_strdup(s->url);
319 
320  if (!ctx->url)
321  return AVERROR(ENOMEM);
322  if (ctx->url[0] != ':') {
323  ctx->video_filename = av_strtok(ctx->url, ":", &save);
324  ctx->audio_filename = av_strtok(NULL, ":", &save);
325  } else {
326  ctx->audio_filename = av_strtok(ctx->url, ":", &save);
327  }
328  return 0;
329 }
330 
331 /**
332  * Configure the video device.
333  *
334  * Configure the video device using a run-time approach to access properties
335  * since formats, activeFormat are available since iOS >= 7.0 or OSX >= 10.7
 * and activeVideoMaxFrameDuration is available since iOS >= 7.0 and OSX >= 10.9.
337  *
338  * The NSUndefinedKeyException must be handled by the caller of this function.
339  *
340  */
static int configure_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;

    double framerate = av_q2d(ctx->framerate);
    NSObject *range = nil;
    NSObject *format = nil;
    NSObject *selected_range = nil;
    NSObject *selected_format = nil;

    // try to configure format by formats list
    // might raise an exception if no format list is given
    // (then fallback to default, no configuration)
    @try {
        // Walk the device's format list.  Properties are accessed via KVC /
        // performSelector: so the code still builds against SDKs that
        // predate -formats and -activeFormat.
        for (format in [video_device valueForKey:@"formats"]) {
            CMFormatDescriptionRef formatDescription;
            CMVideoDimensions dimensions;

            formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
            dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

            // A 0x0 request means "no size preference": accept any format.
            if ((ctx->width == 0 && ctx->height == 0) ||
                (dimensions.width == ctx->width && dimensions.height == ctx->height)) {

                selected_format = format;

                // Within a matching format, pick the frame-rate range whose
                // maximum equals the requested rate (0.01 fps tolerance).
                for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
                    double max_framerate;

                    [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
                    if (fabs (framerate - max_framerate) < 0.01) {
                        selected_range = range;
                        break;
                    }
                }
            }
        }

        if (!selected_format) {
            av_log(s, AV_LOG_ERROR, "Selected video size (%dx%d) is not supported by the device.\n",
                ctx->width, ctx->height);
            goto unsupported_format;
        }

        if (!selected_range) {
            av_log(s, AV_LOG_ERROR, "Selected framerate (%f) is not supported by the device.\n",
                framerate);
            // Muxed (e.g. DV) devices may not expose usable frame-rate
            // ranges; keep the device default instead of failing.
            if (ctx->video_is_muxed) {
                av_log(s, AV_LOG_ERROR, "Falling back to default.\n");
            } else {
                goto unsupported_format;
            }
        }

        if ([video_device lockForConfiguration:NULL] == YES) {
            if (selected_format) {
                [video_device setValue:selected_format forKey:@"activeFormat"];
            }
            if (selected_range) {
                // Pin both min and max frame duration to the same value to
                // force a fixed capture rate.
                NSValue *min_frame_duration = [selected_range valueForKey:@"minFrameDuration"];
                [video_device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
                [video_device setValue:min_frame_duration forKey:@"activeVideoMaxFrameDuration"];
            }
        } else {
            av_log(s, AV_LOG_ERROR, "Could not lock device for configuration.\n");
            return AVERROR(EINVAL);
        }
    } @catch(NSException *e) {
        // The run-time properties are missing on this SDK/OS: leave the
        // device at its default configuration.
        // NOTE(review): this av_log uses ctx where the rest of the function
        // logs against s -- confirm which is intended.
        av_log(ctx, AV_LOG_WARNING, "Configuration of video device failed, falling back to default.\n");
    }

    return 0;

unsupported_format:

    // Requested size/rate not available: list every supported combination
    // before failing.
    av_log(s, AV_LOG_ERROR, "Supported modes:\n");
    for (format in [video_device valueForKey:@"formats"]) {
        CMFormatDescriptionRef formatDescription;
        CMVideoDimensions dimensions;

        formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
        dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

        for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
            double min_framerate;
            double max_framerate;

            [[range valueForKey:@"minFrameRate"] getValue:&min_framerate];
            [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
            av_log(s, AV_LOG_ERROR, " %dx%d@[%f %f]fps\n",
                dimensions.width, dimensions.height,
                min_framerate, max_framerate);
        }
    }
    return AVERROR(EINVAL);
}
437 
/**
 * Attach the given video capture device (or screen input) to the capture
 * session, configure format / frame rate / pixel format, and install an
 * AVFFrameReceiver delegate that stores incoming frames.
 *
 * @param s            demuxer context whose priv_data is an AVFContext
 * @param video_device device to capture from; when video_device_index is
 *                     beyond num_video_devices this is actually an
 *                     AVCaptureInput (screen capture) passed through as-is
 * @return 0 on success, 1 or a negative AVERROR code on failure
 */
static int add_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    int ret;
    NSError *error = nil;
    AVCaptureInput* capture_input = nil;
    struct AVFPixelFormatSpec pxl_fmt_spec;
    NSNumber *pixel_format;
    NSDictionary *capture_dict;
    dispatch_queue_t queue;

    // Real devices must be wrapped in an AVCaptureDeviceInput; indices at or
    // beyond num_video_devices denote screen inputs that already are inputs.
    if (ctx->video_device_index < ctx->num_video_devices) {
        capture_input = (AVCaptureInput*) [[[AVCaptureDeviceInput alloc] initWithDevice:video_device error:&error] autorelease];
    } else {
        capture_input = (AVCaptureInput*) video_device;
    }

    if (!capture_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:capture_input]) {
        [ctx->capture_session addInput:capture_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->video_output = [[AVCaptureVideoDataOutput alloc] init];

    if (!ctx->video_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV video output\n");
        return 1;
    }

    // Configure device framerate and video size.  configure_video_device()
    // uses KVC, which throws NSUndefinedKeyException on SDKs lacking the
    // run-time properties; that specific exception is tolerated here and
    // the device keeps its default configuration.
    @try {
        if ((ret = configure_video_device(s, video_device)) < 0) {
            return ret;
        }
    } @catch (NSException *exception) {
        if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
            av_log (s, AV_LOG_ERROR, "An error occurred: %s", [exception.reason UTF8String]);
            return AVERROR_EXTERNAL;
        }
    }

    // select pixel format
    pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

    for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
        if (ctx->pixel_format == avf_pixel_formats[i].ff_id) {
            pxl_fmt_spec = avf_pixel_formats[i];
            break;
        }
    }

    // check if selected pixel format is supported by AVFoundation
    if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by AVFoundation.\n",
               av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
        return 1;
    }

    // check if the pixel format is available for this device
    if ([[ctx->video_output availableVideoCVPixelFormatTypes] indexOfObject:[NSNumber numberWithInt:pxl_fmt_spec.avf_id]] == NSNotFound) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by the input device.\n",
               av_get_pix_fmt_name(pxl_fmt_spec.ff_id));

        pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

        // List the formats the device does support and remember the first
        // one we can map, to be used as a fallback below.
        av_log(s, AV_LOG_ERROR, "Supported pixel formats:\n");
        for (NSNumber *pxl_fmt in [ctx->video_output availableVideoCVPixelFormatTypes]) {
            struct AVFPixelFormatSpec pxl_fmt_dummy;
            pxl_fmt_dummy.ff_id = AV_PIX_FMT_NONE;
            for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
                if ([pxl_fmt intValue] == avf_pixel_formats[i].avf_id) {
                    pxl_fmt_dummy = avf_pixel_formats[i];
                    break;
                }
            }

            if (pxl_fmt_dummy.ff_id != AV_PIX_FMT_NONE) {
                av_log(s, AV_LOG_ERROR, " %s\n", av_get_pix_fmt_name(pxl_fmt_dummy.ff_id));

                // select first supported pixel format instead of user selected (or default) pixel format
                if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
                    pxl_fmt_spec = pxl_fmt_dummy;
                }
            }
        }

        // fail if there is no appropriate pixel format or print a warning about overriding the pixel format
        if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
            return 1;
        } else {
            av_log(s, AV_LOG_WARNING, "Overriding selected pixel format to use %s instead.\n",
                   av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
        }
    }

    // set videoSettings to an empty dict for receiving raw data of muxed devices
    if (ctx->capture_raw_data) {
        ctx->pixel_format = pxl_fmt_spec.ff_id;
        ctx->video_output.videoSettings = @{ };
    } else {
        ctx->pixel_format = pxl_fmt_spec.ff_id;
        pixel_format = [NSNumber numberWithUnsignedInt:pxl_fmt_spec.avf_id];
        capture_dict = [NSDictionary dictionaryWithObject:pixel_format
                                                   forKey:(id)kCVPixelBufferPixelFormatTypeKey];

        [ctx->video_output setVideoSettings:capture_dict];
    }
    [ctx->video_output setAlwaysDiscardsLateVideoFrames:ctx->drop_late_frames];

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    // check for transport control support and set observer device if supported
    // (screen inputs are not AVCaptureDevices and must be excluded)
    if (!ctx->video_is_screen) {
        int trans_ctrl = [video_device transportControlsSupported];
        AVCaptureDeviceTransportControlsPlaybackMode trans_mode = [video_device transportControlsPlaybackMode];

        if (trans_ctrl) {
            ctx->observed_mode = trans_mode;
            ctx->observed_device = video_device;
        }
    }
#endif

    ctx->avf_delegate = [[AVFFrameReceiver alloc] initWithContext:ctx];

    // Deliver sample buffers to the delegate on a private serial queue.
    queue = dispatch_queue_create("avf_queue", NULL);
    [ctx->video_output setSampleBufferDelegate:ctx->avf_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->video_output]) {
        [ctx->capture_session addOutput:ctx->video_output];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video output to capture session\n");
        return 1;
    }

    return 0;
}
584 
/**
 * Attach the given audio capture device to the capture session and install
 * an AVFAudioReceiver delegate that stores incoming sample buffers.
 *
 * @param s            demuxer context whose priv_data is an AVFContext
 * @param audio_device device to capture audio from
 * @return 0 on success, 1 on failure
 */
static int add_audio_device(AVFormatContext *s, AVCaptureDevice *audio_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    NSError *error = nil;
    dispatch_queue_t audio_queue;
    AVCaptureDeviceInput *input =
        [[[AVCaptureDeviceInput alloc] initWithDevice:audio_device error:&error] autorelease];

    if (!input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if (![ctx->capture_session canAddInput:input]) {
        av_log(s, AV_LOG_ERROR, "can't add audio input to capture session\n");
        return 1;
    }
    [ctx->capture_session addInput:input];

    // Create the data output that will hand us raw audio sample buffers.
    ctx->audio_output = [[AVCaptureAudioDataOutput alloc] init];
    if (!ctx->audio_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV audio output\n");
        return 1;
    }

    // Deliver sample buffers to the delegate on a private serial queue.
    ctx->avf_audio_delegate = [[AVFAudioReceiver alloc] initWithContext:ctx];
    audio_queue = dispatch_queue_create("avf_audio_queue", NULL);
    [ctx->audio_output setSampleBufferDelegate:ctx->avf_audio_delegate queue:audio_queue];
    dispatch_release(audio_queue);

    if (![ctx->capture_session canAddOutput:ctx->audio_output]) {
        av_log(s, AV_LOG_ERROR, "adding audio output to capture session failed\n");
        return 1;
    }
    [ctx->capture_session addOutput:ctx->audio_output];

    return 0;
}
628 
630 {
631  AVFContext *ctx = (AVFContext*)s->priv_data;
632  CVImageBufferRef image_buffer;
633  CMBlockBufferRef block_buffer;
634  CGSize image_buffer_size;
635  AVStream* stream = avformat_new_stream(s, NULL);
636 
637  if (!stream) {
638  return 1;
639  }
640 
641  // Take stream info from the first frame.
642  while (ctx->frames_captured < 1) {
643  CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
644  }
645 
646  lock_frames(ctx);
647 
648  ctx->video_stream_index = stream->index;
649 
650  avpriv_set_pts_info(stream, 64, 1, avf_time_base);
651 
652  image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
653  block_buffer = CMSampleBufferGetDataBuffer(ctx->current_frame);
654 
655  if (image_buffer) {
656  image_buffer_size = CVImageBufferGetEncodedSize(image_buffer);
657 
658  stream->codecpar->codec_id = AV_CODEC_ID_RAWVIDEO;
659  stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
660  stream->codecpar->width = (int)image_buffer_size.width;
661  stream->codecpar->height = (int)image_buffer_size.height;
662  stream->codecpar->format = ctx->pixel_format;
663  } else {
664  stream->codecpar->codec_id = AV_CODEC_ID_DVVIDEO;
665  stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
666  stream->codecpar->format = ctx->pixel_format;
667  }
668 
669  CFRelease(ctx->current_frame);
670  ctx->current_frame = nil;
671 
673 
674  return 0;
675 }
676 
678 {
679  AVFContext *ctx = (AVFContext*)s->priv_data;
680  CMFormatDescriptionRef format_desc;
681  AVStream* stream = avformat_new_stream(s, NULL);
682 
683  if (!stream) {
684  return 1;
685  }
686 
687  // Take stream info from the first frame.
688  while (ctx->audio_frames_captured < 1) {
689  CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
690  }
691 
692  lock_frames(ctx);
693 
694  ctx->audio_stream_index = stream->index;
695 
696  avpriv_set_pts_info(stream, 64, 1, avf_time_base);
697 
698  format_desc = CMSampleBufferGetFormatDescription(ctx->current_audio_frame);
699  const AudioStreamBasicDescription *basic_desc = CMAudioFormatDescriptionGetStreamBasicDescription(format_desc);
700 
701  if (!basic_desc) {
703  av_log(s, AV_LOG_ERROR, "audio format not available\n");
704  return 1;
705  }
706 
707  stream->codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
708  stream->codecpar->sample_rate = basic_desc->mSampleRate;
709  av_channel_layout_default(&stream->codecpar->ch_layout, basic_desc->mChannelsPerFrame);
710 
711  ctx->audio_channels = basic_desc->mChannelsPerFrame;
712  ctx->audio_bits_per_sample = basic_desc->mBitsPerChannel;
713  ctx->audio_float = basic_desc->mFormatFlags & kAudioFormatFlagIsFloat;
714  ctx->audio_be = basic_desc->mFormatFlags & kAudioFormatFlagIsBigEndian;
715  ctx->audio_signed_integer = basic_desc->mFormatFlags & kAudioFormatFlagIsSignedInteger;
716  ctx->audio_packed = basic_desc->mFormatFlags & kAudioFormatFlagIsPacked;
717  ctx->audio_non_interleaved = basic_desc->mFormatFlags & kAudioFormatFlagIsNonInterleaved;
718 
719  if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
720  ctx->audio_float &&
721  ctx->audio_bits_per_sample == 32 &&
722  ctx->audio_packed) {
723  stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_F32BE : AV_CODEC_ID_PCM_F32LE;
724  } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
725  ctx->audio_signed_integer &&
726  ctx->audio_bits_per_sample == 16 &&
727  ctx->audio_packed) {
728  stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S16BE : AV_CODEC_ID_PCM_S16LE;
729  } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
730  ctx->audio_signed_integer &&
731  ctx->audio_bits_per_sample == 24 &&
732  ctx->audio_packed) {
733  stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S24BE : AV_CODEC_ID_PCM_S24LE;
734  } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
735  ctx->audio_signed_integer &&
736  ctx->audio_bits_per_sample == 32 &&
737  ctx->audio_packed) {
738  stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S32BE : AV_CODEC_ID_PCM_S32LE;
739  } else {
741  av_log(s, AV_LOG_ERROR, "audio format is not supported\n");
742  return 1;
743  }
744 
745  if (ctx->audio_non_interleaved) {
746  CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
747  ctx->audio_buffer_size = CMBlockBufferGetDataLength(block_buffer);
748  ctx->audio_buffer = av_malloc(ctx->audio_buffer_size);
749  if (!ctx->audio_buffer) {
751  av_log(s, AV_LOG_ERROR, "error allocating audio buffer\n");
752  return 1;
753  }
754  }
755 
756  CFRelease(ctx->current_audio_frame);
757  ctx->current_audio_frame = nil;
758 
760 
761  return 0;
762 }
763 
/**
 * Enumerate the capture devices that provide the given media type.
 *
 * On newer SDKs (iOS >= 10, macOS >= 10.15) this builds an explicit device
 * type list and queries AVCaptureDeviceDiscoverySession; on older SDKs it
 * falls back to +[AVCaptureDevice devicesWithMediaType:].
 *
 * @param mediaType AVMediaTypeVideo, AVMediaTypeAudio or AVMediaTypeMuxed
 * @return array of matching AVCaptureDevice objects, or nil when the media
 *         type cannot be enumerated on this platform
 */
static NSArray* getDevicesWithMediaType(AVMediaType mediaType) {
#if ((TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 100000) || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101500))
    NSMutableArray *deviceTypes = nil;
    if (mediaType == AVMediaTypeVideo) {
        // The wide-angle camera exists everywhere; the remaining camera
        // types are gated on the OS version that introduced them.
        deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]];
    #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 100000)
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInDualCamera];
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInTelephotoCamera];
    #endif
    #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110100)
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInTrueDepthCamera];
    #endif
    #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 130000)
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInTripleCamera];
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInDualWideCamera];
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInUltraWideCamera];
    #endif
    #if (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 130000)
        [deviceTypes addObject: AVCaptureDeviceTypeDeskViewCamera];
    #endif
    #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 150400)
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInLiDARDepthCamera];
    #endif
    #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 170000 || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 140000))
        [deviceTypes addObject: AVCaptureDeviceTypeContinuityCamera];
    #endif
    } else if (mediaType == AVMediaTypeAudio) {
        // AVCaptureDeviceTypeMicrophone replaced BuiltInMicrophone in
        // iOS 17 / macOS 14.
    #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 170000 || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 140000))
        deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeMicrophone]];
    #else
        deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeBuiltInMicrophone]];
    #endif
    } else if (mediaType == AVMediaTypeMuxed) {
        // Muxed (e.g. DV) devices are external; the type name changed in
        // iOS 17 / macOS 14, and older iOS has no equivalent at all.
    #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 170000 || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 140000))
        deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeExternal]];
    #elif (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED < 140000)
        deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeExternalUnknown]];
    #else
        return nil;
    #endif
    } else {
        return nil;
    }

    AVCaptureDeviceDiscoverySession *captureDeviceDiscoverySession =
        [AVCaptureDeviceDiscoverySession
        discoverySessionWithDeviceTypes:deviceTypes
                              mediaType:mediaType
                               position:AVCaptureDevicePositionUnspecified];
    return [captureDeviceDiscoverySession devices];
#else
    return [AVCaptureDevice devicesWithMediaType:mediaType];
#endif
}
818 
820 {
821  int ret = 0;
822  NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
823  uint32_t num_screens = 0;
824  AVFContext *ctx = (AVFContext*)s->priv_data;
825  AVCaptureDevice *video_device = nil;
826  AVCaptureDevice *audio_device = nil;
827  // Find capture device
828  NSArray *devices = getDevicesWithMediaType(AVMediaTypeVideo);
829  NSArray *devices_muxed = getDevicesWithMediaType(AVMediaTypeMuxed);
830 
831  ctx->num_video_devices = [devices count] + [devices_muxed count];
832 
833  pthread_mutex_init(&ctx->frame_lock, NULL);
834 
835 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
836  CGGetActiveDisplayList(0, NULL, &num_screens);
837 #endif
838 
839  // List devices if requested
840  if (ctx->list_devices) {
841  int index = 0;
842  av_log(ctx, AV_LOG_INFO, "AVFoundation video devices:\n");
843  for (AVCaptureDevice *device in devices) {
844  const char *name = [[device localizedName] UTF8String];
845  index = [devices indexOfObject:device];
846  av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
847  }
848  for (AVCaptureDevice *device in devices_muxed) {
849  const char *name = [[device localizedName] UTF8String];
850  index = [devices count] + [devices_muxed indexOfObject:device];
851  av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
852  }
853 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
854  if (num_screens > 0) {
855  CGDirectDisplayID screens[num_screens];
856  CGGetActiveDisplayList(num_screens, screens, &num_screens);
857  for (int i = 0; i < num_screens; i++) {
858  av_log(ctx, AV_LOG_INFO, "[%d] Capture screen %d\n", ctx->num_video_devices + i, i);
859  }
860  }
861 #endif
862 
863  av_log(ctx, AV_LOG_INFO, "AVFoundation audio devices:\n");
864  devices = getDevicesWithMediaType(AVMediaTypeAudio);
865  for (AVCaptureDevice *device in devices) {
866  const char *name = [[device localizedName] UTF8String];
867  int index = [devices indexOfObject:device];
868  av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
869  }
870  goto fail;
871  }
872 
873  // parse input filename for video and audio device
875  if (ret)
876  goto fail;
877 
878  // check for device index given in filename
879  if (ctx->video_device_index == -1 && ctx->video_filename) {
880  sscanf(ctx->video_filename, "%d", &ctx->video_device_index);
881  }
882  if (ctx->audio_device_index == -1 && ctx->audio_filename) {
883  sscanf(ctx->audio_filename, "%d", &ctx->audio_device_index);
884  }
885 
886  if (ctx->video_device_index >= 0) {
887  if (ctx->video_device_index < ctx->num_video_devices) {
888  if (ctx->video_device_index < [devices count]) {
889  video_device = [devices objectAtIndex:ctx->video_device_index];
890  } else {
891  video_device = [devices_muxed objectAtIndex:(ctx->video_device_index - [devices count])];
892  ctx->video_is_muxed = 1;
893  }
894  } else if (ctx->video_device_index < ctx->num_video_devices + num_screens) {
895 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
896  CGDirectDisplayID screens[num_screens];
897  CGGetActiveDisplayList(num_screens, screens, &num_screens);
898  AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[ctx->video_device_index - ctx->num_video_devices]] autorelease];
899 
900  if (ctx->framerate.num > 0) {
901  capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
902  }
903 
904 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
905  if (ctx->capture_cursor) {
906  capture_screen_input.capturesCursor = YES;
907  } else {
908  capture_screen_input.capturesCursor = NO;
909  }
910 #endif
911 
912  if (ctx->capture_mouse_clicks) {
913  capture_screen_input.capturesMouseClicks = YES;
914  } else {
915  capture_screen_input.capturesMouseClicks = NO;
916  }
917 
918  video_device = (AVCaptureDevice*) capture_screen_input;
919  ctx->video_is_screen = 1;
920 #endif
921  } else {
922  av_log(ctx, AV_LOG_ERROR, "Invalid device index\n");
923  goto fail;
924  }
925  } else if (ctx->video_filename &&
926  strncmp(ctx->video_filename, "none", 4)) {
927  if (!strncmp(ctx->video_filename, "default", 7)) {
928  video_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
929  } else {
930  // looking for video inputs
931  for (AVCaptureDevice *device in devices) {
932  if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
933  video_device = device;
934  break;
935  }
936  }
937  // looking for muxed inputs
938  for (AVCaptureDevice *device in devices_muxed) {
939  if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
940  video_device = device;
941  ctx->video_is_muxed = 1;
942  break;
943  }
944  }
945 
946 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
947  // looking for screen inputs
948  if (!video_device) {
949  int idx;
950  if(sscanf(ctx->video_filename, "Capture screen %d", &idx) && idx < num_screens) {
951  CGDirectDisplayID screens[num_screens];
952  CGGetActiveDisplayList(num_screens, screens, &num_screens);
953  AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[idx]] autorelease];
954  video_device = (AVCaptureDevice*) capture_screen_input;
955  ctx->video_device_index = ctx->num_video_devices + idx;
956  ctx->video_is_screen = 1;
957 
958  if (ctx->framerate.num > 0) {
959  capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
960  }
961 
962 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
963  if (ctx->capture_cursor) {
964  capture_screen_input.capturesCursor = YES;
965  } else {
966  capture_screen_input.capturesCursor = NO;
967  }
968 #endif
969 
970  if (ctx->capture_mouse_clicks) {
971  capture_screen_input.capturesMouseClicks = YES;
972  } else {
973  capture_screen_input.capturesMouseClicks = NO;
974  }
975  }
976  }
977 #endif
978  }
979 
980  if (!video_device) {
981  av_log(ctx, AV_LOG_ERROR, "Video device not found\n");
982  goto fail;
983  }
984  }
985 
986  // get audio device
987  if (ctx->audio_device_index >= 0) {
988  NSArray *devices = getDevicesWithMediaType(AVMediaTypeAudio);
989 
990  if (ctx->audio_device_index >= [devices count]) {
991  av_log(ctx, AV_LOG_ERROR, "Invalid audio device index\n");
992  goto fail;
993  }
994 
995  audio_device = [devices objectAtIndex:ctx->audio_device_index];
996  } else if (ctx->audio_filename &&
997  strncmp(ctx->audio_filename, "none", 4)) {
998  if (!strncmp(ctx->audio_filename, "default", 7)) {
999  audio_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
1000  } else {
1001  NSArray *devices = getDevicesWithMediaType(AVMediaTypeAudio);
1002 
1003  for (AVCaptureDevice *device in devices) {
1004  if (!strncmp(ctx->audio_filename, [[device localizedName] UTF8String], strlen(ctx->audio_filename))) {
1005  audio_device = device;
1006  break;
1007  }
1008  }
1009  }
1010 
1011  if (!audio_device) {
1012  av_log(ctx, AV_LOG_ERROR, "Audio device not found\n");
1013  goto fail;
1014  }
1015  }
1016 
1017  // Video nor Audio capture device not found, looking for AVMediaTypeVideo/Audio
1018  if (!video_device && !audio_device) {
1019  av_log(s, AV_LOG_ERROR, "No AV capture device found\n");
1020  goto fail;
1021  }
1022 
1023  if (video_device) {
1024  if (ctx->video_device_index < ctx->num_video_devices) {
1025  av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device localizedName] UTF8String]);
1026  } else {
1027  av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device description] UTF8String]);
1028  }
1029  }
1030  if (audio_device) {
1031  av_log(s, AV_LOG_DEBUG, "audio device '%s' opened\n", [[audio_device localizedName] UTF8String]);
1032  }
1033 
1034  // Initialize capture session
1035  ctx->capture_session = [[AVCaptureSession alloc] init];
1036 
1037  if (video_device && add_video_device(s, video_device)) {
1038  goto fail;
1039  }
1040  if (audio_device && add_audio_device(s, audio_device)) {
1041  }
1042 
1043  [ctx->capture_session startRunning];
1044 
1045  /* Unlock device configuration only after the session is started so it
1046  * does not reset the capture formats */
1047  if (!ctx->video_is_screen) {
1048  [video_device unlockForConfiguration];
1049  }
1050 
1051  if (video_device && get_video_config(s)) {
1052  goto fail;
1053  }
1054 
1055  // set audio stream
1056  if (audio_device && get_audio_config(s)) {
1057  goto fail;
1058  }
1059 
1060  [pool release];
1061  return 0;
1062 
1063 fail:
1064  [pool release];
1066  if (ret)
1067  return ret;
1068  return AVERROR(EIO);
1069 }
1070 
/**
 * Copy the contents of a CVPixelBuffer into an already-allocated AVPacket.
 *
 * Handles both planar and packed (chunky) pixel-buffer layouts. The buffer's
 * base address is locked for CPU access for the duration of the copy and
 * unlocked before returning.
 *
 * @param s            demuxer context (provides ctx->pixel_format)
 * @param image_buffer locked-capable pixel buffer delivered by AVFoundation
 * @param pkt          destination packet; pkt->data/pkt->size must already
 *                     be allocated large enough for the image
 * @return 0 on success, a negative AVERROR code on failure
 */
static int copy_cvpixelbuffer(AVFormatContext *s,
                              CVPixelBufferRef image_buffer,
                              AVPacket *pkt)
{
    AVFContext *ctx = s->priv_data;
    int src_linesize[4];
    const uint8_t *src_data[4];
    int width  = CVPixelBufferGetWidth(image_buffer);
    int height = CVPixelBufferGetHeight(image_buffer);
    int status;

    memset(src_linesize, 0, sizeof(src_linesize));
    memset(src_data, 0, sizeof(src_data));

    /* CPU access to the pixel data requires locking the base address first. */
    status = CVPixelBufferLockBaseAddress(image_buffer, 0);
    if (status != kCVReturnSuccess) {
        av_log(s, AV_LOG_ERROR, "Could not lock base address: %d (%dx%d)\n", status, width, height);
        return AVERROR_EXTERNAL;
    }

    if (CVPixelBufferIsPlanar(image_buffer)) {
        size_t plane_count = CVPixelBufferGetPlaneCount(image_buffer);
        int i;
        for (i = 0; i < plane_count; i++) {
            src_linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(image_buffer, i);
            src_data[i]     = CVPixelBufferGetBaseAddressOfPlane(image_buffer, i);
        }
    } else {
        src_linesize[0] = CVPixelBufferGetBytesPerRow(image_buffer);
        src_data[0]     = CVPixelBufferGetBaseAddress(image_buffer);
    }

    status = av_image_copy_to_buffer(pkt->data, pkt->size,
                                     src_data, src_linesize,
                                     ctx->pixel_format, width, height, 1);

    CVPixelBufferUnlockBaseAddress(image_buffer, 0);

    return status;
}
1113 
/**
 * Demuxer read_packet callback.
 *
 * Loops until the capture delegates have produced either a video frame
 * (ctx->current_frame) or an audio frame (ctx->current_audio_frame), then
 * copies it into pkt. When no frame is pending, returns AVERROR(EAGAIN)
 * (or AVERROR_EOF after the device disappeared / quit was observed).
 *
 * @param s   demuxer context
 * @param pkt packet to be filled; allocated here via av_new_packet()
 * @return 0 on success, a negative AVERROR code otherwise
 */
static int avf_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVFContext* ctx = (AVFContext*)s->priv_data;

    do {
        CVImageBufferRef image_buffer;
        CMBlockBufferRef block_buffer;
        lock_frames(ctx);

        if (ctx->current_frame != nil) {
            int status;
            int length = 0;

            /* A video sample carries either a pixel buffer (raw capture) or a
             * block buffer (muxed/raw-data capture), never both. */
            image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
            block_buffer = CMSampleBufferGetDataBuffer(ctx->current_frame);

            if (image_buffer != nil) {
                length = (int)CVPixelBufferGetDataSize(image_buffer);
            } else if (block_buffer != nil) {
                length = (int)CMBlockBufferGetDataLength(block_buffer);
            } else {
                unlock_frames(ctx);
                return AVERROR(EINVAL);
            }

            if (av_new_packet(pkt, length) < 0) {
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            CMItemCount count;
            CMSampleTimingInfo timing_info;

            /* Rescale the capture timestamp into the stream time base. */
            if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_frame, 1, &timing_info, &count) == noErr) {
                AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
                pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
            }

            pkt->stream_index  = ctx->video_stream_index;
            pkt->flags        |= AV_PKT_FLAG_KEY;

            if (image_buffer) {
                status = copy_cvpixelbuffer(s, image_buffer, pkt);
            } else {
                status = 0;
                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
                if (ret != kCMBlockBufferNoErr) {
                    status = AVERROR(EIO);
                }
            }
            CFRelease(ctx->current_frame);
            ctx->current_frame = nil;

            if (status < 0) {
                unlock_frames(ctx);
                return status;
            }
        } else if (ctx->current_audio_frame != nil) {
            CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
            int block_buffer_size = CMBlockBufferGetDataLength(block_buffer);

            if (!block_buffer || !block_buffer_size) {
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            /* Non-interleaved samples are staged in ctx->audio_buffer before
             * interleaving; that buffer was sized at stream-setup time. */
            if (ctx->audio_non_interleaved && block_buffer_size > ctx->audio_buffer_size) {
                unlock_frames(ctx);
                return AVERROR_BUFFER_TOO_SMALL;
            }

            if (av_new_packet(pkt, block_buffer_size) < 0) {
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            CMItemCount count;
            CMSampleTimingInfo timing_info;

            if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_audio_frame, 1, &timing_info, &count) == noErr) {
                AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
                pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
            }

            pkt->stream_index  = ctx->audio_stream_index;
            pkt->flags        |= AV_PKT_FLAG_KEY;

            if (ctx->audio_non_interleaved) {
                int sample, c, shift, num_samples;

                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, ctx->audio_buffer);
                if (ret != kCMBlockBufferNoErr) {
                    unlock_frames(ctx);
                    return AVERROR(EIO);
                }

                num_samples = pkt->size / (ctx->audio_channels * (ctx->audio_bits_per_sample >> 3));

                /* transform decoded frame into output format
                 * (planar per-channel buffers -> interleaved packet data) */
                #define INTERLEAVE_OUTPUT(bps)                                         \
                {                                                                      \
                    int##bps##_t **src;                                                \
                    int##bps##_t *dest;                                                \
                    src = av_malloc(ctx->audio_channels * sizeof(int##bps##_t*));      \
                    if (!src) {                                                        \
                        unlock_frames(ctx);                                            \
                        return AVERROR(EIO);                                           \
                    }                                                                  \
                                                                                       \
                    for (c = 0; c < ctx->audio_channels; c++) {                        \
                        src[c] = ((int##bps##_t*)ctx->audio_buffer) + c * num_samples; \
                    }                                                                  \
                    dest  = (int##bps##_t*)pkt->data;                                  \
                    shift = bps - ctx->audio_bits_per_sample;                          \
                    for (sample = 0; sample < num_samples; sample++)                   \
                        for (c = 0; c < ctx->audio_channels; c++)                      \
                            *dest++ = src[c][sample] << shift;                         \
                    av_freep(&src);                                                    \
                }

                if (ctx->audio_bits_per_sample <= 16) {
                    INTERLEAVE_OUTPUT(16)
                } else {
                    INTERLEAVE_OUTPUT(32)
                }
            } else {
                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
                if (ret != kCMBlockBufferNoErr) {
                    unlock_frames(ctx);
                    return AVERROR(EIO);
                }
            }

            CFRelease(ctx->current_audio_frame);
            ctx->current_audio_frame = nil;
        } else {
            /* Nothing captured yet: report EAGAIN (or EOF once the device
             * has been observed to quit) so the caller can retry. */
            pkt->data = NULL;
            unlock_frames(ctx);
            if (ctx->observed_quit) {
                return AVERROR_EOF;
            } else {
                return AVERROR(EAGAIN);
            }
        }

        unlock_frames(ctx);
    } while (!pkt->data);

    return 0;
}
1264 
/**
 * Demuxer read_close callback: tear down the capture session and free all
 * context resources via destroy_context().
 *
 * @param s demuxer context
 * @return always 0
 */
static int avf_close(AVFormatContext *s)
{
    AVFContext* ctx = (AVFContext*)s->priv_data;
    destroy_context(ctx);
    return 0;
}
1271 
/* Private demuxer options, exposed through avf_class below. */
static const AVOption options[] = {
    { "list_devices", "list available devices", offsetof(AVFContext, list_devices), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(AVFContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "audio_device_index", "select audio device by index for devices with same name (starts at 0)", offsetof(AVFContext, audio_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "pixel_format", "set pixel format", offsetof(AVFContext, pixel_format), AV_OPT_TYPE_PIXEL_FMT, {.i64 = AV_PIX_FMT_YUV420P}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM},
    { "framerate", "set frame rate", offsetof(AVFContext, framerate), AV_OPT_TYPE_VIDEO_RATE, {.str = "ntsc"}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "video_size", "set video size", offsetof(AVFContext, width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_cursor", "capture the screen cursor", offsetof(AVFContext, capture_cursor), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_mouse_clicks", "capture the screen mouse clicks", offsetof(AVFContext, capture_mouse_clicks), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_raw_data", "capture the raw data from device connection", offsetof(AVFContext, capture_raw_data), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "drop_late_frames", "drop frames that are available later than expected", offsetof(AVFContext, drop_late_frames), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },

    { NULL },
};
1286 
/* AVClass for the AVFoundation input device (ties the options table in). */
static const AVClass avf_class = {
    .class_name = "AVFoundation indev",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};
1294 
/* Demuxer definition registering the AVFoundation capture callbacks. */
const AVInputFormat ff_avfoundation_demuxer = {
    .name           = "avfoundation",
    .long_name      = NULL_IF_CONFIG_SMALL("AVFoundation input device"),
    .priv_data_size = sizeof(AVFContext),
    .read_header    = avf_read_header,
    .read_packet    = avf_read_packet,
    .read_close     = avf_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &avf_class,
};
error
static void error(const char *err)
Definition: target_bsf_fuzzer.c:31
AV_CODEC_ID_PCM_S16LE
@ AV_CODEC_ID_PCM_S16LE
Definition: codec_id.h:331
pthread_mutex_t
_fmutex pthread_mutex_t
Definition: os2threads.h:53
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AV_CODEC_ID_PCM_F32BE
@ AV_CODEC_ID_PCM_F32BE
Definition: codec_id.h:351
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
AVFContext::audio_buffer_size
int audio_buffer_size
Definition: avfoundation.m:124
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
AVFContext::audio_float
int audio_float
Definition: avfoundation.m:117
AVFContext::observed_quit
int observed_quit
Definition: avfoundation.m:138
unlock_frames
static void unlock_frames(AVFContext *ctx)
Definition: avfoundation.m:146
avformat_new_stream
AVStream * avformat_new_stream(AVFormatContext *s, const struct AVCodec *c)
Add a new stream to a media file.
AVERROR_EOF
#define AVERROR_EOF
End of file.
Definition: error.h:57
AV_OPT_TYPE_VIDEO_RATE
@ AV_OPT_TYPE_VIDEO_RATE
offset must point to AVRational
Definition: opt.h:238
pthread_mutex_init
static av_always_inline int pthread_mutex_init(pthread_mutex_t *mutex, const pthread_mutexattr_t *attr)
Definition: os2threads.h:104
AV_CODEC_ID_RAWVIDEO
@ AV_CODEC_ID_RAWVIDEO
Definition: codec_id.h:65
AVFContext::current_audio_frame
CMSampleBufferRef current_audio_frame
Definition: avfoundation.m:132
pixdesc.h
AVFContext::audio_frames_captured
int audio_frames_captured
Definition: avfoundation.m:88
AVPacket::data
uint8_t * data
Definition: packet.h:522
AVOption
AVOption.
Definition: opt.h:251
AV_PIX_FMT_BGR24
@ AV_PIX_FMT_BGR24
packed RGB 8:8:8, 24bpp, BGRBGR...
Definition: pixfmt.h:76
AV_PKT_FLAG_KEY
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
Definition: packet.h:577
parse_device_name
static int parse_device_name(AVFormatContext *s)
Definition: avfoundation.m:313
AV_PIX_FMT_RGB555BE
@ AV_PIX_FMT_RGB555BE
packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), big-endian , X=unused/undefined
Definition: pixfmt.h:114
AVFContext::audio_channels
int audio_channels
Definition: avfoundation.m:115
av_malloc
#define av_malloc(s)
Definition: tableprint_vlc.h:30
AVFContext::video_filename
char * video_filename
Definition: avfoundation.m:110
AVFPixelFormatSpec::avf_id
OSType avf_id
Definition: avfoundation.m:51
AVFContext::audio_be
int audio_be
Definition: avfoundation.m:118
AVFContext::capture_cursor
int capture_cursor
Definition: avfoundation.m:96
avpriv_set_pts_info
void avpriv_set_pts_info(AVStream *st, int pts_wrap_bits, unsigned int pts_num, unsigned int pts_den)
Set the time base and wrapping info for a given stream.
Definition: avformat.c:830
AV_CODEC_ID_PCM_S16BE
@ AV_CODEC_ID_PCM_S16BE
Definition: codec_id.h:332
fail
#define fail()
Definition: checkasm.h:179
avf_close
static int avf_close(AVFormatContext *s)
Definition: avfoundation.m:1265
avf_time_base
static const int avf_time_base
Definition: avfoundation.m:42
read_close
static av_cold int read_close(AVFormatContext *ctx)
Definition: libcdio.c:143
AVFContext::current_frame
CMSampleBufferRef current_frame
Definition: avfoundation.m:131
AVFPixelFormatSpec::ff_id
enum AVPixelFormat ff_id
Definition: avfoundation.m:50
AVFContext::observed_device
AVCaptureDevice * observed_device
Definition: avfoundation.m:134
AVERROR_BUFFER_TOO_SMALL
#define AVERROR_BUFFER_TOO_SMALL
Buffer too small.
Definition: error.h:53
AVRational::num
int num
Numerator.
Definition: rational.h:59
AVFContext::framerate
AVRational framerate
Definition: avfoundation.m:93
AV_PIX_FMT_YUV444P10
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:485
description
Tag description
Definition: snow.txt:206
pkt
AVPacket * pkt
Definition: movenc.c:59
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
avf_time_base_q
static const AVRational avf_time_base_q
Definition: avfoundation.m:44
AVInputFormat
Definition: avformat.h:549
AV_PIX_FMT_YUV422P16
#define AV_PIX_FMT_YUV422P16
Definition: pixfmt.h:494
read_packet
static int read_packet(void *opaque, uint8_t *buf, int buf_size)
Definition: avio_read_callback.c:41
AVFContext::num_video_devices
int num_video_devices
Definition: avfoundation.m:113
INTERLEAVE_OUTPUT
#define INTERLEAVE_OUTPUT(bps)
width
#define width
getDevicesWithMediaType
static NSArray * getDevicesWithMediaType(AVMediaType mediaType)
Definition: avfoundation.m:764
s
#define s(width, name)
Definition: cbs_vp9.c:198
av_new_packet
int av_new_packet(AVPacket *pkt, int size)
Allocate the payload of a packet and initialize its fields with default values.
Definition: avpacket.c:98
format
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample format(the sample packing is implied by the sample format) and sample rate. The lists are not just lists
AVInputFormat::name
const char * name
A comma separated list of short names for the format.
Definition: avformat.h:554
AVFAudioReceiver::_context
AVFContext * _context
Definition: avfoundation.m:247
options
static const AVOption options[]
Definition: avfoundation.m:1272
add_audio_device
static int add_audio_device(AVFormatContext *s, AVCaptureDevice *audio_device)
Definition: avfoundation.m:585
AVMEDIA_TYPE_AUDIO
@ AVMEDIA_TYPE_AUDIO
Definition: avutil.h:202
av_q2d
static double av_q2d(AVRational a)
Convert an AVRational to a double.
Definition: rational.h:104
av_strtok
char * av_strtok(char *s, const char *delim, char **saveptr)
Split the string into several tokens which can be accessed by successive calls to av_strtok().
Definition: avstring.c:178
AVFContext::capture_mouse_clicks
int capture_mouse_clicks
Definition: avfoundation.m:97
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:201
ctx
AVFormatContext * ctx
Definition: movenc.c:48
AVFContext::frame_lock
pthread_mutex_t frame_lock
Definition: avfoundation.m:89
av_rescale_q
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:142
AVFContext::capture_raw_data
int capture_raw_data
Definition: avfoundation.m:98
AVFContext::list_devices
int list_devices
Definition: avfoundation.m:103
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
AVFPixelFormatSpec
Definition: avfoundation.m:49
get_video_config
static int get_video_config(AVFormatContext *s)
Definition: avfoundation.m:629
if
if(ret)
Definition: filter_design.txt:179
AVFContext::audio_packed
int audio_packed
Definition: avfoundation.m:120
AVFFrameReceiver::_context
AVFContext * _context
Definition: avfoundation.m:155
context
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option keep it simple and lowercase description are in without and describe what they for example set the foo of the bar offset is the offset of the field in your context
Definition: writing_filters.txt:91
AVFormatContext
Format I/O context.
Definition: avformat.h:1202
internal.h
AVFContext::video_output
AVCaptureVideoDataOutput * video_output
Definition: avfoundation.m:129
framerate
float framerate
Definition: av1_levels.c:29
AVFContext::audio_signed_integer
int audio_signed_integer
Definition: avfoundation.m:119
AV_PIX_FMT_RGB565LE
@ AV_PIX_FMT_RGB565LE
packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian
Definition: pixfmt.h:113
LIBAVUTIL_VERSION_INT
#define LIBAVUTIL_VERSION_INT
Definition: version.h:85
read_header
static int read_header(FFV1Context *f)
Definition: ffv1dec.c:550
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:66
fabs
static __device__ float fabs(float a)
Definition: cuda_runtime.h:182
NULL
#define NULL
Definition: coverity.c:32
AVFContext::drop_late_frames
int drop_late_frames
Definition: avfoundation.m:99
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:74
add_video_device
static int add_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
Definition: avfoundation.m:438
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
AVFFrameReceiver
FrameReceiver class - delegate for AVCaptureSession.
Definition: avfoundation.m:153
AV_PIX_FMT_MONOBLACK
@ AV_PIX_FMT_MONOBLACK
Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb.
Definition: pixfmt.h:83
AV_OPT_TYPE_IMAGE_SIZE
@ AV_OPT_TYPE_IMAGE_SIZE
offset must point to two consecutive integers
Definition: opt.h:235
av_default_item_name
const char * av_default_item_name(void *ptr)
Return the context name.
Definition: log.c:237
parseutils.h
AV_PIX_FMT_BGR0
@ AV_PIX_FMT_BGR0
packed BGR 8:8:8, 32bpp, BGRXBGRX... X=unused/undefined
Definition: pixfmt.h:265
time.h
AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT
@ AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT
Definition: log.h:41
AV_PIX_FMT_YUV422P10
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:483
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:81
avf_read_packet
static int avf_read_packet(AVFormatContext *s, AVPacket *pkt)
Definition: avfoundation.m:1114
AVFContext::width
int width
Definition: avfoundation.m:94
configure_video_device
static int configure_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
Configure the video device.
Definition: avfoundation.m:341
index
int index
Definition: gxfenc.c:89
c
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
pthread_mutex_unlock
#define pthread_mutex_unlock(a)
Definition: ffprobe.c:79
AVFContext::audio_buffer
int32_t * audio_buffer
Definition: avfoundation.m:123
AVFContext::video_stream_index
int video_stream_index
Definition: avfoundation.m:105
AV_CODEC_ID_PCM_S24LE
@ AV_CODEC_ID_PCM_S24LE
Definition: codec_id.h:343
init
int(* init)(AVBSFContext *ctx)
Definition: dts2pts.c:365
AV_PIX_FMT_RGB24
@ AV_PIX_FMT_RGB24
packed RGB 8:8:8, 24bpp, RGBRGB...
Definition: pixfmt.h:75
AVMediaType
AVMediaType
Definition: avutil.h:199
AVPacket::size
int size
Definition: packet.h:523
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:106
destroy_context
static void destroy_context(AVFContext *ctx)
Definition: avfoundation.m:287
shift
static int shift(int a, int b)
Definition: bonk.c:262
AVFContext::url
char * url
Definition: avfoundation.m:109
AVFormatContext::url
char * url
input or output URL.
Definition: avformat.h:1285
sample
#define sample
Definition: flacdsp_template.c:44
av_make_q
static AVRational av_make_q(int num, int den)
Create an AVRational.
Definition: rational.h:71
ff_avfoundation_demuxer
const AVInputFormat ff_avfoundation_demuxer
Definition: avfoundation.m:1295
AVFMT_NOFILE
#define AVFMT_NOFILE
Demuxer will use avio_open, no opened file should be provided by the caller.
Definition: avformat.h:469
AVFContext::audio_non_interleaved
int audio_non_interleaved
Definition: avfoundation.m:121
range
enum AVColorRange range
Definition: mediacodec_wrapper.c:2646
avdevice.h
AVPacket::dts
int64_t dts
Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed.
Definition: packet.h:521
height
#define height
AV_PIX_FMT_YUVA444P
@ AV_PIX_FMT_YUVA444P
planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
Definition: pixfmt.h:174
AVERROR_EXTERNAL
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:59
AVPacket::flags
int flags
A combination of AV_PKT_FLAG values.
Definition: packet.h:528
AV_PIX_FMT_RGB0
@ AV_PIX_FMT_RGB0
packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined
Definition: pixfmt.h:263
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:191
pthread_mutex_destroy
static av_always_inline int pthread_mutex_destroy(pthread_mutex_t *mutex)
Definition: os2threads.h:112
av_channel_layout_default
void av_channel_layout_default(AVChannelLayout *ch_layout, int nb_channels)
Get the default channel layout for a given number of channels.
Definition: channel_layout.c:1033
lock_frames
static void lock_frames(AVFContext *ctx)
Definition: avfoundation.m:141
AVFContext::audio_stream_index
int audio_stream_index
Definition: avfoundation.m:107
copy_cvpixelbuffer
static int copy_cvpixelbuffer(AVFormatContext *s, CVPixelBufferRef image_buffer, AVPacket *pkt)
Definition: avfoundation.m:1071
AV_PIX_FMT_RGB555LE
@ AV_PIX_FMT_RGB555LE
packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), little-endian, X=unused/undefined
Definition: pixfmt.h:115
AVFContext::audio_bits_per_sample
int audio_bits_per_sample
Definition: avfoundation.m:116
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:255
AVPacket::pts
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: packet.h:515
avf_read_header
static int avf_read_header(AVFormatContext *s)
Definition: avfoundation.m:819
internal.h
AV_OPT_FLAG_DECODING_PARAM
#define AV_OPT_FLAG_DECODING_PARAM
a generic parameter which can be set by the user for demuxing or decoding
Definition: opt.h:282
AV_CODEC_ID_DVVIDEO
@ AV_CODEC_ID_DVVIDEO
Definition: codec_id.h:76
AV_CODEC_ID_PCM_S32BE
@ AV_CODEC_ID_PCM_S32BE
Definition: codec_id.h:340
AVFContext::frames_captured
int frames_captured
Definition: avfoundation.m:87
AVFContext::video_is_muxed
int video_is_muxed
Definition: avfoundation.m:100
ret
ret
Definition: filter_design.txt:187
AVStream
Stream structure.
Definition: avformat.h:841
AV_PIX_FMT_0BGR
@ AV_PIX_FMT_0BGR
packed BGR 8:8:8, 32bpp, XBGRXBGR... X=unused/undefined
Definition: pixfmt.h:264
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:96
AVFContext::audio_device_index
int audio_device_index
Definition: avfoundation.m:106
AVClass::class_name
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
Definition: log.h:71
avf_pixel_formats
static const struct AVFPixelFormatSpec avf_pixel_formats[]
Definition: avfoundation.m:54
AVFContext::audio_output
AVCaptureAudioDataOutput * audio_output
Definition: avfoundation.m:130
id
enum AVCodecID id
Definition: dts2pts.c:364
AV_PIX_FMT_UYVY422
@ AV_PIX_FMT_UYVY422
packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
Definition: pixfmt.h:88
status
ov_status_e status
Definition: dnn_backend_openvino.c:120
AVFContext::avf_audio_delegate
id avf_audio_delegate
Definition: avfoundation.m:91
channel_layout.h
AVFContext::video_is_screen
int video_is_screen
Definition: avfoundation.m:101
mode
mode
Definition: ebur128.h:83
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:225
AV_OPT_TYPE_PIXEL_FMT
@ AV_OPT_TYPE_PIXEL_FMT
Definition: opt.h:236
AVPacket::stream_index
int stream_index
Definition: packet.h:524
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:78
AVFContext::audio_filename
char * audio_filename
Definition: avfoundation.m:111
AV_PIX_FMT_RGB565BE
@ AV_PIX_FMT_RGB565BE
packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian
Definition: pixfmt.h:112
av_strdup
char * av_strdup(const char *s)
Duplicate a string.
Definition: mem.c:270
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
AV_CODEC_ID_PCM_S32LE
@ AV_CODEC_ID_PCM_S32LE
Definition: codec_id.h:339
get_audio_config
static int get_audio_config(AVFormatContext *s)
Definition: avfoundation.m:677
AVFContext
Definition: avfoundation.m:83
timing_info
static int FUNC() timing_info(CodedBitstreamContext *ctx, RWContext *rw, AV1RawTimingInfo *current)
Definition: cbs_av1_syntax_template.c:158
av_image_copy_to_buffer
int av_image_copy_to_buffer(uint8_t *dst, int dst_size, const uint8_t *const src_data[4], const int src_linesize[4], enum AVPixelFormat pix_fmt, int width, int height, int align)
Copy image data from an image into a buffer.
Definition: imgutils.c:501
AVPacket
This structure stores compressed data.
Definition: packet.h:499
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Definition: opt.h:244
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
int32_t
int32_t
Definition: audioconvert.c:56
imgutils.h
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:474
AVFContext::video_device_index
int video_device_index
Definition: avfoundation.m:104
AV_PIX_FMT_0RGB
@ AV_PIX_FMT_0RGB
packed RGB 8:8:8, 32bpp, XRGBXRGB... X=unused/undefined
Definition: pixfmt.h:262
AV_CODEC_ID_PCM_F32LE
@ AV_CODEC_ID_PCM_F32LE
Definition: codec_id.h:352
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
AVFAudioReceiver
AudioReceiver class - delegate for AVCaptureSession.
Definition: avfoundation.m:245
avstring.h
AVFContext::avf_delegate
id avf_delegate
Definition: avfoundation.m:90
AV_PIX_FMT_YUVA444P16LE
@ AV_PIX_FMT_YUVA444P16LE
planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian)
Definition: pixfmt.h:192
avf_class
static const AVClass avf_class
Definition: avfoundation.m:1287
int
int
Definition: ffmpeg_filter.c:424
AVFContext::capture_session
AVCaptureSession * capture_session
Definition: avfoundation.m:128
AV_CODEC_ID_PCM_S24BE
@ AV_CODEC_ID_PCM_S24BE
Definition: codec_id.h:344
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:2888
AV_PIX_FMT_BGR48BE
@ AV_PIX_FMT_BGR48BE
packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big...
Definition: pixfmt.h:145
pthread_mutex_lock
#define pthread_mutex_lock(a)
Definition: ffprobe.c:75