/* libavutil/hwcontext_videotoolbox.c — FFmpeg VideoToolbox hwcontext implementation */
1 /*
2 * This file is part of FFmpeg.
3 *
4 * FFmpeg is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Lesser General Public
6 * License as published by the Free Software Foundation; either
7 * version 2.1 of the License, or (at your option) any later version.
8 *
9 * FFmpeg is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Lesser General Public License for more details.
13 *
14 * You should have received a copy of the GNU Lesser General Public
15 * License along with FFmpeg; if not, write to the Free Software
16 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 */
18
19 #include "config.h"
20
21 #include <stdint.h>
22 #include <string.h>
23
24 #include <VideoToolbox/VideoToolbox.h>
25
26 #include "buffer.h"
27 #include "buffer_internal.h"
28 #include "common.h"
29 #include "hwcontext.h"
30 #include "hwcontext_internal.h"
31 #include "hwcontext_videotoolbox.h"
32 #include "mem.h"
33 #include "pixfmt.h"
34 #include "pixdesc.h"
35
/**
 * Private per-frames-context state for AV_HWDEVICE_TYPE_VIDEOTOOLBOX.
 * Layout note: the public struct must stay the first member so the
 * hwctx pointer can be cast either way.
 */
typedef struct VTFramesContext {
    /**
     * The public AVVTFramesContext. See hwcontext_videotoolbox.h for it.
     */
    AVVTFramesContext p;
    CVPixelBufferPoolRef pool; // buffer pool created by vt_pool_alloc(), released in vt_frames_uninit()
} VTFramesContext;
43
/*
 * Mapping between CoreVideo pixel format codes and AVPixelFormat,
 * plus whether the CV code implies full (JPEG) color range.
 * Lookups scan the table in order and return the first match, so the
 * order of video-/full-range pairs is significant (see
 * vt_format_from_pixfmt() for AVCOL_RANGE_UNSPECIFIED handling).
 * Entries are conditionally compiled depending on SDK availability.
 */
static const struct {
    uint32_t cv_fmt;            // CoreVideo pixel format code
    bool full_range;            // true if the CV format is full range
    enum AVPixelFormat pix_fmt; // corresponding FFmpeg pixel format
} cv_pix_fmts[] = {
    { kCVPixelFormatType_420YpCbCr8Planar,              false, AV_PIX_FMT_YUV420P },
    { kCVPixelFormatType_420YpCbCr8PlanarFullRange,     true,  AV_PIX_FMT_YUV420P },
    { kCVPixelFormatType_422YpCbCr8,                    false, AV_PIX_FMT_UYVY422 },
    { kCVPixelFormatType_32BGRA,                        true,  AV_PIX_FMT_BGRA },
#ifdef kCFCoreFoundationVersionNumber10_7
    { kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,  false, AV_PIX_FMT_NV12 },
    { kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,   true,  AV_PIX_FMT_NV12 },
    { kCVPixelFormatType_4444AYpCbCr8,                  false, AV_PIX_FMT_AYUV },
    { kCVPixelFormatType_4444AYpCbCr16,                 false, AV_PIX_FMT_AYUV64 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_420YPCBCR10BIPLANARVIDEORANGE
    { kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange, false, AV_PIX_FMT_P010 },
    { kCVPixelFormatType_420YpCbCr10BiPlanarFullRange,  true,  AV_PIX_FMT_P010 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR8BIPLANARVIDEORANGE
    { kCVPixelFormatType_422YpCbCr8BiPlanarVideoRange,  false, AV_PIX_FMT_NV16 },
    { kCVPixelFormatType_422YpCbCr8BiPlanarFullRange,   true,  AV_PIX_FMT_NV16 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR10BIPLANARVIDEORANGE
    { kCVPixelFormatType_422YpCbCr10BiPlanarVideoRange, false, AV_PIX_FMT_P210 },
    { kCVPixelFormatType_422YpCbCr10BiPlanarFullRange,  true,  AV_PIX_FMT_P210 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR16BIPLANARVIDEORANGE
    { kCVPixelFormatType_422YpCbCr16BiPlanarVideoRange, false, AV_PIX_FMT_P216 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR8BIPLANARVIDEORANGE
    { kCVPixelFormatType_444YpCbCr8BiPlanarVideoRange,  false, AV_PIX_FMT_NV24 },
    { kCVPixelFormatType_444YpCbCr8BiPlanarFullRange,   true,  AV_PIX_FMT_NV24 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR10BIPLANARVIDEORANGE
    { kCVPixelFormatType_444YpCbCr10BiPlanarVideoRange, false, AV_PIX_FMT_P410 },
    { kCVPixelFormatType_444YpCbCr10BiPlanarFullRange,  true,  AV_PIX_FMT_P410 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR16BIPLANARVIDEORANGE
    { kCVPixelFormatType_444YpCbCr16BiPlanarVideoRange, false, AV_PIX_FMT_P416 },
#endif
};
86
/*
 * Software pixel formats accepted as sw_format for this hwcontext.
 * Should stay in sync with the cv_pix_fmts table above; entries are
 * conditionally compiled depending on SDK availability.
 */
static const enum AVPixelFormat supported_formats[] = {
#ifdef kCFCoreFoundationVersionNumber10_7
    AV_PIX_FMT_NV12,
    AV_PIX_FMT_AYUV,
    AV_PIX_FMT_AYUV64,
#endif
    AV_PIX_FMT_YUV420P,
    AV_PIX_FMT_UYVY422,
#if HAVE_KCVPIXELFORMATTYPE_420YPCBCR10BIPLANARVIDEORANGE
    AV_PIX_FMT_P010,
#endif
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR8BIPLANARVIDEORANGE
    AV_PIX_FMT_NV16,
#endif
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR10BIPLANARVIDEORANGE
    AV_PIX_FMT_P210,
#endif
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR16BIPLANARVIDEORANGE
    AV_PIX_FMT_P216,
#endif
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR8BIPLANARVIDEORANGE
    AV_PIX_FMT_NV24,
#endif
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR10BIPLANARVIDEORANGE
    AV_PIX_FMT_P410,
#endif
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR16BIPLANARVIDEORANGE
    AV_PIX_FMT_P416,
#endif
    AV_PIX_FMT_BGRA,
};
118
119 static int vt_frames_get_constraints(AVHWDeviceContext *ctx,
120 const void *hwconfig,
121 AVHWFramesConstraints *constraints)
122 {
123 int i;
124
125 constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_formats) + 1,
126 sizeof(*constraints->valid_sw_formats));
127 if (!constraints->valid_sw_formats)
128 return AVERROR(ENOMEM);
129
130 for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++)
131 constraints->valid_sw_formats[i] = supported_formats[i];
132 constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_formats)] = AV_PIX_FMT_NONE;
133
134 constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
135 if (!constraints->valid_hw_formats)
136 return AVERROR(ENOMEM);
137
138 constraints->valid_hw_formats[0] = AV_PIX_FMT_VIDEOTOOLBOX;
139 constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
140
141 return 0;
142 }
143
144 enum AVPixelFormat av_map_videotoolbox_format_to_pixfmt(uint32_t cv_fmt)
145 {
146 int i;
147 for (i = 0; i < FF_ARRAY_ELEMS(cv_pix_fmts); i++) {
148 if (cv_pix_fmts[i].cv_fmt == cv_fmt)
149 return cv_pix_fmts[i].pix_fmt;
150 }
151 return AV_PIX_FMT_NONE;
152 }
153
154 static uint32_t vt_format_from_pixfmt(enum AVPixelFormat pix_fmt,
155 enum AVColorRange range)
156 {
157 for (int i = 0; i < FF_ARRAY_ELEMS(cv_pix_fmts); i++) {
158 if (cv_pix_fmts[i].pix_fmt == pix_fmt) {
159 int full_range = (range == AVCOL_RANGE_JPEG);
160
161 // Don't care if unspecified
162 if (range == AVCOL_RANGE_UNSPECIFIED)
163 return cv_pix_fmts[i].cv_fmt;
164
165 if (cv_pix_fmts[i].full_range == full_range)
166 return cv_pix_fmts[i].cv_fmt;
167 }
168 }
169
170 return 0;
171 }
172
/**
 * Legacy wrapper: maps assuming video (MPEG) range.
 * Kept for API compatibility; see av_map_videotoolbox_format_from_pixfmt2().
 */
uint32_t av_map_videotoolbox_format_from_pixfmt(enum AVPixelFormat pix_fmt)
{
    return av_map_videotoolbox_format_from_pixfmt2(pix_fmt, false);
}
177
178 uint32_t av_map_videotoolbox_format_from_pixfmt2(enum AVPixelFormat pix_fmt, bool full_range)
179 {
180 return vt_format_from_pixfmt(pix_fmt, full_range ? AVCOL_RANGE_JPEG : AVCOL_RANGE_MPEG);
181 }
182
183 static int vt_pool_alloc(AVHWFramesContext *ctx)
184 {
185 VTFramesContext *fctx = ctx->hwctx;
186 AVVTFramesContext *hw_ctx = &fctx->p;
187 CVReturn err;
188 CFNumberRef w, h, pixfmt;
189 uint32_t cv_pixfmt;
190 CFMutableDictionaryRef attributes, iosurface_properties;
191
192 attributes = CFDictionaryCreateMutable(
193 NULL,
194 2,
195 &kCFTypeDictionaryKeyCallBacks,
196 &kCFTypeDictionaryValueCallBacks);
197
198 cv_pixfmt = vt_format_from_pixfmt(ctx->sw_format, hw_ctx->color_range);
199 pixfmt = CFNumberCreate(NULL, kCFNumberSInt32Type, &cv_pixfmt);
200 CFDictionarySetValue(
201 attributes,
202 kCVPixelBufferPixelFormatTypeKey,
203 pixfmt);
204 CFRelease(pixfmt);
205
206 iosurface_properties = CFDictionaryCreateMutable(
207 NULL,
208 0,
209 &kCFTypeDictionaryKeyCallBacks,
210 &kCFTypeDictionaryValueCallBacks);
211 CFDictionarySetValue(attributes, kCVPixelBufferIOSurfacePropertiesKey, iosurface_properties);
212 CFRelease(iosurface_properties);
213
214 w = CFNumberCreate(NULL, kCFNumberSInt32Type, &ctx->width);
215 h = CFNumberCreate(NULL, kCFNumberSInt32Type, &ctx->height);
216 CFDictionarySetValue(attributes, kCVPixelBufferWidthKey, w);
217 CFDictionarySetValue(attributes, kCVPixelBufferHeightKey, h);
218 CFRelease(w);
219 CFRelease(h);
220
221 err = CVPixelBufferPoolCreate(
222 NULL,
223 NULL,
224 attributes,
225 &fctx->pool);
226 CFRelease(attributes);
227
228 if (err == kCVReturnSuccess)
229 return 0;
230
231 av_log(ctx, AV_LOG_ERROR, "Error creating CVPixelBufferPool: %d\n", err);
232 return AVERROR_EXTERNAL;
233 }
234
235 static void videotoolbox_buffer_release(void *opaque, uint8_t *data)
236 {
237 CVPixelBufferRelease((CVPixelBufferRef)data);
238 }
239
240 static AVBufferRef *vt_pool_alloc_buffer(void *opaque, size_t size)
241 {
242 CVPixelBufferRef pixbuf;
243 AVBufferRef *buf;
244 CVReturn err;
245 AVHWFramesContext *ctx = opaque;
246 VTFramesContext *fctx = ctx->hwctx;
247
248 err = CVPixelBufferPoolCreatePixelBuffer(
249 NULL,
250 fctx->pool,
251 &pixbuf
252 );
253 if (err != kCVReturnSuccess) {
254 av_log(ctx, AV_LOG_ERROR, "Failed to create pixel buffer from pool: %d\n", err);
255 return NULL;
256 }
257
258 buf = av_buffer_create((uint8_t *)pixbuf, size,
259 videotoolbox_buffer_release, NULL, 0);
260 if (!buf) {
261 CVPixelBufferRelease(pixbuf);
262 return NULL;
263 }
264 return buf;
265 }
266
267 static void vt_frames_uninit(AVHWFramesContext *ctx)
268 {
269 VTFramesContext *fctx = ctx->hwctx;
270 if (fctx->pool) {
271 CVPixelBufferPoolRelease(fctx->pool);
272 fctx->pool = NULL;
273 }
274 }
275
276 static int vt_frames_init(AVHWFramesContext *ctx)
277 {
278 int i, ret;
279
280 for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
281 if (ctx->sw_format == supported_formats[i])
282 break;
283 }
284 if (i == FF_ARRAY_ELEMS(supported_formats)) {
285 av_log(ctx, AV_LOG_ERROR, "Pixel format '%s' is not supported\n",
286 av_get_pix_fmt_name(ctx->sw_format));
287 return AVERROR(ENOSYS);
288 }
289
290 if (!ctx->pool) {
291 ffhwframesctx(ctx)->pool_internal = av_buffer_pool_init2(
292 sizeof(CVPixelBufferRef), ctx, vt_pool_alloc_buffer, NULL);
293 if (!ffhwframesctx(ctx)->pool_internal)
294 return AVERROR(ENOMEM);
295 }
296
297 ret = vt_pool_alloc(ctx);
298 if (ret < 0)
299 return ret;
300
301 return 0;
302 }
303
304 static int vt_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
305 {
306 frame->buf[0] = av_buffer_pool_get(ctx->pool);
307 if (!frame->buf[0])
308 return AVERROR(ENOMEM);
309
310 frame->data[3] = frame->buf[0]->data;
311 frame->format = AV_PIX_FMT_VIDEOTOOLBOX;
312 frame->width = ctx->width;
313 frame->height = ctx->height;
314
315 return 0;
316 }
317
318 static int vt_transfer_get_formats(AVHWFramesContext *ctx,
319 enum AVHWFrameTransferDirection dir,
320 enum AVPixelFormat **formats)
321 {
322 enum AVPixelFormat *fmts = av_malloc_array(2, sizeof(*fmts));
323 if (!fmts)
324 return AVERROR(ENOMEM);
325
326 fmts[0] = ctx->sw_format;
327 fmts[1] = AV_PIX_FMT_NONE;
328
329 *formats = fmts;
330 return 0;
331 }
332
333 static void vt_unmap(AVHWFramesContext *ctx, HWMapDescriptor *hwmap)
334 {
335 CVPixelBufferRef pixbuf = (CVPixelBufferRef)hwmap->source->data[3];
336
337 CVPixelBufferUnlockBaseAddress(pixbuf, (uintptr_t)hwmap->priv);
338 }
339
/**
 * Attach (or clear) the pixel aspect ratio of src on the pixel buffer.
 *
 * @return 0 on success, AVERROR(ENOMEM) if a CF object could not be created.
 */
static int vt_pixbuf_set_par(void *log_ctx,
                             CVPixelBufferRef pixbuf, const AVFrame *src)
{
    CFMutableDictionaryRef par = NULL;
    CFNumberRef num = NULL, den = NULL;
    AVRational avpar = src->sample_aspect_ratio;

    // Unknown aspect ratio: drop any stale attachment and succeed.
    if (avpar.num == 0) {
        CVBufferRemoveAttachment(pixbuf, kCVImageBufferPixelAspectRatioKey);
        return 0;
    }

    // Reduce the fraction so both terms fit in the 32-bit CFNumbers below.
    av_reduce(&avpar.num, &avpar.den,
              avpar.num, avpar.den,
              0xFFFFFFFF);

    num = CFNumberCreate(kCFAllocatorDefault,
                         kCFNumberIntType,
                         &avpar.num);

    den = CFNumberCreate(kCFAllocatorDefault,
                         kCFNumberIntType,
                         &avpar.den);

    par = CFDictionaryCreateMutable(kCFAllocatorDefault,
                                    2,
                                    &kCFCopyStringDictionaryKeyCallBacks,
                                    &kCFTypeDictionaryValueCallBacks);

    // All three objects are checked together so any subset that was
    // created gets released before reporting the failure.
    if (!par || !num || !den) {
        if (par) CFRelease(par);
        if (num) CFRelease(num);
        if (den) CFRelease(den);
        return AVERROR(ENOMEM);
    }

    CFDictionarySetValue(
        par,
        kCVImageBufferPixelAspectRatioHorizontalSpacingKey,
        num);
    CFDictionarySetValue(
        par,
        kCVImageBufferPixelAspectRatioVerticalSpacingKey,
        den);

    CVBufferSetAttachment(
        pixbuf,
        kCVImageBufferPixelAspectRatioKey,
        par,
        kCVAttachmentMode_ShouldPropagate
    );

    // The attachment retains the dictionary; drop our references.
    CFRelease(par);
    CFRelease(num);
    CFRelease(den);

    return 0;
}
398
399 CFStringRef av_map_videotoolbox_chroma_loc_from_av(enum AVChromaLocation loc)
400 {
401 switch (loc) {
402 case AVCHROMA_LOC_LEFT:
403 return kCVImageBufferChromaLocation_Left;
404 case AVCHROMA_LOC_CENTER:
405 return kCVImageBufferChromaLocation_Center;
406 case AVCHROMA_LOC_TOP:
407 return kCVImageBufferChromaLocation_Top;
408 case AVCHROMA_LOC_BOTTOM:
409 return kCVImageBufferChromaLocation_Bottom;
410 case AVCHROMA_LOC_TOPLEFT:
411 return kCVImageBufferChromaLocation_TopLeft;
412 case AVCHROMA_LOC_BOTTOMLEFT:
413 return kCVImageBufferChromaLocation_BottomLeft;
414 default:
415 return NULL;
416 }
417 }
418
419 static int vt_pixbuf_set_chromaloc(void *log_ctx,
420 CVPixelBufferRef pixbuf, const AVFrame *src)
421 {
422 CFStringRef loc = av_map_videotoolbox_chroma_loc_from_av(src->chroma_location);
423
424 if (loc) {
425 CVBufferSetAttachment(
426 pixbuf,
427 kCVImageBufferChromaLocationTopFieldKey,
428 loc,
429 kCVAttachmentMode_ShouldPropagate);
430 } else
431 CVBufferRemoveAttachment(
432 pixbuf,
433 kCVImageBufferChromaLocationTopFieldKey);
434
435 return 0;
436 }
437
/**
 * Translate an AVColorSpace into the corresponding CoreVideo YCbCr
 * matrix CFString. Returns NULL for unspecified/unmappable spaces.
 */
CFStringRef av_map_videotoolbox_color_matrix_from_av(enum AVColorSpace space)
{
    switch (space) {
    case AVCOL_SPC_BT2020_CL:
    case AVCOL_SPC_BT2020_NCL:
#if HAVE_KCVIMAGEBUFFERYCBCRMATRIX_ITU_R_2020
        if (__builtin_available(macOS 10.11, iOS 9, *))
            return kCVImageBufferYCbCrMatrix_ITU_R_2020;
#endif
        // Older SDKs/runtimes lack the constant: use its literal value.
        return CFSTR("ITU_R_2020");
    case AVCOL_SPC_BT470BG:
    case AVCOL_SPC_SMPTE170M:
        return kCVImageBufferYCbCrMatrix_ITU_R_601_4;
    case AVCOL_SPC_BT709:
        return kCVImageBufferYCbCrMatrix_ITU_R_709_2;
    case AVCOL_SPC_SMPTE240M:
        return kCVImageBufferYCbCrMatrix_SMPTE_240M_1995;
    default:
        // Unknown matrices: ask CoreVideo for the code point's string
        // where that API exists; otherwise fall through to NULL below.
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
        if (__builtin_available(macOS 10.13, iOS 11, tvOS 11, watchOS 4, *))
            return CVYCbCrMatrixGetStringForIntegerCodePoint(space);
#endif
    case AVCOL_SPC_UNSPECIFIED:
        return NULL;
    }
}
464
/**
 * Translate AVColorPrimaries into the corresponding CoreVideo color
 * primaries CFString. Returns NULL for unspecified/unmappable values.
 */
CFStringRef av_map_videotoolbox_color_primaries_from_av(enum AVColorPrimaries pri)
{
    switch (pri) {
    case AVCOL_PRI_BT2020:
#if HAVE_KCVIMAGEBUFFERCOLORPRIMARIES_ITU_R_2020
        if (__builtin_available(macOS 10.11, iOS 9, *))
            return kCVImageBufferColorPrimaries_ITU_R_2020;
#endif
        // Older SDKs/runtimes lack the constant: use its literal value.
        return CFSTR("ITU_R_2020");
    case AVCOL_PRI_BT709:
        return kCVImageBufferColorPrimaries_ITU_R_709_2;
    case AVCOL_PRI_SMPTE170M:
        return kCVImageBufferColorPrimaries_SMPTE_C;
    case AVCOL_PRI_BT470BG:
        return kCVImageBufferColorPrimaries_EBU_3213;
    default:
        // Unknown primaries: ask CoreVideo for the code point's string
        // where that API exists; otherwise fall through to NULL below.
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
        if (__builtin_available(macOS 10.13, iOS 11, tvOS 11, watchOS 4, *))
            return CVColorPrimariesGetStringForIntegerCodePoint(pri);
#endif
    case AVCOL_PRI_UNSPECIFIED:
        return NULL;
    }
}
489
/**
 * Translate an AVColorTransferCharacteristic into the corresponding
 * CoreVideo transfer function CFString. Returns NULL for
 * unspecified/unmappable values.
 */
CFStringRef av_map_videotoolbox_color_trc_from_av(enum AVColorTransferCharacteristic trc)
{

    switch (trc) {
    case AVCOL_TRC_SMPTE2084:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_SMPTE_ST_2084_PQ
        if (__builtin_available(macOS 10.13, iOS 11, *))
            return kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ;
#endif
        // Older SDKs/runtimes lack the constant: use its literal value.
        return CFSTR("SMPTE_ST_2084_PQ");
    case AVCOL_TRC_BT2020_10:
    case AVCOL_TRC_BT2020_12:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2020
        if (__builtin_available(macOS 10.11, iOS 9, *))
            return kCVImageBufferTransferFunction_ITU_R_2020;
#endif
        return CFSTR("ITU_R_2020");
    case AVCOL_TRC_BT709:
        return kCVImageBufferTransferFunction_ITU_R_709_2;
    case AVCOL_TRC_SMPTE240M:
        return kCVImageBufferTransferFunction_SMPTE_240M_1995;
    case AVCOL_TRC_SMPTE428:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_SMPTE_ST_428_1
        if (__builtin_available(macOS 10.12, iOS 10, *))
            return kCVImageBufferTransferFunction_SMPTE_ST_428_1;
#endif
        return CFSTR("SMPTE_ST_428_1");
    case AVCOL_TRC_ARIB_STD_B67:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
        if (__builtin_available(macOS 10.13, iOS 11, *))
            return kCVImageBufferTransferFunction_ITU_R_2100_HLG;
#endif
        return CFSTR("ITU_R_2100_HLG");
    // Gamma 2.2/2.8 both use the generic "UseGamma" function; the actual
    // exponent is attached separately (see vt_pixbuf_set_colorspace()).
    case AVCOL_TRC_GAMMA22:
        return kCVImageBufferTransferFunction_UseGamma;
    case AVCOL_TRC_GAMMA28:
        return kCVImageBufferTransferFunction_UseGamma;
    default:
        // Unknown TRCs: ask CoreVideo for the code point's string where
        // that API exists; otherwise fall through to NULL below.
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
        if (__builtin_available(macOS 10.13, iOS 11, tvOS 11, watchOS 4, *))
            return CVTransferFunctionGetStringForIntegerCodePoint(trc);
#endif
    case AVCOL_TRC_UNSPECIFIED:
        return NULL;
    }
}
536
/**
 * Copy all attachments for the specified mode from the given buffer.
 *
 * @return a retained CFDictionaryRef the caller must CFRelease,
 *         or NULL when the buffer has no attachments (or copying fails).
 */
static CFDictionaryRef vt_cv_buffer_copy_attachments(CVBufferRef buffer,
                                                     CVAttachmentMode attachment_mode)
{
    // Check that our SDK is at least macOS 12 / iOS 15 / tvOS 15
#if (TARGET_OS_OSX && defined(__MAC_12_0) && __MAC_OS_X_VERSION_MAX_ALLOWED >= __MAC_12_0) || \
    (TARGET_OS_IOS && defined(__IPHONE_15_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_15_0) || \
    (TARGET_OS_TV && defined(__TVOS_15_0) && __TV_OS_VERSION_MAX_ALLOWED >= __TVOS_15_0)
    // On recent enough versions, just use the respective API
    if (__builtin_available(macOS 12.0, iOS 15.0, tvOS 15.0, *))
        return CVBufferCopyAttachments(buffer, attachment_mode);
#endif

    // Check that the target is lower than macOS 12 / iOS 15 / tvOS 15
    // else this would generate a deprecation warning and anyway never run because
    // the runtime availability check above would be always true.
#if (TARGET_OS_OSX && (!defined(__MAC_12_0) || __MAC_OS_X_VERSION_MIN_REQUIRED < __MAC_12_0)) || \
    (TARGET_OS_IOS && (!defined(__IPHONE_15_0) || __IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_15_0)) || \
    (TARGET_OS_TV && (!defined(__TVOS_15_0) || __TV_OS_VERSION_MIN_REQUIRED < __TVOS_15_0))
    // Fallback on SDKs or runtime versions < macOS 12 / iOS 15 / tvOS 15:
    // CVBufferGetAttachments returns a borrowed reference, so copy it to
    // match the +1 ownership of CVBufferCopyAttachments above.
    CFDictionaryRef dict = CVBufferGetAttachments(buffer, attachment_mode);
    return (dict) ? CFDictionaryCreateCopy(NULL, dict) : NULL;
#else
    return NULL; // Impossible, just make the compiler happy
#endif
}
565
/**
 * Mirror the frame's colorimetry (YCbCr matrix, primaries, transfer
 * function, gamma level and derived CGColorSpace) as CV attachments on
 * the pixel buffer. Attachments for values that cannot be mapped are
 * removed so no stale metadata survives.
 *
 * @return 0 (always succeeds; unmappable values only produce warnings).
 */
static int vt_pixbuf_set_colorspace(void *log_ctx,
                                    CVPixelBufferRef pixbuf, const AVFrame *src)
{
    CGColorSpaceRef colorspace = NULL;
    CFStringRef colormatrix = NULL, colorpri = NULL, colortrc = NULL;
    Float32 gamma = 0;

    colormatrix = av_map_videotoolbox_color_matrix_from_av(src->colorspace);
    if (colormatrix)
        CVBufferSetAttachment(pixbuf, kCVImageBufferYCbCrMatrixKey,
                              colormatrix, kCVAttachmentMode_ShouldPropagate);
    else {
        CVBufferRemoveAttachment(pixbuf, kCVImageBufferYCbCrMatrixKey);
        // RGB intentionally has no matrix; only warn for real mismatches.
        if (src->colorspace != AVCOL_SPC_UNSPECIFIED && src->colorspace != AVCOL_SPC_RGB)
            av_log(log_ctx, AV_LOG_WARNING,
                   "Color space %s is not supported.\n",
                   av_color_space_name(src->colorspace));
    }

    colorpri = av_map_videotoolbox_color_primaries_from_av(src->color_primaries);
    if (colorpri)
        CVBufferSetAttachment(pixbuf, kCVImageBufferColorPrimariesKey,
                              colorpri, kCVAttachmentMode_ShouldPropagate);
    else {
        CVBufferRemoveAttachment(pixbuf, kCVImageBufferColorPrimariesKey);
        if (src->color_primaries != AVCOL_PRI_UNSPECIFIED)
            av_log(log_ctx, AV_LOG_WARNING,
                   "Color primaries %s is not supported.\n",
                   av_color_primaries_name(src->color_primaries));
    }

    colortrc = av_map_videotoolbox_color_trc_from_av(src->color_trc);
    if (colortrc)
        CVBufferSetAttachment(pixbuf, kCVImageBufferTransferFunctionKey,
                              colortrc, kCVAttachmentMode_ShouldPropagate);
    else {
        CVBufferRemoveAttachment(pixbuf, kCVImageBufferTransferFunctionKey);
        if (src->color_trc != AVCOL_TRC_UNSPECIFIED)
            av_log(log_ctx, AV_LOG_WARNING,
                   "Color transfer function %s is not supported.\n",
                   av_color_transfer_name(src->color_trc));
    }

    // The "UseGamma" transfer functions need the exponent attached
    // separately via the gamma-level key.
    if (src->color_trc == AVCOL_TRC_GAMMA22)
        gamma = 2.2;
    else if (src->color_trc == AVCOL_TRC_GAMMA28)
        gamma = 2.8;

    if (gamma != 0) {
        CFNumberRef gamma_level = CFNumberCreate(NULL, kCFNumberFloat32Type, &gamma);
        CVBufferSetAttachment(pixbuf, kCVImageBufferGammaLevelKey,
                              gamma_level, kCVAttachmentMode_ShouldPropagate);
        CFRelease(gamma_level);
    } else
        CVBufferRemoveAttachment(pixbuf, kCVImageBufferGammaLevelKey);

    // Derive a CGColorSpace from the attachments just set; the API
    // exists from macOS 10.8 / iOS 10 onward.
#if (TARGET_OS_OSX && __MAC_OS_X_VERSION_MAX_ALLOWED >= 100800) || \
    (TARGET_OS_IOS && __IPHONE_OS_VERSION_MAX_ALLOWED >= 100000)
    if (__builtin_available(macOS 10.8, iOS 10, *)) {
        CFDictionaryRef attachments =
            vt_cv_buffer_copy_attachments(pixbuf, kCVAttachmentMode_ShouldPropagate);

        if (attachments) {
            colorspace =
                CVImageBufferCreateColorSpaceFromAttachments(attachments);
            CFRelease(attachments);
        }
    }
#endif

    // Done outside the above preprocessor code and if's so that
    // in any case a wrong kCVImageBufferCGColorSpaceKey is removed
    // if the above code is not used or fails.
    if (colorspace) {
        CVBufferSetAttachment(pixbuf, kCVImageBufferCGColorSpaceKey,
                              colorspace, kCVAttachmentMode_ShouldPropagate);
        CFRelease(colorspace);
    } else
        CVBufferRemoveAttachment(pixbuf, kCVImageBufferCGColorSpaceKey);

    return 0;
}
648
649 static int vt_pixbuf_set_attachments(void *log_ctx,
650 CVPixelBufferRef pixbuf, const AVFrame *src)
651 {
652 int ret;
653 ret = vt_pixbuf_set_par(log_ctx, pixbuf, src);
654 if (ret < 0)
655 return ret;
656 ret = vt_pixbuf_set_colorspace(log_ctx, pixbuf, src);
657 if (ret < 0)
658 return ret;
659 ret = vt_pixbuf_set_chromaloc(log_ctx, pixbuf, src);
660 if (ret < 0)
661 return ret;
662 return 0;
663 }
664
/**
 * Public wrapper around vt_pixbuf_set_attachments(): copies PAR,
 * colorimetry and chroma-location metadata from src onto pixbuf.
 */
int av_vt_pixbuf_set_attachments(void *log_ctx,
                                 CVPixelBufferRef pixbuf, const AVFrame *src)
{
    return vt_pixbuf_set_attachments(log_ctx, pixbuf, src);
}
670
/**
 * Map a VideoToolbox frame (CVPixelBufferRef in src->data[3]) into CPU
 * memory, filling dst->data/linesize with the locked base addresses.
 *
 * The lock flags are stashed in the map descriptor's priv pointer so
 * vt_unmap() can unlock with the exact same flags.
 */
static int vt_map_frame(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src,
                        int flags)
{
    CVPixelBufferRef pixbuf = (CVPixelBufferRef)src->data[3];
    OSType pixel_format = CVPixelBufferGetPixelFormatType(pixbuf);
    CVReturn err;
    uint32_t map_flags = 0;
    int ret;
    int i;
    enum AVPixelFormat format;

    // The destination format must match what the buffer actually holds.
    format = av_map_videotoolbox_format_to_pixfmt(pixel_format);
    if (dst->format != format) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported or mismatching pixel format: %s\n",
               av_fourcc2str(pixel_format));
        return AVERROR_UNKNOWN;
    }

    if (CVPixelBufferGetWidth(pixbuf) != ctx->width ||
        CVPixelBufferGetHeight(pixbuf) != ctx->height) {
        av_log(ctx, AV_LOG_ERROR, "Inconsistent frame dimensions.\n");
        return AVERROR_UNKNOWN;
    }

    // Pure read mappings can use the cheaper read-only lock.
    if (flags == AV_HWFRAME_MAP_READ)
        map_flags = kCVPixelBufferLock_ReadOnly;

    err = CVPixelBufferLockBaseAddress(pixbuf, map_flags);
    if (err != kCVReturnSuccess) {
        av_log(ctx, AV_LOG_ERROR, "Error locking the pixel buffer.\n");
        return AVERROR_UNKNOWN;
    }

    if (CVPixelBufferIsPlanar(pixbuf)) {
        int planes = CVPixelBufferGetPlaneCount(pixbuf);
        for (i = 0; i < planes; i++) {
            dst->data[i]     = CVPixelBufferGetBaseAddressOfPlane(pixbuf, i);
            dst->linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(pixbuf, i);
        }
    } else {
        dst->data[0]     = CVPixelBufferGetBaseAddress(pixbuf);
        dst->linesize[0] = CVPixelBufferGetBytesPerRow(pixbuf);
    }

    ret = ff_hwframe_map_create(src->hw_frames_ctx, dst, src, vt_unmap,
                                (void *)(uintptr_t)map_flags);
    if (ret < 0)
        goto unlock;

    return 0;

unlock:
    // Creating the map descriptor failed: undo the lock ourselves.
    CVPixelBufferUnlockBaseAddress(pixbuf, map_flags);
    return ret;
}
726
727 static int vt_transfer_data_from(AVHWFramesContext *hwfc,
728 AVFrame *dst, const AVFrame *src)
729 {
730 AVFrame *map;
731 int err;
732
733 if (dst->width > hwfc->width || dst->height > hwfc->height)
734 return AVERROR(EINVAL);
735
736 map = av_frame_alloc();
737 if (!map)
738 return AVERROR(ENOMEM);
739 map->format = dst->format;
740
741 err = vt_map_frame(hwfc, map, src, AV_HWFRAME_MAP_READ);
742 if (err)
743 goto fail;
744
745 map->width = dst->width;
746 map->height = dst->height;
747
748 err = av_frame_copy(dst, map);
749 if (err)
750 goto fail;
751
752 err = 0;
753 fail:
754 av_frame_free(&map);
755 return err;
756 }
757
/**
 * Upload a software frame into a VideoToolbox frame by mapping the
 * destination pixel buffer writable, copying into it, then mirroring
 * the source frame's PAR/colorimetry/chroma-location as CV attachments.
 */
static int vt_transfer_data_to(AVHWFramesContext *hwfc,
                               AVFrame *dst, const AVFrame *src)
{
    AVFrame *map;
    int err;

    if (src->width > hwfc->width || src->height > hwfc->height)
        return AVERROR(EINVAL);

    map = av_frame_alloc();
    if (!map)
        return AVERROR(ENOMEM);
    map->format = src->format;

    err = vt_map_frame(hwfc, map, dst, AV_HWFRAME_MAP_WRITE | AV_HWFRAME_MAP_OVERWRITE);
    if (err)
        goto fail;

    // Restrict the mapped frame to the source size before copying.
    map->width = src->width;
    map->height = src->height;

    err = av_frame_copy(map, src);
    if (err)
        goto fail;

    err = vt_pixbuf_set_attachments(hwfc, (CVPixelBufferRef)dst->data[3], src);
    if (err)
        goto fail;

    err = 0;
fail:
    // Freeing the mapped frame triggers vt_unmap() via the map descriptor.
    av_frame_free(&map);
    return err;
}
792
793 static int vt_map_from(AVHWFramesContext *hwfc, AVFrame *dst,
794 const AVFrame *src, int flags)
795 {
796 int err;
797
798 if (dst->format == AV_PIX_FMT_NONE)
799 dst->format = hwfc->sw_format;
800 else if (dst->format != hwfc->sw_format)
801 return AVERROR(ENOSYS);
802
803 err = vt_map_frame(hwfc, dst, src, flags);
804 if (err)
805 return err;
806
807 dst->width = src->width;
808 dst->height = src->height;
809
810 err = av_frame_copy_props(dst, src);
811 if (err)
812 return err;
813
814 return 0;
815 }
816
817 static int vt_device_create(AVHWDeviceContext *ctx, const char *device,
818 AVDictionary *opts, int flags)
819 {
820 if (device && device[0]) {
821 av_log(ctx, AV_LOG_ERROR, "Device selection unsupported.\n");
822 return AVERROR_UNKNOWN;
823 }
824
825 return 0;
826 }
827
/**
 * HWContextType implementation for AV_HWDEVICE_TYPE_VIDEOTOOLBOX.
 * No .map_to is provided; uploads go through .transfer_data_to.
 */
const HWContextType ff_hwcontext_type_videotoolbox = {
    .type                 = AV_HWDEVICE_TYPE_VIDEOTOOLBOX,
    .name                 = "videotoolbox",

    .frames_hwctx_size    = sizeof(VTFramesContext),

    .device_create        = vt_device_create,
    .frames_init          = vt_frames_init,
    .frames_get_buffer    = vt_get_buffer,
    .frames_get_constraints = vt_frames_get_constraints,
    .frames_uninit        = vt_frames_uninit,
    .transfer_get_formats = vt_transfer_get_formats,
    .transfer_data_to     = vt_transfer_data_to,
    .transfer_data_from   = vt_transfer_data_from,
    .map_from             = vt_map_from,

    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VIDEOTOOLBOX, AV_PIX_FMT_NONE },
};