Diffstat (limited to 'contrib/SDL-3.2.8/src/camera/coremedia')
-rw-r--r--  contrib/SDL-3.2.8/src/camera/coremedia/SDL_camera_coremedia.m | 508
1 file changed, 508 insertions, 0 deletions
diff --git a/contrib/SDL-3.2.8/src/camera/coremedia/SDL_camera_coremedia.m b/contrib/SDL-3.2.8/src/camera/coremedia/SDL_camera_coremedia.m
new file mode 100644
index 0000000..2ecfd13
--- /dev/null
+++ b/contrib/SDL-3.2.8/src/camera/coremedia/SDL_camera_coremedia.m
@@ -0,0 +1,508 @@
1/*
2 Simple DirectMedia Layer
3 Copyright (C) 1997-2025 Sam Lantinga <slouken@libsdl.org>
4
5 This software is provided 'as-is', without any express or implied
6 warranty. In no event will the authors be held liable for any damages
7 arising from the use of this software.
8
9 Permission is granted to anyone to use this software for any purpose,
10 including commercial applications, and to alter it and redistribute it
11 freely, subject to the following restrictions:
12
13 1. The origin of this software must not be misrepresented; you must not
14 claim that you wrote the original software. If you use this software
15 in a product, an acknowledgment in the product documentation would be
16 appreciated but is not required.
17 2. Altered source versions must be plainly marked as such, and must not be
18 misrepresented as being the original software.
19 3. This notice may not be removed or altered from any source distribution.
20*/
21#include "SDL_internal.h"
22
23#ifdef SDL_CAMERA_DRIVER_COREMEDIA
24
25#include "../SDL_syscamera.h"
26#include "../SDL_camera_c.h"
27#include "../../thread/SDL_systhread.h"
28
29#import <AVFoundation/AVFoundation.h>
30#import <CoreMedia/CoreMedia.h>
31
32/*
33 * Need to link with: CoreMedia CoreVideo
34 *
35 * Add to Info.plist:
36 * <key>NSCameraUsageDescription</key> <string>Access camera</string>
37 *
38 *
39 * MACOSX:
40 * Add to the Code Sign Entitlement file:
41 * <key>com.apple.security.device.camera</key> <true/>
42 */
43
44static void CoreMediaFormatToSDL(FourCharCode fmt, SDL_PixelFormat *pixel_format, SDL_Colorspace *colorspace)
45{
46 switch (fmt) {
47 #define CASE(x, y, z) case x: *pixel_format = y; *colorspace = z; return
48 // the 16LE ones should use 16BE if we're on a big-endian system like PowerPC,
49 // but at the current time there is no big-endian Apple platform that has CoreMedia.
50 CASE(kCMPixelFormat_16LE555, SDL_PIXELFORMAT_XRGB1555, SDL_COLORSPACE_SRGB);
51 CASE(kCMPixelFormat_16LE5551, SDL_PIXELFORMAT_RGBA5551, SDL_COLORSPACE_SRGB);
52 CASE(kCMPixelFormat_16LE565, SDL_PIXELFORMAT_RGB565, SDL_COLORSPACE_SRGB);
53 CASE(kCMPixelFormat_24RGB, SDL_PIXELFORMAT_RGB24, SDL_COLORSPACE_SRGB);
54 CASE(kCMPixelFormat_32ARGB, SDL_PIXELFORMAT_ARGB32, SDL_COLORSPACE_SRGB);
55 CASE(kCMPixelFormat_32BGRA, SDL_PIXELFORMAT_BGRA32, SDL_COLORSPACE_SRGB);
56 CASE(kCMPixelFormat_422YpCbCr8, SDL_PIXELFORMAT_UYVY, SDL_COLORSPACE_BT709_LIMITED);
57 CASE(kCMPixelFormat_422YpCbCr8_yuvs, SDL_PIXELFORMAT_YUY2, SDL_COLORSPACE_BT709_LIMITED);
58 CASE(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, SDL_PIXELFORMAT_NV12, SDL_COLORSPACE_BT709_LIMITED);
59 CASE(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, SDL_PIXELFORMAT_NV12, SDL_COLORSPACE_BT709_FULL);
60 CASE(kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange, SDL_PIXELFORMAT_P010, SDL_COLORSPACE_BT2020_LIMITED);
61 CASE(kCVPixelFormatType_420YpCbCr10BiPlanarFullRange, SDL_PIXELFORMAT_P010, SDL_COLORSPACE_BT2020_FULL);
62 #undef CASE
63 default:
64 #if DEBUG_CAMERA
65 SDL_Log("CAMERA: Unknown format FourCharCode '%d'", (int) fmt);
66 #endif
67 break;
68 }
69 *pixel_format = SDL_PIXELFORMAT_UNKNOWN;
70 *colorspace = SDL_COLORSPACE_UNKNOWN;
71}
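
// Editor's illustrative sketch (not part of the upstream driver): a small helper showing
// how the mapping above could be exercised while debugging format negotiation. The name
// DebugLogCoreMediaFormat is hypothetical; it is disabled here and never called.
#if 0   // illustrative only; could be enabled alongside DEBUG_CAMERA while debugging.
static void DebugLogCoreMediaFormat(FourCharCode fmt)
{
    SDL_PixelFormat pixel_format;
    SDL_Colorspace colorspace;
    CoreMediaFormatToSDL(fmt, &pixel_format, &colorspace);
    SDL_Log("CAMERA: FourCharCode '%d' maps to %s (colorspace 0x%x)",
            (int) fmt, SDL_GetPixelFormatName(pixel_format), (unsigned int) colorspace);
}
#endif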
72
73@class SDLCaptureVideoDataOutputSampleBufferDelegate;
74
75// just a simple wrapper to help ARC manage memory...
76@interface SDLPrivateCameraData : NSObject
77@property(nonatomic, retain) AVCaptureSession *session;
78@property(nonatomic, retain) SDLCaptureVideoDataOutputSampleBufferDelegate *delegate;
79@property(nonatomic, assign) CMSampleBufferRef current_sample;
80@end
81
82@implementation SDLPrivateCameraData
83@end
84
85
86static bool CheckCameraPermissions(SDL_Camera *device)
87{
88 if (device->permission == 0) { // still expecting a permission result.
89 if (@available(macOS 10.14, *)) {
90 const AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
91 if (status != AVAuthorizationStatusNotDetermined) { // NotDetermined == still waiting for an answer from the user.
92 SDL_CameraPermissionOutcome(device, (status == AVAuthorizationStatusAuthorized) ? true : false);
93 }
94 } else {
95 SDL_CameraPermissionOutcome(device, true); // always allowed (or just unqueryable...?) on older macOS.
96 }
97 }
98
99 return (device->permission > 0);
100}
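
// Editor's illustrative sketch (not part of the upstream driver): the outcome reported
// above reaches applications as SDL_EVENT_CAMERA_DEVICE_APPROVED / _DENIED events and
// through SDL_GetCameraPermissionState(). App-side code would look roughly like this,
// assuming #include <SDL3/SDL.h>; WaitForCameraPermission is a hypothetical name.
#if 0   // app-side example only; not compiled into the driver.
static bool WaitForCameraPermission(SDL_Camera *camera)
{
    while (SDL_GetCameraPermissionState(camera) == 0) {   // 0 == still waiting on the user.
        SDL_Event event;
        while (SDL_PollEvent(&event)) {
            if (event.type == SDL_EVENT_CAMERA_DEVICE_DENIED) {
                return false;   // the user refused access.
            } else if (event.type == SDL_EVENT_CAMERA_DEVICE_APPROVED) {
                return true;    // the user granted access.
            }
        }
        SDL_Delay(10);
    }
    return (SDL_GetCameraPermissionState(camera) > 0);
}
#endif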
101
102// this delegate just receives new video frames on a Grand Central Dispatch queue, and fires off the
103// main device thread iterate function directly to consume it.
104@interface SDLCaptureVideoDataOutputSampleBufferDelegate : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate>
105 @property SDL_Camera *device;
106 -(id) init:(SDL_Camera *) dev;
107 -(void) captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
108@end
109
110@implementation SDLCaptureVideoDataOutputSampleBufferDelegate
111
112 -(id) init:(SDL_Camera *) dev {
113 if ( self = [super init] ) {
114 _device = dev;
115 }
116 return self;
117 }
118
119 - (void) captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
120 {
121 SDL_Camera *device = self.device;
122 if (!device || !device->hidden) {
123 return; // oh well.
124 }
125
126 if (!CheckCameraPermissions(device)) {
127 return; // nothing to do right now, dump what is probably a completely black frame.
128 }
129
130 SDLPrivateCameraData *hidden = (__bridge SDLPrivateCameraData *) device->hidden;
131 hidden.current_sample = sampleBuffer;
132 SDL_CameraThreadIterate(device);
133 hidden.current_sample = NULL;
134 }
135
136 - (void)captureOutput:(AVCaptureOutput *)output didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
137 {
138 #if DEBUG_CAMERA
139 SDL_Log("CAMERA: Drop frame.");
140 #endif
141 }
142@end
143
144static bool COREMEDIA_WaitDevice(SDL_Camera *device)
145{
146 return true; // this isn't used at the moment, since we run our own thread out of Grand Central Dispatch.
147}
148
149static SDL_CameraFrameResult COREMEDIA_AcquireFrame(SDL_Camera *device, SDL_Surface *frame, Uint64 *timestampNS)
150{
151 SDL_CameraFrameResult result = SDL_CAMERA_FRAME_READY;
152 SDLPrivateCameraData *hidden = (__bridge SDLPrivateCameraData *) device->hidden;
153 CMSampleBufferRef sample_buffer = hidden.current_sample;
154 hidden.current_sample = NULL;
155 SDL_assert(sample_buffer != NULL); // should only have been called from our delegate with a new frame.
156
157 CMSampleTimingInfo timinginfo;
158 if (CMSampleBufferGetSampleTimingInfo(sample_buffer, 0, &timinginfo) == noErr) {
159 *timestampNS = (Uint64) (CMTimeGetSeconds(timinginfo.presentationTimeStamp) * ((Float64) SDL_NS_PER_SECOND));
160 } else {
161 SDL_assert(!"this shouldn't happen, I think.");
162 *timestampNS = 0;
163 }
164
165 CVImageBufferRef image = CMSampleBufferGetImageBuffer(sample_buffer); // does not retain `image` (and we don't want it to).
166 const int numPlanes = (int) CVPixelBufferGetPlaneCount(image);
167 const int planar = (int) CVPixelBufferIsPlanar(image);
168
169 #if DEBUG_CAMERA
170 const int w = (int) CVPixelBufferGetWidth(image);
171 const int h = (int) CVPixelBufferGetHeight(image);
172 const int sz = (int) CVPixelBufferGetDataSize(image);
173 const int pitch = (int) CVPixelBufferGetBytesPerRow(image);
174 SDL_Log("CAMERA: buffer planar=%d numPlanes=%d %d x %d sz=%d pitch=%d", planar, numPlanes, w, h, sz, pitch);
175 #endif
176
177 // !!! FIXME: this currently copies the data to the surface (see the FIXME below about non-contiguous planar surfaces), but in theory we could just keep this locked until ReleaseFrame...
178 CVPixelBufferLockBaseAddress(image, 0);
179
180 frame->w = (int)CVPixelBufferGetWidth(image);
181 frame->h = (int)CVPixelBufferGetHeight(image);
182
183 if ((planar == 0) && (numPlanes == 0)) {
184 const int pitch = (int) CVPixelBufferGetBytesPerRow(image);
185 const size_t buflen = pitch * frame->h;
186 frame->pixels = SDL_aligned_alloc(SDL_GetSIMDAlignment(), buflen);
187 if (frame->pixels == NULL) {
188 result = SDL_CAMERA_FRAME_ERROR;
189 } else {
190 frame->pitch = pitch;
191 SDL_memcpy(frame->pixels, CVPixelBufferGetBaseAddress(image), buflen);
192 }
193 } else {
194 // !!! FIXME: we have an open issue in SDL3 to allow SDL_Surface to support non-contiguous planar data, but we don't have it yet.
195 size_t buflen = 0;
196 for (int i = 0; i < numPlanes; i++) {
197 size_t plane_height = CVPixelBufferGetHeightOfPlane(image, i);
198 size_t plane_pitch = CVPixelBufferGetBytesPerRowOfPlane(image, i);
199 size_t plane_size = (plane_pitch * plane_height);
200 buflen += plane_size;
201 }
202
203 frame->pitch = (int)CVPixelBufferGetBytesPerRowOfPlane(image, 0); // this is what SDL3 currently expects
204 frame->pixels = SDL_aligned_alloc(SDL_GetSIMDAlignment(), buflen);
205 if (frame->pixels == NULL) {
206 result = SDL_CAMERA_FRAME_ERROR;
207 } else {
208 Uint8 *dst = frame->pixels;
209 for (int i = 0; i < numPlanes; i++) {
210 const void *src = CVPixelBufferGetBaseAddressOfPlane(image, i);
211 size_t plane_height = CVPixelBufferGetHeightOfPlane(image, i);
212 size_t plane_pitch = CVPixelBufferGetBytesPerRowOfPlane(image, i);
213 size_t plane_size = (plane_pitch * plane_height);
214 SDL_memcpy(dst, src, plane_size);
215 dst += plane_size;
216 }
217 }
218 }
219
220 CVPixelBufferUnlockBaseAddress(image, 0);
221
222 return result;
223}
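
// Editor's illustrative sketch (not part of the upstream driver): the copy made above is
// what an application eventually receives from SDL_AcquireCameraFrame(). A minimal
// app-side consumer, assuming application code with #include <SDL3/SDL.h>;
// ConsumeOneFrame is a hypothetical name.
#if 0   // app-side example only; not compiled into the driver.
static void ConsumeOneFrame(SDL_Camera *camera)
{
    Uint64 timestampNS = 0;
    SDL_Surface *frame = SDL_AcquireCameraFrame(camera, &timestampNS);
    if (frame) {   // NULL just means no new frame is ready yet.
        SDL_Log("Got a %dx%d %s frame at %" SDL_PRIu64 " ns",
                frame->w, frame->h, SDL_GetPixelFormatName(frame->format), timestampNS);
        // ...upload to a texture, run vision code, etc...
        SDL_ReleaseCameraFrame(camera, frame);   // hand the surface back so the driver can free its copy.
    }
}
#endif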
224
225static void COREMEDIA_ReleaseFrame(SDL_Camera *device, SDL_Surface *frame)
226{
227 // !!! FIXME: AcquireFrame currently copies the data into the surface, so all we do here is free that copy; in theory we could instead keep the CVPixelBuffer locked until this point...
228 SDL_aligned_free(frame->pixels);
229}
230
231static void COREMEDIA_CloseDevice(SDL_Camera *device)
232{
233 if (device && device->hidden) {
234 SDLPrivateCameraData *hidden = (SDLPrivateCameraData *) CFBridgingRelease(device->hidden);
235 device->hidden = NULL;
236
237 AVCaptureSession *session = hidden.session;
238 if (session) {
239 hidden.session = nil;
240 [session stopRunning];
241 [session removeInput:[session.inputs objectAtIndex:0]];
242 [session removeOutput:(AVCaptureVideoDataOutput*)[session.outputs objectAtIndex:0]];
243 session = nil;
244 }
245
246 hidden.delegate = NULL;
247 hidden.current_sample = NULL;
248 }
249}
250
251static bool COREMEDIA_OpenDevice(SDL_Camera *device, const SDL_CameraSpec *spec)
252{
253 AVCaptureDevice *avdevice = (__bridge AVCaptureDevice *) device->handle;
254
255 // Pick format that matches the spec
256 const int w = spec->width;
257 const int h = spec->height;
258 const float rate = (float)spec->framerate_numerator / spec->framerate_denominator;
259 AVCaptureDeviceFormat *spec_format = nil;
260 NSArray<AVCaptureDeviceFormat *> *formats = [avdevice formats];
261 for (AVCaptureDeviceFormat *format in formats) {
262 CMFormatDescriptionRef formatDescription = [format formatDescription];
263 SDL_PixelFormat device_format = SDL_PIXELFORMAT_UNKNOWN;
264 SDL_Colorspace device_colorspace = SDL_COLORSPACE_UNKNOWN;
265 CoreMediaFormatToSDL(CMFormatDescriptionGetMediaSubType(formatDescription), &device_format, &device_colorspace);
266 if (device_format != spec->format || device_colorspace != spec->colorspace) {
267 continue;
268 }
269
270 const CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(formatDescription);
271 if ((int)dim.width != w || (int)dim.height != h) {
272 continue;
273 }
274
275 const float FRAMERATE_EPSILON = 0.01f;
276 for (AVFrameRateRange *framerate in format.videoSupportedFrameRateRanges) {
277 if (rate > (framerate.minFrameRate - FRAMERATE_EPSILON) &&
278 rate < (framerate.maxFrameRate + FRAMERATE_EPSILON)) {
279 spec_format = format;
280 break;
281 }
282 }
283
284 if (spec_format != nil) {
285 break;
286 }
287 }
288
289 if (spec_format == nil) {
290 return SDL_SetError("camera spec format not available");
291 } else if (![avdevice lockForConfiguration:NULL]) {
292 return SDL_SetError("Cannot lockForConfiguration");
293 }
294
295 avdevice.activeFormat = spec_format;
296 [avdevice unlockForConfiguration];
297
298 AVCaptureSession *session = [[AVCaptureSession alloc] init];
299 if (session == nil) {
300 return SDL_SetError("Failed to allocate/init AVCaptureSession");
301 }
302
303 session.sessionPreset = AVCaptureSessionPresetHigh;
304#if defined(SDL_PLATFORM_IOS)
305 if (@available(iOS 10.0, tvOS 17.0, *)) {
306 session.automaticallyConfiguresCaptureDeviceForWideColor = NO;
307 }
308#endif
309
310 NSError *error = nil;
311 AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:avdevice error:&error];
312 if (!input) {
313 return SDL_SetError("Cannot create AVCaptureDeviceInput");
314 }
315
316 AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
317 if (!output) {
318 return SDL_SetError("Cannot create AVCaptureVideoDataOutput");
319 }
320
321 output.videoSettings = @{
322 (id)kCVPixelBufferWidthKey : @(spec->width),
323 (id)kCVPixelBufferHeightKey : @(spec->height),
324 (id)kCVPixelBufferPixelFormatTypeKey : @(CMFormatDescriptionGetMediaSubType([spec_format formatDescription]))
325 };
326
327 char threadname[64];
328 SDL_GetCameraThreadName(device, threadname, sizeof (threadname));
329 dispatch_queue_t queue = dispatch_queue_create(threadname, NULL);
330 //dispatch_queue_t queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
331 if (!queue) {
332 return SDL_SetError("dispatch_queue_create() failed");
333 }
334
335 SDLCaptureVideoDataOutputSampleBufferDelegate *delegate = [[SDLCaptureVideoDataOutputSampleBufferDelegate alloc] init:device];
336 if (delegate == nil) {
337 return SDL_SetError("Cannot create SDLCaptureVideoDataOutputSampleBufferDelegate");
338 }
339 [output setSampleBufferDelegate:delegate queue:queue];
340
341 if (![session canAddInput:input]) {
342 return SDL_SetError("Cannot add AVCaptureDeviceInput");
343 }
344 [session addInput:input];
345
346 if (![session canAddOutput:output]) {
347 return SDL_SetError("Cannot add AVCaptureVideoDataOutput");
348 }
349 [session addOutput:output];
350
351 [session commitConfiguration];
352
353 SDLPrivateCameraData *hidden = [[SDLPrivateCameraData alloc] init];
354 if (hidden == nil) {
355 return SDL_SetError("Cannot create SDLPrivateCameraData");
356 }
357
358 hidden.session = session;
359 hidden.delegate = delegate;
360 hidden.current_sample = NULL;
361 device->hidden = (struct SDL_PrivateCameraData *)CFBridgingRetain(hidden);
362
363 [session startRunning]; // !!! FIXME: docs say this can block while camera warms up and shouldn't be done on main thread. Maybe push through `queue`?
364
365 CheckCameraPermissions(device); // check right away, in case the process is already granted permission.
366
367 return true;
368}
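
// Editor's illustrative sketch (not part of the upstream driver): the spec that reaches
// COREMEDIA_OpenDevice() comes from the application's SDL_OpenCamera() call (or is chosen
// by SDL when the app passes NULL). A minimal open, assuming application code with
// #include <SDL3/SDL.h>; OpenFirstCameraAt720p is a hypothetical name.
#if 0   // app-side example only; not compiled into the driver.
static SDL_Camera *OpenFirstCameraAt720p(void)
{
    int count = 0;
    SDL_CameraID *cameras = SDL_GetCameras(&count);
    if (!cameras || count == 0) {
        SDL_free(cameras);
        return NULL;   // no cameras found (or the camera subsystem isn't initialized).
    }

    SDL_CameraSpec spec;
    SDL_zero(spec);
    spec.format = SDL_PIXELFORMAT_NV12;               // a format this backend maps from CoreMedia.
    spec.colorspace = SDL_COLORSPACE_BT709_LIMITED;
    spec.width = 1280;
    spec.height = 720;
    spec.framerate_numerator = 30;
    spec.framerate_denominator = 1;

    SDL_Camera *camera = SDL_OpenCamera(cameras[0], &spec);  // SDL converts if the hardware can't match exactly.
    SDL_free(cameras);
    return camera;
}
#endif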
369
370static void COREMEDIA_FreeDeviceHandle(SDL_Camera *device)
371{
372 if (device && device->handle) {
373 CFBridgingRelease(device->handle);
374 }
375}
376
377static void GatherCameraSpecs(AVCaptureDevice *device, CameraFormatAddData *add_data)
378{
379 SDL_zerop(add_data);
380
381 for (AVCaptureDeviceFormat *fmt in device.formats) {
382 if (CMFormatDescriptionGetMediaType(fmt.formatDescription) != kCMMediaType_Video) {
383 continue;
384 }
385
386//NSLog(@"Available camera format: %@\n", fmt);
387 SDL_PixelFormat device_format = SDL_PIXELFORMAT_UNKNOWN;
388 SDL_Colorspace device_colorspace = SDL_COLORSPACE_UNKNOWN;
389 CoreMediaFormatToSDL(CMFormatDescriptionGetMediaSubType(fmt.formatDescription), &device_format, &device_colorspace);
390 if (device_format == SDL_PIXELFORMAT_UNKNOWN) {
391 continue;
392 }
393
394 const CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(fmt.formatDescription);
395 const int w = (int) dims.width;
396 const int h = (int) dims.height;
397 for (AVFrameRateRange *framerate in fmt.videoSupportedFrameRateRanges) {
398 int min_numerator = 0, min_denominator = 1;
399 int max_numerator = 0, max_denominator = 1;
400
401 SDL_CalculateFraction(framerate.minFrameRate, &min_numerator, &min_denominator);
402 SDL_AddCameraFormat(add_data, device_format, device_colorspace, w, h, min_numerator, min_denominator);
403 SDL_CalculateFraction(framerate.maxFrameRate, &max_numerator, &max_denominator);
404 if (max_numerator != min_numerator || max_denominator != min_denominator) {
405 SDL_AddCameraFormat(add_data, device_format, device_colorspace, w, h, max_numerator, max_denominator);
406 }
407 }
408 }
409}
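
// Editor's illustrative sketch (not part of the upstream driver): the specs gathered above
// are what an application sees from SDL_GetCameraSupportedFormats(). App-side code,
// assuming #include <SDL3/SDL.h>; ListCameraFormats is a hypothetical name.
#if 0   // app-side example only; not compiled into the driver.
static void ListCameraFormats(SDL_CameraID camid)
{
    int count = 0;
    SDL_CameraSpec **specs = SDL_GetCameraSupportedFormats(camid, &count);
    if (specs) {
        for (int i = 0; i < count; i++) {
            const SDL_CameraSpec *s = specs[i];
            SDL_Log("  %dx%d @ %d/%d fps, %s", s->width, s->height,
                    s->framerate_numerator, s->framerate_denominator,
                    SDL_GetPixelFormatName(s->format));
        }
        SDL_free(specs);   // a single allocation; freeing it releases the specs too.
    }
}
#endif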
410
411static bool FindCoreMediaCameraByUniqueID(SDL_Camera *device, void *userdata)
412{
413 NSString *uniqueid = (__bridge NSString *) userdata;
414 AVCaptureDevice *avdev = (__bridge AVCaptureDevice *) device->handle;
415 return ([uniqueid isEqualToString:avdev.uniqueID]) ? true : false;
416}
417
418static void MaybeAddDevice(AVCaptureDevice *avdevice)
419{
420 if (!avdevice.connected) {
421 return; // not connected.
422 } else if (![avdevice hasMediaType:AVMediaTypeVideo]) {
423 return; // not a camera.
424 } else if (SDL_FindPhysicalCameraByCallback(FindCoreMediaCameraByUniqueID, (__bridge void *) avdevice.uniqueID)) {
425 return; // already have this one.
426 }
427
428 CameraFormatAddData add_data;
429 GatherCameraSpecs(avdevice, &add_data);
430 if (add_data.num_specs > 0) {
431 SDL_CameraPosition position = SDL_CAMERA_POSITION_UNKNOWN;
432 if (avdevice.position == AVCaptureDevicePositionFront) {
433 position = SDL_CAMERA_POSITION_FRONT_FACING;
434 } else if (avdevice.position == AVCaptureDevicePositionBack) {
435 position = SDL_CAMERA_POSITION_BACK_FACING;
436 }
437 SDL_AddCamera(avdevice.localizedName.UTF8String, position, add_data.num_specs, add_data.specs, (void *) CFBridgingRetain(avdevice));
438 }
439
440 SDL_free(add_data.specs);
441}
442
443static void COREMEDIA_DetectDevices(void)
444{
445 NSArray<AVCaptureDevice *> *devices = nil;
446
447 if (@available(macOS 10.15, iOS 13, *)) {
448 // kind of annoying that there isn't a "give me anything that looks like a camera" option,
449 // so this list will need to be updated when Apple decides to add
450 // AVCaptureDeviceTypeBuiltInQuadrupleCamera some day.
451 NSArray *device_types = @[
452 #ifdef SDL_PLATFORM_IOS
453 AVCaptureDeviceTypeBuiltInTelephotoCamera,
454 AVCaptureDeviceTypeBuiltInDualCamera,
455 AVCaptureDeviceTypeBuiltInDualWideCamera,
456 AVCaptureDeviceTypeBuiltInTripleCamera,
457 AVCaptureDeviceTypeBuiltInUltraWideCamera,
458 #else
459 AVCaptureDeviceTypeExternalUnknown,
460 #endif
461 AVCaptureDeviceTypeBuiltInWideAngleCamera
462 ];
463
464 AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
465 discoverySessionWithDeviceTypes:device_types
466 mediaType:AVMediaTypeVideo
467 position:AVCaptureDevicePositionUnspecified];
468
469 devices = discoverySession.devices;
470 // !!! FIXME: this can use Key Value Observation to get hotplug events.
471 } else {
472 // this is deprecated but works back to macOS 10.7; 10.15 added AVCaptureDeviceDiscoverySession as a replacement.
473 devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
474 // !!! FIXME: this can use AVCaptureDeviceWasConnectedNotification and AVCaptureDeviceWasDisconnectedNotification with NSNotificationCenter to get hotplug events.
475 }
476
477 for (AVCaptureDevice *device in devices) {
478 MaybeAddDevice(device);
479 }
480}
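
// Editor's illustrative sketch (not part of the upstream driver): devices added by
// MaybeAddDevice() above show up in the application's SDL_GetCameras() list. App-side
// code, assuming #include <SDL3/SDL.h>; ListCameras is a hypothetical name.
#if 0   // app-side example only; not compiled into the driver.
static void ListCameras(void)
{
    int count = 0;
    SDL_CameraID *cameras = SDL_GetCameras(&count);
    for (int i = 0; i < count; i++) {
        const char *posname = "unknown position";
        switch (SDL_GetCameraPosition(cameras[i])) {
        case SDL_CAMERA_POSITION_FRONT_FACING: posname = "front-facing"; break;
        case SDL_CAMERA_POSITION_BACK_FACING: posname = "back-facing"; break;
        default: break;
        }
        SDL_Log("Camera #%d: %s (%s)", i, SDL_GetCameraName(cameras[i]), posname);
    }
    SDL_free(cameras);
}
#endif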
481
482static void COREMEDIA_Deinitialize(void)
483{
484 // !!! FIXME: disable hotplug.
485}
486
487static bool COREMEDIA_Init(SDL_CameraDriverImpl *impl)
488{
489 impl->DetectDevices = COREMEDIA_DetectDevices;
490 impl->OpenDevice = COREMEDIA_OpenDevice;
491 impl->CloseDevice = COREMEDIA_CloseDevice;
492 impl->WaitDevice = COREMEDIA_WaitDevice;
493 impl->AcquireFrame = COREMEDIA_AcquireFrame;
494 impl->ReleaseFrame = COREMEDIA_ReleaseFrame;
495 impl->FreeDeviceHandle = COREMEDIA_FreeDeviceHandle;
496 impl->Deinitialize = COREMEDIA_Deinitialize;
497
498 impl->ProvidesOwnCallbackThread = true;
499
500 return true;
501}
502
503CameraBootStrap COREMEDIA_bootstrap = {
504 "coremedia", "SDL Apple CoreMedia camera driver", COREMEDIA_Init, false
505};
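
// Editor's illustrative sketch (not part of the upstream driver): this bootstrap entry is
// picked up by SDL's generic camera layer when the camera subsystem starts. An application
// normally just initializes the subsystem, and can optionally force this backend with the
// SDL_HINT_CAMERA_DRIVER hint (app-side code, assuming #include <SDL3/SDL.h>).
#if 0   // app-side example only; not compiled into the driver.
static bool InitCameraSubsystem(void)
{
    SDL_SetHint(SDL_HINT_CAMERA_DRIVER, "coremedia");   // optional; SDL picks a driver automatically.
    if (!SDL_Init(SDL_INIT_CAMERA)) {
        SDL_Log("SDL_Init(SDL_INIT_CAMERA) failed: %s", SDL_GetError());
        return false;
    }
    SDL_Log("Using camera driver: %s", SDL_GetCurrentCameraDriver());
    return true;
}
#endif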
506
507#endif // SDL_CAMERA_DRIVER_COREMEDIA
508