//
//  CvVideoCamera2.mm
//
//  Created by Giles Payne on 2020/03/11.
//

#import "Mat.h"
#import "CvCamera2.h"
#import <UIKit/UIKit.h>

static CGFloat DegreesToRadians(CGFloat degrees) { return degrees * M_PI / 180; }
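
// A minimal usage sketch, assuming a parent UIView (parentView) and a delegate
// object adopting CvVideoCameraDelegate2 (myDelegate) -- both are hypothetical
// names; everything else is API declared in this file or in CvCamera2.h:
//
//     CvVideoCamera2* camera = [[CvVideoCamera2 alloc] initWithParentView:parentView];
//     camera.delegate = myDelegate;   // receives -processImage: once per frame
//     camera.grayscaleMode = NO;      // deliver BGRA (CV_8UC4) frames
//     camera.defaultFPS = 30;
//     camera.recordVideo = NO;        // YES would also record to output.mov
//     [camera start];
//     // ... later ...
//     [camera stop];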

#pragma mark - Private Interface

@interface CvVideoCamera2 () {
    int recordingCountDown;
}

- (void)createVideoDataOutput;
- (void)createVideoFileOutput;

@property (nonatomic, strong) CALayer *customPreviewLayer;
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput;

@end

#pragma mark - Implementation

@implementation CvVideoCamera2
{
    id<CvVideoCameraDelegate2> _delegate;
    dispatch_queue_t videoDataOutputQueue;
    CMTime lastSampleTime;
}

- (void)setDelegate:(id<CvVideoCameraDelegate2>)newDelegate {
    _delegate = newDelegate;
}

- (id<CvVideoCameraDelegate2>)delegate {
    return _delegate;
}

#pragma mark - Constructors

- (id)initWithParentView:(UIView*)parent {
    self = [super initWithParentView:parent];
    if (self) {
        parent.contentMode = UIViewContentModeScaleAspectFill;
        self.useAVCaptureVideoPreviewLayer = NO;
        self.recordVideo = NO;
        self.rotateVideo = NO;
        self.defaultAVCaptureDevicePosition = AVCaptureDevicePositionBack;
        self.defaultAVCaptureSessionPreset = AVCaptureSessionPresetHigh;
        self.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationPortrait;
        self.defaultFPS = 30;
        self.grayscaleMode = NO;
    }
    return self;
}

#pragma mark - Public interface

- (void)start {
    if (self.running == YES) {
        return;
    }
    recordingCountDown = 10;
    [super start];

    if (self.recordVideo == YES) {
        // remove any leftover recording from a previous run, logging only when
        // a file was actually deleted
        NSError* error = nil;
        if ([[NSFileManager defaultManager] fileExistsAtPath:[self videoFileString]]) {
            [[NSFileManager defaultManager] removeItemAtPath:[self videoFileString] error:&error];
            if (error == nil) {
                NSLog(@"[Camera] Deleted stale video file %@", [self videoFileString]);
            }
        }
    }
}

- (void)stop {
    if (self.running == YES) {
        [super stop];
        if (self.recordVideo == YES) {
            if (self.recordAssetWriter) {
                if (self.recordAssetWriter.status == AVAssetWriterStatusWriting) {
                    [self.recordAssetWriter finishWritingWithCompletionHandler:^void() {
                        NSLog(@"[Camera] recording stopped");
                    }];
                } else {
                    NSLog(@"[Camera] Recording Error: asset writer status is not writing");
                }
            }
        }

        if (self.customPreviewLayer) {
            [self.customPreviewLayer removeFromSuperlayer];
            self.customPreviewLayer = nil;
        }
    }
}

// TODO fix
- (void)adjustLayoutToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
    NSLog(@"adjust layout to interface orientation");
    if (self.parentView != nil) {
        CALayer* layer = self.customPreviewLayer;
        CGRect bounds = self.customPreviewLayer.bounds;
        int rotation_angle = 0;
        bool flip_bounds = false;

        // angle contributed by the current interface orientation
        switch (interfaceOrientation) {
            case UIInterfaceOrientationPortrait:
                NSLog(@"to Portrait");
                rotation_angle = 270;
                break;
            case UIInterfaceOrientationPortraitUpsideDown:
                rotation_angle = 90;
                NSLog(@"to UpsideDown");
                break;
            case UIInterfaceOrientationLandscapeLeft:
                rotation_angle = 0;
                NSLog(@"to LandscapeLeft");
                break;
            case UIInterfaceOrientationLandscapeRight:
                rotation_angle = 180;
                NSLog(@"to LandscapeRight");
                break;
            default:
                break; // leave the layer in its last known orientation
        }

        // angle contributed by the configured capture orientation
        switch (self.defaultAVCaptureVideoOrientation) {
            case AVCaptureVideoOrientationLandscapeRight:
                rotation_angle += 180;
                break;
            case AVCaptureVideoOrientationPortraitUpsideDown:
                rotation_angle += 270;
                break;
            case AVCaptureVideoOrientationPortrait:
                rotation_angle += 90;
                // fall through -- LandscapeLeft adds nothing
            case AVCaptureVideoOrientationLandscapeLeft:
                break;
            default:
                break;
        }
        rotation_angle = rotation_angle % 360;

        if (rotation_angle == 90 || rotation_angle == 270) {
            flip_bounds = true;
        }

        if (flip_bounds) {
            NSLog(@"flip bounds");
            bounds = CGRectMake(0, 0, bounds.size.height, bounds.size.width);
        }

        layer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
        self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
        layer.affineTransform = CGAffineTransformMakeRotation(DegreesToRadians(rotation_angle));
        layer.bounds = bounds;
    }
}

// TODO fix
- (void)layoutPreviewLayer {
    NSLog(@"layout preview layer");
    if (self.parentView != nil) {
        CALayer* layer = self.customPreviewLayer;
        CGRect bounds = self.customPreviewLayer.bounds;
        int rotation_angle = 0;
        bool flip_bounds = false;

        // angle contributed by the current device orientation
        switch (self.currentDeviceOrientation) {
            case UIDeviceOrientationPortrait:
                rotation_angle = 270;
                break;
            case UIDeviceOrientationPortraitUpsideDown:
                rotation_angle = 90;
                break;
            case UIDeviceOrientationLandscapeLeft:
                NSLog(@"left");
                rotation_angle = 180;
                break;
            case UIDeviceOrientationLandscapeRight:
                NSLog(@"right");
                rotation_angle = 0;
                break;
            case UIDeviceOrientationFaceUp:
            case UIDeviceOrientationFaceDown:
            default:
                break; // leave the layer in its last known orientation
        }

        // angle contributed by the configured capture orientation
        switch (self.defaultAVCaptureVideoOrientation) {
            case AVCaptureVideoOrientationLandscapeRight:
                rotation_angle += 180;
                break;
            case AVCaptureVideoOrientationPortraitUpsideDown:
                rotation_angle += 270;
                break;
            case AVCaptureVideoOrientationPortrait:
                rotation_angle += 90;
                // fall through -- LandscapeLeft adds nothing
            case AVCaptureVideoOrientationLandscapeLeft:
                break;
            default:
                break;
        }
        rotation_angle = rotation_angle % 360;

        if (rotation_angle == 90 || rotation_angle == 270) {
            flip_bounds = true;
        }

        if (flip_bounds) {
            NSLog(@"flip bounds");
            bounds = CGRectMake(0, 0, bounds.size.height, bounds.size.width);
        }

        layer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
        layer.affineTransform = CGAffineTransformMakeRotation(DegreesToRadians(rotation_angle));
        layer.bounds = bounds;
    }
}
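
// Worked example for the angle arithmetic in the two methods above: with the
// device held Portrait (270 degrees) and defaultAVCaptureVideoOrientation set
// to Portrait (+90), the sum is 360 % 360 = 0, so the preview layer gets no
// extra rotation and its bounds are not flipped.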

#pragma mark - Private Interface

- (void)createVideoDataOutput {
    // Make a video data output
    self.videoDataOutput = [AVCaptureVideoDataOutput new];

    // In grayscale mode we want YUV (YpCbCr 4:2:0) so we can directly access the graylevel intensity values (Y component);
    // in color mode, the BGRA format is used
    OSType format = self.grayscaleMode ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;
    self.videoDataOutput.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:format]
                                                                     forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    // discard if the data output queue is blocked (as we process the still image)
    [self.videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];

    if ([self.captureSession canAddOutput:self.videoDataOutput]) {
        [self.captureSession addOutput:self.videoDataOutput];
    }
    [[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];

    // set default FPS
    AVCaptureDeviceInput *currentInput = [self.captureSession.inputs objectAtIndex:0];
    AVCaptureDevice *device = currentInput.device;
    NSError *error = nil;
    [device lockForConfiguration:&error];
    float maxRate = ((AVFrameRateRange*)[device.activeFormat.videoSupportedFrameRateRanges objectAtIndex:0]).maxFrameRate;
    if (maxRate > self.defaultFPS - 1 && error == nil) {
        [device setActiveVideoMinFrameDuration:CMTimeMake(1, self.defaultFPS)];
        [device setActiveVideoMaxFrameDuration:CMTimeMake(1, self.defaultFPS)];
        NSLog(@"[Camera] FPS set to %d", self.defaultFPS);
    } else {
        NSLog(@"[Camera] unable to set defaultFPS at %d FPS, max is %f FPS", self.defaultFPS, maxRate);
    }
    if (error != nil) {
        NSLog(@"[Camera] unable to set defaultFPS: %@", error);
    }
    [device unlockForConfiguration];

    // set video mirroring for front camera (more intuitive)
    if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoMirroring) {
        if (self.defaultAVCaptureDevicePosition == AVCaptureDevicePositionFront) {
            [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMirrored = YES;
        } else {
            [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMirrored = NO;
        }
    }

    // set default video orientation
    if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoOrientation) {
        [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoOrientation = self.defaultAVCaptureVideoOrientation;
    }

    // create a custom preview layer
    self.customPreviewLayer = [CALayer layer];
    self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
    self.customPreviewLayer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
    [self updateOrientation];

    // create a serial dispatch queue used for the sample buffer delegate as well as when a still image is captured
    // a serial dispatch queue must be used to guarantee that video frames will be delivered in order
    // see the header doc for setSampleBufferDelegate:queue: for more information
    videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    [self.videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];

    NSLog(@"[Camera] created AVCaptureVideoDataOutput");
}
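
// Note: the pixel format chosen above determines what the delegate receives in
// -processImage: -- a single-channel (CV_8UC1) Mat wrapping the Y plane in
// grayscale mode, or a four-channel (CV_8UC4) BGRA Mat in color mode; see
// captureOutput:didOutputSampleBuffer:fromConnection: below.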

- (void)createVideoFileOutput {
    /* Video File Output in H.264, via AVAssetWriter */
    NSLog(@"Create Video with dimensions %dx%d", self.imageWidth, self.imageHeight);

    NSDictionary *outputSettings
        = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:self.imageWidth], AVVideoWidthKey,
                                                     [NSNumber numberWithInt:self.imageHeight], AVVideoHeightKey,
                                                     AVVideoCodecH264, AVVideoCodecKey,
                                                     nil];

    self.recordAssetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];

    int pixelBufferFormat = (self.grayscaleMode == YES) ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;

    self.recordPixelBufferAdaptor =
        [[AVAssetWriterInputPixelBufferAdaptor alloc]
            initWithAssetWriterInput:self.recordAssetWriterInput
            sourcePixelBufferAttributes:[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:pixelBufferFormat], kCVPixelBufferPixelFormatTypeKey, nil]];

    NSError* error = nil;
    NSLog(@"Create AVAssetWriter with url: %@", [self videoFileURL]);
    self.recordAssetWriter = [AVAssetWriter assetWriterWithURL:[self videoFileURL]
                                                      fileType:AVFileTypeMPEG4
                                                         error:&error];
    if (error != nil) {
        NSLog(@"[Camera] Unable to create AVAssetWriter: %@", error);
    }

    [self.recordAssetWriter addInput:self.recordAssetWriterInput];
    self.recordAssetWriterInput.expectsMediaDataInRealTime = YES;

    NSLog(@"[Camera] created AVAssetWriter");
}
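
// Frames are appended to this writer from the capture callback below: once
// recordingCountDown runs out, each rendered CGImage is converted back into a
// CVPixelBuffer and handed to recordPixelBufferAdaptor together with the
// sample buffer's presentation timestamp.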

- (void)createCaptureOutput {
    [self createVideoDataOutput];
    if (self.recordVideo == YES) {
        [self createVideoFileOutput];
    }
}

- (void)createCustomVideoPreview {
    [self.parentView.layer addSublayer:self.customPreviewLayer];
}

- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image {
    CGSize frameSize = CGSizeMake(CGImageGetWidth(image), CGImageGetHeight(image));
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:NO], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:NO], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    // __bridge avoids the extra retain (and leak) that CFBridgingRetain added here
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width,
                                          frameSize.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width,
                                                 frameSize.height, 8, 4*frameSize.width, rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
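
// The returned CVPixelBufferRef has a +1 retain count; the capture callback
// below balances it with CVPixelBufferRelease after appending the buffer to
// the asset writer.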

#pragma mark - Protocol AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    (void)captureOutput;
    (void)connection;
    auto strongDelegate = self.delegate;
    if (strongDelegate) {
        // convert from Core Media to Core Video
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        void* bufferAddress;
        size_t width;
        size_t height;
        size_t bytesPerRow;

        CGColorSpaceRef colorSpace;
        CGContextRef context;

        int format_opencv;
        OSType format = CVPixelBufferGetPixelFormatType(imageBuffer);
        if (format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
            // grayscale mode: wrap the luma (Y) plane directly
            format_opencv = CV_8UC1;
            bufferAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
            width = CVPixelBufferGetWidthOfPlane(imageBuffer, 0);
            height = CVPixelBufferGetHeightOfPlane(imageBuffer, 0);
            bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
        } else { // expect kCVPixelFormatType_32BGRA
            format_opencv = CV_8UC4;
            bufferAddress = CVPixelBufferGetBaseAddress(imageBuffer);
            width = CVPixelBufferGetWidth(imageBuffer);
            height = CVPixelBufferGetHeight(imageBuffer);
            bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        }

        // delegate image processing to the delegate
        cv::Mat image((int)height, (int)width, format_opencv, bufferAddress, bytesPerRow);
        CGImageRef dstImage;

        if ([strongDelegate respondsToSelector:@selector(processImage:)]) {
            [strongDelegate processImage:[Mat fromNative:image]];
        }

        // check if matrix data pointer or dimensions were changed by the delegate
        bool iOSimage = false;
        if (height == (size_t)image.rows && width == (size_t)image.cols && format_opencv == image.type() && bufferAddress == image.data && bytesPerRow == image.step) {
            iOSimage = true;
        }

        // (create color space, create graphics context, render buffer)
        CGBitmapInfo bitmapInfo;

        // basically we decide if it's a grayscale, rgb or rgba image
        if (image.channels() == 1) {
            colorSpace = CGColorSpaceCreateDeviceGray();
            bitmapInfo = kCGImageAlphaNone;
        } else if (image.channels() == 3) {
            colorSpace = CGColorSpaceCreateDeviceRGB();
            bitmapInfo = kCGImageAlphaNone;
            if (iOSimage) {
                bitmapInfo |= kCGBitmapByteOrder32Little;
            } else {
                bitmapInfo |= kCGBitmapByteOrder32Big;
            }
        } else {
            colorSpace = CGColorSpaceCreateDeviceRGB();
            bitmapInfo = kCGImageAlphaPremultipliedFirst;
            if (iOSimage) {
                bitmapInfo |= kCGBitmapByteOrder32Little;
            } else {
                bitmapInfo |= kCGBitmapByteOrder32Big;
            }
        }

        if (iOSimage) {
            // the buffer is unchanged, so render straight from it
            context = CGBitmapContextCreate(bufferAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo);
            dstImage = CGBitmapContextCreateImage(context);
            CGContextRelease(context);
        } else {
            // the delegate replaced the data, so copy it out of the cv::Mat
            NSData *data = [NSData dataWithBytes:image.data length:image.elemSize()*image.total()];
            CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

            // Creating CGImage from cv::Mat
            dstImage = CGImageCreate(image.cols,                 // width
                                     image.rows,                 // height
                                     8,                          // bits per component
                                     8 * image.elemSize(),       // bits per pixel
                                     image.step,                 // bytesPerRow
                                     colorSpace,                 // colorspace
                                     bitmapInfo,                 // bitmap info
                                     provider,                   // CGDataProviderRef
                                     NULL,                       // decode
                                     false,                      // should interpolate
                                     kCGRenderingIntentDefault   // intent
                                     );
            CGDataProviderRelease(provider);
        }

        // render buffer
        dispatch_sync(dispatch_get_main_queue(), ^{
            self.customPreviewLayer.contents = (__bridge id)dstImage;
        });

        recordingCountDown--;
        if (self.recordVideo == YES && recordingCountDown < 0) {
            lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            // CMTimeShow(lastSampleTime);
            if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
                [self.recordAssetWriter startWriting];
                [self.recordAssetWriter startSessionAtSourceTime:lastSampleTime];
                if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
                    NSLog(@"[Camera] Recording Error: asset writer status is not writing: %@", self.recordAssetWriter.error);
                    // release resources before bailing out; the early return
                    // otherwise leaks the image, color space and locked buffer
                    CGImageRelease(dstImage);
                    CGColorSpaceRelease(colorSpace);
                    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
                    return;
                } else {
                    NSLog(@"[Camera] Video recording started");
                }
            }

            if (self.recordAssetWriterInput.readyForMoreMediaData) {
                CVImageBufferRef pixelBuffer = [self pixelBufferFromCGImage:dstImage];
                if (![self.recordPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                                 withPresentationTime:lastSampleTime]) {
                    NSLog(@"Video Writing Error");
                }
                if (pixelBuffer != nullptr)
                    CVPixelBufferRelease(pixelBuffer);
            }
        }

        // cleanup
        CGImageRelease(dstImage);
        CGColorSpaceRelease(colorSpace);
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    }
}
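
// A minimal delegate sketch (the class name is hypothetical; it assumes the
// Objective-C Mat wrapper exposes the underlying cv::Mat as nativeRef, which
// lets the frame be edited in place so the change shows up on the preview):
//
//     @interface InvertingProcessor : NSObject<CvVideoCameraDelegate2>
//     @end
//
//     @implementation InvertingProcessor
//     - (void)processImage:(Mat*)image {
//         cv::bitwise_not(image.nativeRef, image.nativeRef); // invert each channel
//     }
//     @end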

- (void)updateOrientation {
    if (self.rotateVideo == YES)
    {
        NSLog(@"rotate..");
        self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
        [self layoutPreviewLayer];
    }
}

- (void)saveVideo {
    if (self.recordVideo == NO) {
        return;
    }
    UISaveVideoAtPathToSavedPhotosAlbum([self videoFileString], nil, nil, NULL);
}

- (NSURL *)videoFileURL {
    NSString *outputPath = [self videoFileString];
    NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath]) {
        NSLog(@"file exists");
    }
    return outputURL;
}

- (NSString *)videoFileString {
    NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
    return outputPath;
}

@end