GPUImageMovieWriter.m
#import "GPUImageMovieWriter.h"
#import "GPUImageContext.h"
#import "GLProgram.h"
#import "GPUImageFilter.h"

NSString *const kGPUImageColorSwizzlingFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;

 void main()
 {
     gl_FragColor = texture2D(inputImageTexture, textureCoordinate).bgra;
 }
);
@interface GPUImageMovieWriter ()
{
    GLuint movieFramebuffer, movieRenderbuffer;

    GLProgram *colorSwizzlingProgram;
    GLint colorSwizzlingPositionAttribute, colorSwizzlingTextureCoordinateAttribute;
    GLint colorSwizzlingInputTextureUniform;

    GPUImageFramebuffer *firstInputFramebuffer;

    CMTime startTime, previousFrameTime, previousAudioTime;

    dispatch_queue_t audioQueue, videoQueue;
    BOOL audioEncodingIsFinished, videoEncodingIsFinished;

    BOOL isRecording;
}

// Movie recording
- (void)initializeMovieWithOutputSettings:(NSDictionary *)outputSettings;

// Frame rendering
- (void)createDataFBO;
- (void)destroyDataFBO;
- (void)setFilterFBO;

- (void)renderAtInternalSizeUsingFramebuffer:(GPUImageFramebuffer *)inputFramebufferToUse;

@end
@implementation GPUImageMovieWriter

@synthesize hasAudioTrack = _hasAudioTrack;
@synthesize encodingLiveVideo = _encodingLiveVideo;
@synthesize shouldPassthroughAudio = _shouldPassthroughAudio;
@synthesize completionBlock;
@synthesize failureBlock;
@synthesize videoInputReadyCallback;
@synthesize audioInputReadyCallback;
@synthesize enabled;
@synthesize shouldInvalidateAudioSampleWhenDone = _shouldInvalidateAudioSampleWhenDone;
@synthesize paused = _paused;
@synthesize movieWriterContext = _movieWriterContext;
@synthesize delegate = _delegate;
#pragma mark -
#pragma mark Initialization and teardown

- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;
{
    return [self initWithMovieURL:newMovieURL size:newSize fileType:AVFileTypeQuickTimeMovie outputSettings:nil];
}
- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSMutableDictionary *)outputSettings;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    _shouldInvalidateAudioSampleWhenDone = NO;

    self.enabled = YES;
    alreadyFinishedRecording = NO;
    videoEncodingIsFinished = NO;
    audioEncodingIsFinished = NO;

    videoSize = newSize;
    movieURL = newMovieURL;
    fileType = newFileType;
    startTime = kCMTimeInvalid;
    _encodingLiveVideo = [[outputSettings objectForKey:@"EncodingLiveVideo"] isKindOfClass:[NSNumber class]] ? [[outputSettings objectForKey:@"EncodingLiveVideo"] boolValue] : YES;
    previousFrameTime = kCMTimeNegativeInfinity;
    previousAudioTime = kCMTimeNegativeInfinity;
    inputRotation = kGPUImageNoRotation;

    _movieWriterContext = [[GPUImageContext alloc] init];
    [_movieWriterContext useSharegroup:[[[GPUImageContext sharedImageProcessingContext] context] sharegroup]];

    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        [_movieWriterContext useAsCurrentContext];

        if ([GPUImageContext supportsFastTextureUpload])
        {
            colorSwizzlingProgram = [_movieWriterContext programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];
        }
        else
        {
            colorSwizzlingProgram = [_movieWriterContext programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageColorSwizzlingFragmentShaderString];
        }

        if (!colorSwizzlingProgram.initialized)
        {
            [colorSwizzlingProgram addAttribute:@"position"];
            [colorSwizzlingProgram addAttribute:@"inputTextureCoordinate"];

            if (![colorSwizzlingProgram link])
            {
                NSString *progLog = [colorSwizzlingProgram programLog];
                NSLog(@"Program link log: %@", progLog);
                NSString *fragLog = [colorSwizzlingProgram fragmentShaderLog];
                NSLog(@"Fragment shader compile log: %@", fragLog);
                NSString *vertLog = [colorSwizzlingProgram vertexShaderLog];
                NSLog(@"Vertex shader compile log: %@", vertLog);
                colorSwizzlingProgram = nil;
                NSAssert(NO, @"Filter shader link failed");
            }
        }

        colorSwizzlingPositionAttribute = [colorSwizzlingProgram attributeIndex:@"position"];
        colorSwizzlingTextureCoordinateAttribute = [colorSwizzlingProgram attributeIndex:@"inputTextureCoordinate"];
        colorSwizzlingInputTextureUniform = [colorSwizzlingProgram uniformIndex:@"inputImageTexture"];

        [_movieWriterContext setContextShaderProgram:colorSwizzlingProgram];

        glEnableVertexAttribArray(colorSwizzlingPositionAttribute);
        glEnableVertexAttribArray(colorSwizzlingTextureCoordinateAttribute);
    });

    [self initializeMovieWithOutputSettings:outputSettings];

    return self;
}
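
// A minimal usage sketch (illustrative, not part of this file): `filter` stands in for
// whatever GPUImage source or filter feeds this writer, and `movieURL` is a writable file URL.
//
//     GPUImageMovieWriter *movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(480.0, 640.0)];
//     [filter addTarget:movieWriter];
//     [movieWriter startRecording];
//     // ... capture or process frames for a while ...
//     [filter removeTarget:movieWriter];
//     [movieWriter finishRecording];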
- (void)dealloc;
{
    [self destroyDataFBO];

#if !OS_OBJECT_USE_OBJC
    if (audioQueue != NULL)
    {
        dispatch_release(audioQueue);
    }
    if (videoQueue != NULL)
    {
        dispatch_release(videoQueue);
    }
#endif
}
#pragma mark -
#pragma mark Movie recording

- (void)initializeMovieWithOutputSettings:(NSDictionary *)outputSettings;
{
    isRecording = NO;

    self.enabled = YES;
    NSError *error = nil;
    assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType:fileType error:&error];
    if (error != nil)
    {
        NSLog(@"Error: %@", error);
        if (failureBlock)
        {
            failureBlock(error);
        }
        else
        {
            if (self.delegate && [self.delegate respondsToSelector:@selector(movieRecordingFailedWithError:)])
            {
                [self.delegate movieRecordingFailedWithError:error];
            }
        }
    }

    // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case.
    assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000);

    // Use default output settings if none are specified
    if (outputSettings == nil)
    {
        NSMutableDictionary *settings = [[NSMutableDictionary alloc] init];
        [settings setObject:AVVideoCodecH264 forKey:AVVideoCodecKey];
        [settings setObject:[NSNumber numberWithInt:videoSize.width] forKey:AVVideoWidthKey];
        [settings setObject:[NSNumber numberWithInt:videoSize.height] forKey:AVVideoHeightKey];
        outputSettings = settings;
    }
    // Custom output settings specified
    else
    {
        NSString *videoCodec = [outputSettings objectForKey:AVVideoCodecKey];
        NSNumber *width = [outputSettings objectForKey:AVVideoWidthKey];
        NSNumber *height = [outputSettings objectForKey:AVVideoHeightKey];

        NSAssert(videoCodec && width && height, @"OutputSettings is missing required parameters.");

        if ([outputSettings objectForKey:@"EncodingLiveVideo"]) {
            NSMutableDictionary *tmp = [outputSettings mutableCopy];
            [tmp removeObjectForKey:@"EncodingLiveVideo"];
            outputSettings = tmp;
        }
    }

    /*
    NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                [NSNumber numberWithInt:videoSize.width], AVVideoCleanApertureWidthKey,
                                                [NSNumber numberWithInt:videoSize.height], AVVideoCleanApertureHeightKey,
                                                [NSNumber numberWithInt:0], AVVideoCleanApertureHorizontalOffsetKey,
                                                [NSNumber numberWithInt:0], AVVideoCleanApertureVerticalOffsetKey,
                                                nil];

    NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              [NSNumber numberWithInt:3], AVVideoPixelAspectRatioHorizontalSpacingKey,
                                              [NSNumber numberWithInt:3], AVVideoPixelAspectRatioVerticalSpacingKey,
                                              nil];

    NSMutableDictionary *compressionProperties = [[NSMutableDictionary alloc] init];
    [compressionProperties setObject:videoCleanApertureSettings forKey:AVVideoCleanApertureKey];
    [compressionProperties setObject:videoAspectRatioSettings forKey:AVVideoPixelAspectRatioKey];
    [compressionProperties setObject:[NSNumber numberWithInt:2000000] forKey:AVVideoAverageBitRateKey];
    [compressionProperties setObject:[NSNumber numberWithInt:16] forKey:AVVideoMaxKeyFrameIntervalKey];
    [compressionProperties setObject:AVVideoProfileLevelH264Main31 forKey:AVVideoProfileLevelKey];

    [outputSettings setObject:compressionProperties forKey:AVVideoCompressionPropertiesKey];
    */

    assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
    assetWriterVideoInput.expectsMediaDataInRealTime = _encodingLiveVideo;

    // You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA.
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                                           [NSNumber numberWithInt:videoSize.width], kCVPixelBufferWidthKey,
                                                           [NSNumber numberWithInt:videoSize.height], kCVPixelBufferHeightKey,
                                                           nil];
//    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
//                                                           nil];

    assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:assetWriterVideoInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];

    [assetWriter addInput:assetWriterVideoInput];
}
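
// An illustrative custom-settings call (values here are assumptions, not the defaults built
// above): required keys are the codec, width, and height; the non-AVFoundation
// "EncodingLiveVideo" key is consumed by this class and stripped before reaching AVAssetWriter.
//
//     NSDictionary *settings = @{AVVideoCodecKey: AVVideoCodecH264,
//                                AVVideoWidthKey: @(720),
//                                AVVideoHeightKey: @(1280),
//                                @"EncodingLiveVideo": @(NO)}; // NO when re-encoding an existing movie rather than a live camera feed
//     GPUImageMovieWriter *writer = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL
//                                                                            size:CGSizeMake(720.0, 1280.0)
//                                                                        fileType:AVFileTypeQuickTimeMovie
//                                                                  outputSettings:[settings mutableCopy]];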
- (void)setEncodingLiveVideo:(BOOL)value
{
    _encodingLiveVideo = value;
    if (isRecording)
    {
        NSAssert(NO, @"Cannot change encodingLiveVideo while recording");
    }
    else
    {
        assetWriterVideoInput.expectsMediaDataInRealTime = _encodingLiveVideo;
        assetWriterAudioInput.expectsMediaDataInRealTime = _encodingLiveVideo;
    }
}
- (void)startRecording;
{
    alreadyFinishedRecording = NO;
    startTime = kCMTimeInvalid;
    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        if (audioInputReadyCallback == NULL)
        {
            [assetWriter startWriting];
        }
    });
    isRecording = YES;
    //    [assetWriter startSessionAtSourceTime:kCMTimeZero];
}
- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform;
{
    assetWriterVideoInput.transform = orientationTransform;

    [self startRecording];
}
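
// For example (illustrative), to record with the track rotated a quarter turn from the
// source orientation:
//
//     [movieWriter startRecordingInOrientation:CGAffineTransformMakeRotation(M_PI_2)];
//
// The transform is written into the video track's metadata rather than re-rendering frames,
// so players that honor track transforms display the rotation for free.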
- (void)cancelRecording;
{
    if (assetWriter.status == AVAssetWriterStatusCompleted)
    {
        return;
    }

    isRecording = NO;
    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        alreadyFinishedRecording = YES;

        if (assetWriter.status == AVAssetWriterStatusWriting && !videoEncodingIsFinished)
        {
            videoEncodingIsFinished = YES;
            [assetWriterVideoInput markAsFinished];
        }
        if (assetWriter.status == AVAssetWriterStatusWriting && !audioEncodingIsFinished)
        {
            audioEncodingIsFinished = YES;
            [assetWriterAudioInput markAsFinished];
        }
        [assetWriter cancelWriting];
    });
}
- (void)finishRecording;
{
    [self finishRecordingWithCompletionHandler:NULL];
}

- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler;
{
    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        isRecording = NO;

        if (assetWriter.status == AVAssetWriterStatusCompleted || assetWriter.status == AVAssetWriterStatusCancelled || assetWriter.status == AVAssetWriterStatusUnknown)
        {
            if (handler)
                runAsynchronouslyOnContextQueue(_movieWriterContext, handler);
            return;
        }
        if (assetWriter.status == AVAssetWriterStatusWriting && !videoEncodingIsFinished)
        {
            videoEncodingIsFinished = YES;
            [assetWriterVideoInput markAsFinished];
        }
        if (assetWriter.status == AVAssetWriterStatusWriting && !audioEncodingIsFinished)
        {
            audioEncodingIsFinished = YES;
            [assetWriterAudioInput markAsFinished];
        }
#if (!defined(__IPHONE_6_0) || (__IPHONE_OS_VERSION_MAX_ALLOWED < __IPHONE_6_0))
        // Not iOS 6 SDK
        [assetWriter finishWriting];
        if (handler)
            runAsynchronouslyOnContextQueue(_movieWriterContext, handler);
#else
        // iOS 6 SDK
        if ([assetWriter respondsToSelector:@selector(finishWritingWithCompletionHandler:)]) {
            // Running iOS 6
            [assetWriter finishWritingWithCompletionHandler:(handler ?: ^{ })];
        }
        else {
            // Not running iOS 6
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            [assetWriter finishWriting];
#pragma clang diagnostic pop
            if (handler)
                runAsynchronouslyOnContextQueue(_movieWriterContext, handler);
        }
#endif
    });
}
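
// A typical completion sketch (illustrative): the handler runs on the writer's context
// queue, so hop to the main queue before touching UI or other main-thread-only state.
//
//     [movieWriter finishRecordingWithCompletionHandler:^{
//         dispatch_async(dispatch_get_main_queue(), ^{
//             NSLog(@"Movie written to %@", movieURL); // `movieURL` held by the caller
//         });
//     }];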
- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;
{
    if (!isRecording)
    {
        return;
    }

//    if (_hasAudioTrack && CMTIME_IS_VALID(startTime))
    if (_hasAudioTrack)
    {
        CFRetain(audioBuffer);

        CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer);

        if (CMTIME_IS_INVALID(startTime))
        {
            runSynchronouslyOnContextQueue(_movieWriterContext, ^{
                if ((audioInputReadyCallback == NULL) && (assetWriter.status != AVAssetWriterStatusWriting))
                {
                    [assetWriter startWriting];
                }
                [assetWriter startSessionAtSourceTime:currentSampleTime];
                startTime = currentSampleTime;
            });
        }

        if (!assetWriterAudioInput.readyForMoreMediaData && _encodingLiveVideo)
        {
            NSLog(@"1: Had to drop an audio frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
            if (_shouldInvalidateAudioSampleWhenDone)
            {
                CMSampleBufferInvalidate(audioBuffer);
            }
            CFRelease(audioBuffer);
            return;
        }

        previousAudioTime = currentSampleTime;

        // If the consumer wants to do something with the audio samples before writing, let them.
        if (self.audioProcessingCallback) {
            // Need to introspect into the opaque CMBlockBuffer structure to find its raw sample buffers.
            CMBlockBufferRef buffer = NULL;
            CMItemCount numSamplesInBuffer = CMSampleBufferGetNumSamples(audioBuffer);
            AudioBufferList audioBufferList;

            CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(audioBuffer,
                                                                    NULL,
                                                                    &audioBufferList,
                                                                    sizeof(audioBufferList),
                                                                    NULL,
                                                                    NULL,
                                                                    kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
                                                                    &buffer
                                                                    );

            // Passing a live pointer to the audio buffers; process them in place or we might have syncing issues.
            for (int bufferCount = 0; bufferCount < audioBufferList.mNumberBuffers; bufferCount++) {
                SInt16 *samples = (SInt16 *)audioBufferList.mBuffers[bufferCount].mData;
                self.audioProcessingCallback(&samples, numSamplesInBuffer);
            }

            // The block buffer was retained on our behalf above, so balance that retain here.
            if (buffer != NULL)
            {
                CFRelease(buffer);
            }
        }
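
        // A minimal in-place callback sketch (illustrative; assumes 16-bit samples, per the
        // SInt16 cast above):
        //
        //     movieWriter.audioProcessingCallback = ^(SInt16 **samplesRef, CMItemCount numSamplesInBuffer) {
        //         SInt16 *samples = *samplesRef;
        //         for (CMItemCount i = 0; i < numSamplesInBuffer; i++)
        //         {
        //             samples[i] = samples[i] / 2; // attenuate by ~6 dB, writing back in place
        //         }
        //     };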
//        NSLog(@"Recorded audio sample time: %lld, %d, %lld", currentSampleTime.value, currentSampleTime.timescale, currentSampleTime.epoch);
        void(^write)(void) = ^(void) {
            while (!assetWriterAudioInput.readyForMoreMediaData && !_encodingLiveVideo && !audioEncodingIsFinished) {
                NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.5];
                //NSLog(@"audio waiting...");
                [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
            }
            if (!assetWriterAudioInput.readyForMoreMediaData)
            {
                NSLog(@"2: Had to drop an audio frame %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
            }
            else if (assetWriter.status == AVAssetWriterStatusWriting)
            {
                if (![assetWriterAudioInput appendSampleBuffer:audioBuffer])
                    NSLog(@"Problem appending audio buffer at time: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
            }
            else
            {
                //NSLog(@"Wrote an audio frame %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
            }

            if (_shouldInvalidateAudioSampleWhenDone)
            {
                CMSampleBufferInvalidate(audioBuffer);
            }
            CFRelease(audioBuffer);
        };
//        runAsynchronouslyOnContextQueue(_movieWriterContext, write);
        if (_encodingLiveVideo)
        {
            runAsynchronouslyOnContextQueue(_movieWriterContext, write);
        }
        else
        {
            write();
        }
    }
}
- (void)enableSynchronizationCallbacks;
{
    if (videoInputReadyCallback != NULL)
    {
        if (assetWriter.status != AVAssetWriterStatusWriting)
        {
            [assetWriter startWriting];
        }
        videoQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.videoReadingQueue", NULL);
        [assetWriterVideoInput requestMediaDataWhenReadyOnQueue:videoQueue usingBlock:^{
            if (_paused)
            {
                //NSLog(@"video requestMediaDataWhenReadyOnQueue paused");
                // If we don't sleep, we'll get called back almost immediately, chewing up CPU
                usleep(10000);
                return;
            }
            //NSLog(@"video requestMediaDataWhenReadyOnQueue begin");
            while (assetWriterVideoInput.readyForMoreMediaData && !_paused)
            {
                if (videoInputReadyCallback && !videoInputReadyCallback() && !videoEncodingIsFinished)
                {
                    runAsynchronouslyOnContextQueue(_movieWriterContext, ^{
                        if (assetWriter.status == AVAssetWriterStatusWriting && !videoEncodingIsFinished)
                        {
                            videoEncodingIsFinished = YES;
                            [assetWriterVideoInput markAsFinished];
                        }
                    });
                }
            }
            //NSLog(@"video requestMediaDataWhenReadyOnQueue end");
        }];
    }

    if (audioInputReadyCallback != NULL)
    {
        audioQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.audioReadingQueue", NULL);
        [assetWriterAudioInput requestMediaDataWhenReadyOnQueue:audioQueue usingBlock:^{
            if (_paused)
            {
                //NSLog(@"audio requestMediaDataWhenReadyOnQueue paused");
                // If we don't sleep, we'll get called back almost immediately, chewing up CPU
                usleep(10000);
                return;
            }
            //NSLog(@"audio requestMediaDataWhenReadyOnQueue begin");
            while (assetWriterAudioInput.readyForMoreMediaData && !_paused)
            {
                if (audioInputReadyCallback && !audioInputReadyCallback() && !audioEncodingIsFinished)
                {
                    runAsynchronouslyOnContextQueue(_movieWriterContext, ^{
                        if (assetWriter.status == AVAssetWriterStatusWriting && !audioEncodingIsFinished)
                        {
                            audioEncodingIsFinished = YES;
                            [assetWriterAudioInput markAsFinished];
                        }
                    });
                }
            }
            //NSLog(@"audio requestMediaDataWhenReadyOnQueue end");
        }];
    }
}
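
// A pull-driven sketch (illustrative): for offline sources, the writer pulls data through
// these callbacks instead of having frames pushed at it; returning NO from a callback
// signals that the source is exhausted, which finishes the corresponding input above.
// `movieSource` and `renderNextFrame` are assumed names, not part of this class.
//
//     movieWriter.videoInputReadyCallback = ^{
//         return [movieSource renderNextFrame]; // BOOL: YES while more frames remain
//     };
//     [movieWriter enableSynchronizationCallbacks];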
#pragma mark -
#pragma mark Frame rendering

- (void)createDataFBO;
{
    glActiveTexture(GL_TEXTURE1);
    glGenFramebuffers(1, &movieFramebuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);

    if ([GPUImageContext supportsFastTextureUpload])
    {
        // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/

        CVPixelBufferPoolCreatePixelBuffer(NULL, [assetWriterPixelBufferInput pixelBufferPool], &renderTarget);

        /* AVAssetWriter will use the BT.601 conversion matrix for RGB to YCbCr conversion
         * regardless of the kCVImageBufferYCbCrMatrixKey value.
         * Tagging the resulting video file as BT.601 is the best option right now.
         * Creating a proper BT.709 video is not possible at the moment.
         */
        CVBufferSetAttachment(renderTarget, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate);
        CVBufferSetAttachment(renderTarget, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, kCVAttachmentMode_ShouldPropagate);
        CVBufferSetAttachment(renderTarget, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate);

        CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [_movieWriterContext coreVideoTextureCache], renderTarget,
                                                     NULL, // texture attributes
                                                     GL_TEXTURE_2D,
                                                     GL_RGBA, // opengl format
                                                     (int)videoSize.width,
                                                     (int)videoSize.height,
                                                     GL_BGRA, // native iOS format
                                                     GL_UNSIGNED_BYTE,
                                                     0,
                                                     &renderTexture);

        glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);
    }
    else
    {
        glGenRenderbuffers(1, &movieRenderbuffer);
        glBindRenderbuffer(GL_RENDERBUFFER, movieRenderbuffer);
        glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8_OES, (int)videoSize.width, (int)videoSize.height);
        glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, movieRenderbuffer);
    }

    GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
}
- (void)destroyDataFBO;
{
    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        [_movieWriterContext useAsCurrentContext];

        if (movieFramebuffer)
        {
            glDeleteFramebuffers(1, &movieFramebuffer);
            movieFramebuffer = 0;
        }

        if (movieRenderbuffer)
        {
            glDeleteRenderbuffers(1, &movieRenderbuffer);
            movieRenderbuffer = 0;
        }

        if ([GPUImageContext supportsFastTextureUpload])
        {
            if (renderTexture)
            {
                CFRelease(renderTexture);
                renderTexture = NULL; // Clear the reference so a second teardown can't over-release it
            }
            if (renderTarget)
            {
                CVPixelBufferRelease(renderTarget);
                renderTarget = NULL;
            }
        }
    });
}
- (void)setFilterFBO;
{
    if (!movieFramebuffer)
    {
        [self createDataFBO];
    }

    glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);

    glViewport(0, 0, (int)videoSize.width, (int)videoSize.height);
}
- (void)renderAtInternalSizeUsingFramebuffer:(GPUImageFramebuffer *)inputFramebufferToUse;
{
    [_movieWriterContext useAsCurrentContext];
    [self setFilterFBO];

    [_movieWriterContext setContextShaderProgram:colorSwizzlingProgram];

    glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // This needs to be flipped to write out to video correctly
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };

    const GLfloat *textureCoordinates = [GPUImageFilter textureCoordinatesForRotation:inputRotation];

    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, [inputFramebufferToUse texture]);
    glUniform1i(colorSwizzlingInputTextureUniform, 4);

    // NSLog(@"Movie writer framebuffer: %@", inputFramebufferToUse);

    glVertexAttribPointer(colorSwizzlingPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
    glVertexAttribPointer(colorSwizzlingTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    glFinish();
}
#pragma mark -
#pragma mark GPUImageInput protocol

- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    if (!isRecording)
    {
        [firstInputFramebuffer unlock];
        return;
    }

    // Drop frames forced by images and other sources that carry no timestamps.
    // Also, if two consecutive frames with the same time value are appended to the movie, it aborts recording, so bail in that case.
    if ((CMTIME_IS_INVALID(frameTime)) || (CMTIME_COMPARE_INLINE(frameTime, ==, previousFrameTime)) || (CMTIME_IS_INDEFINITE(frameTime)))
    {
        [firstInputFramebuffer unlock];
        return;
    }

    if (CMTIME_IS_INVALID(startTime))
    {
        runSynchronouslyOnContextQueue(_movieWriterContext, ^{
            if ((videoInputReadyCallback == NULL) && (assetWriter.status != AVAssetWriterStatusWriting))
            {
                [assetWriter startWriting];
            }

            [assetWriter startSessionAtSourceTime:frameTime];
            startTime = frameTime;
        });
    }

    GPUImageFramebuffer *inputFramebufferForBlock = firstInputFramebuffer;
    glFinish();

    runAsynchronouslyOnContextQueue(_movieWriterContext, ^{
        if (!assetWriterVideoInput.readyForMoreMediaData && _encodingLiveVideo)
        {
            [inputFramebufferForBlock unlock];
            NSLog(@"1: Had to drop a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));
            return;
        }

        // Render the frame with swizzled colors, so that they can be uploaded quickly as BGRA frames
        [_movieWriterContext useAsCurrentContext];
        [self renderAtInternalSizeUsingFramebuffer:inputFramebufferForBlock];

        CVPixelBufferRef pixel_buffer = NULL;

        if ([GPUImageContext supportsFastTextureUpload])
        {
            pixel_buffer = renderTarget;
            CVPixelBufferLockBaseAddress(pixel_buffer, 0);
        }
        else
        {
            CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, [assetWriterPixelBufferInput pixelBufferPool], &pixel_buffer);
            if ((pixel_buffer == NULL) || (status != kCVReturnSuccess))
            {
                CVPixelBufferRelease(pixel_buffer);
                [inputFramebufferForBlock unlock]; // Don't leave the input framebuffer locked on this early exit
                return;
            }
            else
            {
                CVPixelBufferLockBaseAddress(pixel_buffer, 0);

                GLubyte *pixelBufferData = (GLubyte *)CVPixelBufferGetBaseAddress(pixel_buffer);
                glReadPixels(0, 0, videoSize.width, videoSize.height, GL_RGBA, GL_UNSIGNED_BYTE, pixelBufferData);
            }
        }

        void(^write)(void) = ^(void) {
            while (!assetWriterVideoInput.readyForMoreMediaData && !_encodingLiveVideo && !videoEncodingIsFinished) {
                NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
                // NSLog(@"video waiting...");
                [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
            }
            if (!assetWriterVideoInput.readyForMoreMediaData)
            {
                NSLog(@"2: Had to drop a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));
            }
            else if (self.assetWriter.status == AVAssetWriterStatusWriting)
            {
                if (![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:frameTime])
                    NSLog(@"Problem appending pixel buffer at time: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));
            }
            else
            {
                NSLog(@"Couldn't write a frame");
                //NSLog(@"Wrote a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));
            }
            CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);

            previousFrameTime = frameTime;

            if (![GPUImageContext supportsFastTextureUpload])
            {
                CVPixelBufferRelease(pixel_buffer);
            }
        };

        write();

        [inputFramebufferForBlock unlock];
    });
}
- (NSInteger)nextAvailableTextureIndex;
{
    return 0;
}

- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
    [newInputFramebuffer lock];
//    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        firstInputFramebuffer = newInputFramebuffer;
//    });
}

- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    inputRotation = newInputRotation;
}

- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
}

- (CGSize)maximumOutputSize;
{
    return videoSize;
}

- (void)endProcessing
{
    if (completionBlock)
    {
        if (!alreadyFinishedRecording)
        {
            alreadyFinishedRecording = YES;
            completionBlock();
        }
    }
    else
    {
        if (_delegate && [_delegate respondsToSelector:@selector(movieRecordingCompleted)])
        {
            [_delegate movieRecordingCompleted];
        }
    }
}

- (BOOL)shouldIgnoreUpdatesToThisTarget;
{
    return NO;
}

- (BOOL)wantsMonochromeInput;
{
    return NO;
}

- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
{
}
#pragma mark -
#pragma mark Accessors

- (void)setHasAudioTrack:(BOOL)newValue
{
    [self setHasAudioTrack:newValue audioSettings:nil];
}

- (void)setHasAudioTrack:(BOOL)newValue audioSettings:(NSDictionary *)audioOutputSettings;
{
    _hasAudioTrack = newValue;

    if (_hasAudioTrack)
    {
        if (_shouldPassthroughAudio)
        {
            // Do not set any settings so audio will be the same as passthrough
            audioOutputSettings = nil;
        }
        else if (audioOutputSettings == nil)
        {
            AVAudioSession *sharedAudioSession = [AVAudioSession sharedInstance];
            double preferredHardwareSampleRate;

            if ([sharedAudioSession respondsToSelector:@selector(sampleRate)])
            {
                preferredHardwareSampleRate = [sharedAudioSession sampleRate];
            }
            else
            {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
                preferredHardwareSampleRate = [[AVAudioSession sharedInstance] currentHardwareSampleRate];
#pragma clang diagnostic pop
            }

            AudioChannelLayout acl;
            bzero(&acl, sizeof(acl));
            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

            audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                   [NSNumber numberWithFloat:preferredHardwareSampleRate], AVSampleRateKey,
                                   [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                                   //[NSNumber numberWithInt:AVAudioQualityLow], AVEncoderAudioQualityKey,
                                   [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                   nil];
/*
            AudioChannelLayout acl;
            bzero(&acl, sizeof(acl));
            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

            audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                   [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                   [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                   [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                                   nil];
*/
        }

        assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
        [assetWriter addInput:assetWriterAudioInput];
        assetWriterAudioInput.expectsMediaDataInRealTime = _encodingLiveVideo;
    }
    else
    {
        // Remove audio track if it exists
    }
}
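
// A typical call sequence (illustrative) when recording live camera audio through
// GPUImageVideoCamera: enable the track before recording starts, then point the camera's
// audio at this writer. `videoCamera` is an assumed caller-side instance.
//
//     [movieWriter setHasAudioTrack:YES audioSettings:nil]; // nil picks the AAC defaults built above
//     videoCamera.audioEncodingTarget = movieWriter;
//     [movieWriter startRecording];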
- (NSArray *)metaData {
    return assetWriter.metadata;
}

- (void)setMetaData:(NSArray *)metaData {
    assetWriter.metadata = metaData;
}

- (CMTime)duration {
    if (!CMTIME_IS_VALID(startTime))
        return kCMTimeZero;
    if (!CMTIME_IS_NEGATIVE_INFINITY(previousFrameTime))
        return CMTimeSubtract(previousFrameTime, startTime);
    if (!CMTIME_IS_NEGATIVE_INFINITY(previousAudioTime))
        return CMTimeSubtract(previousAudioTime, startTime);
    return kCMTimeZero;
}

- (CGAffineTransform)transform {
    return assetWriterVideoInput.transform;
}

- (void)setTransform:(CGAffineTransform)transform {
    assetWriterVideoInput.transform = transform;
}

- (AVAssetWriter *)assetWriter {
    return assetWriter;
}

@end