13 #include <CoreVideo/CoreVideo.h>
14 #import <AVFoundation/AVFoundation.h>
15 #include <OpenGL/CGLMacro.h>
20 "title" :
"VuoAvWriterObject",
28 "AVFoundation.framework",
29 "CoreMedia.framework",
// Active AVFoundation writing session; non-nil only while a movie is being recorded
// (this file is MRC — note the autorelease/release calls elsewhere — so `retain` is correct here).
44 @property (retain) AVAssetWriter* assetWriter;
// Input that receives video frames (fed via avAdaptor) for the movie's video track.
45 @property (retain) AVAssetWriterInput* videoInput;
// Input that receives audio sample buffers; nil when no audio track could be added.
46 @property (retain) AVAssetWriterInput* audioInput;
// Converts pooled CVPixelBuffers into videoInput samples with presentation times.
47 @property (retain) AVAssetWriterInputPixelBufferAdaptor* avAdaptor;
// NOTE(review): usage not visible in this chunk — presumably the wall-clock time recording started; confirm.
52 @property (retain) NSDate* startDate;
// True from session start until the first frame has been appended.
59 @property bool firstFrame;
66 return self.assetWriter != nil;
/**
 * Configures an AVAssetWriter for a QuickTime movie at @c fileUrl, with a
 * video input sized @c width x @c height and (optionally) an audio input with
 * @c channels channels, using the codec/quality settings in @c format.
 *
 * NOTE(review): this chunk is missing interior lines (opening brace, `error`,
 * `clampedQuality`, `fudge`, `audioBitrate`, early returns, several closing
 * braces), so some comments below are inferred from the visible statements —
 * confirm against the complete file.
 */
69 - (BOOL) setupAssetWriterWithUrl:(NSURL*) fileUrl imageWidth:(
int)width imageHeight:(
int)height channelCount:(
int)channels movieFormat:(
VuoMovieFormat)format
// Start timestamps at -infinity so any real timestamp compares as later.
73 self.lastImageTimestamp = kCMTimeNegativeInfinity;
74 self.lastAudioTimestamp = kCMTimeNegativeInfinity;
76 self.originalWidth = width;
77 self.originalHeight = height;
// Map the Vuo image-encoding enum onto an AVFoundation codec identifier;
// the default is Motion-JPEG.
79 NSString* videoEncoding = AVVideoCodecJPEG;
82 videoEncoding = AVVideoCodecH264;
83 else if(format.
imageEncoding == VuoMovieImageEncoding_ProRes4444)
84 videoEncoding = AVVideoCodecAppleProRes4444;
85 else if(format.
imageEncoding == VuoMovieImageEncoding_ProRes422)
86 videoEncoding = AVVideoCodecAppleProRes422;
87 else if (format.
imageEncoding == VuoMovieImageEncoding_ProRes422HQ)
// ProRes 422 HQ ('apch') requires macOS 10.15+; otherwise log and keep the default codec.
89 if (NSProcessInfo.processInfo.operatingSystemVersion.minorVersion >= 15)
90 videoEncoding =
@"apch";
93 VUserLog(
"Error: macOS 10.%d doesn't support ProRes 422 HQ.", (
int)NSProcessInfo.processInfo.operatingSystemVersion.minorVersion);
97 else if (format.
imageEncoding == VuoMovieImageEncoding_ProRes422LT)
// ProRes 422 LT ('apcs'), also 10.15+.
99 if (NSProcessInfo.processInfo.operatingSystemVersion.minorVersion >= 15)
100 videoEncoding =
@"apcs";
103 VUserLog(
"Error: macOS 10.%d doesn't support ProRes 422 LT.", (
int)NSProcessInfo.processInfo.operatingSystemVersion.minorVersion);
107 else if (format.
imageEncoding == VuoMovieImageEncoding_ProRes422Proxy)
// ProRes 422 Proxy ('apco'), also 10.15+.
109 if (NSProcessInfo.processInfo.operatingSystemVersion.minorVersion >= 15)
110 videoEncoding =
@"apco";
113 VUserLog(
"Error: macOS 10.%d doesn't support ProRes 422 Proxy.", (
int)NSProcessInfo.processInfo.operatingSystemVersion.minorVersion);
// HEVC ('hvc1') requires macOS 10.13+.  (The matching `else if` header for
// this branch is on a line missing from this chunk.)
119 if (NSProcessInfo.processInfo.operatingSystemVersion.minorVersion >= 13)
120 videoEncoding =
@"hvc1";
123 VUserLog(
"Error: macOS 10.%d doesn't support HEVC/h.265.", (
int)NSProcessInfo.processInfo.operatingSystemVersion.minorVersion);
127 else if (format.
imageEncoding == VuoMovieImageEncoding_HEVCAlpha)
// HEVC with alpha channel ('muxa') requires macOS 10.15+.
129 if (NSProcessInfo.processInfo.operatingSystemVersion.minorVersion >= 15)
130 videoEncoding =
@"muxa";
133 VUserLog(
"Error: macOS 10.%d doesn't support HEVC/h.265 with alpha channel.", (
int)NSProcessInfo.processInfo.operatingSystemVersion.minorVersion);
// Create the writer itself; on failure, log the error (the early-return lines
// are missing from this chunk).  `error` is declared on a missing line.
139 self.assetWriter = [[[AVAssetWriter alloc] initWithURL:fileUrl fileType:AVFileTypeQuickTimeMovie error:&error] autorelease];
143 VUserLog(
"AVAssetWriter initWithURL failed with error %s", [[error localizedDescription] UTF8String]);
// Base video settings: codec plus output dimensions.
148 NSMutableDictionary *videoOutputSettings = [@{
149 AVVideoCodecKey: videoEncoding,
150 AVVideoWidthKey: [NSNumber numberWithInt:self.originalWidth],
151 AVVideoHeightKey: [NSNumber numberWithInt:self.originalHeight]
// Rough target bitrate scaled by quality and frame size; `clampedQuality` and
// `fudge` are defined on lines missing from this chunk.
156 float bitrate = clampedQuality * width * height * 60. * fudge;
// Codec-specific compression properties: JPEG takes a quality scalar;
// H.264/HEVC take an average bitrate; HEVC-with-alpha additionally takes
// alpha-quality and alpha-channel-mode keys.
158 if( [videoEncoding isEqualToString:AVVideoCodecJPEG] )
159 videoOutputSettings[AVVideoCompressionPropertiesKey] = @{
160 AVVideoQualityKey: @(clampedQuality),
162 else if ([videoEncoding isEqualToString:AVVideoCodecH264]
163 || [videoEncoding isEqualToString:
@"hvc1" ])
164 videoOutputSettings[AVVideoCompressionPropertiesKey] = @{
165 AVVideoAverageBitRateKey: @(bitrate),
167 else if ([videoEncoding isEqualToString:
@"muxa" ])
169 videoOutputSettings[AVVideoCompressionPropertiesKey] = @{
170 AVVideoAverageBitRateKey: @(bitrate),
171 @"TargetQualityForAlpha" : @(clampedQuality),
172 @"AlphaChannelMode" :
@"PremultipliedAlpha",
176 self.videoInput = [[[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoOutputSettings] autorelease];
177 [videoOutputSettings release];
178 [
self.videoInput setExpectsMediaDataInRealTime:YES];
// Pixel-buffer pool attributes: 32-bit BGRA at the movie's dimensions.
180 NSDictionary *pa = @{
181 (NSString *)kCVPixelBufferPixelFormatTypeKey: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA],
182 (NSString *)kCVPixelBufferWidthKey: [NSNumber numberWithInt:self.originalWidth],
183 (NSString *)kCVPixelBufferHeightKey: [NSNumber numberWithInt:
self.originalHeight],
186 self.avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.videoInput sourcePixelBufferAttributes:pa];
188 if ([
self.assetWriter canAddInput:
self.videoInput])
190 [
self.assetWriter addInput:self.videoInput];
// Video input rejected: tear down everything created so far.
194 VUserLog(
"Failed adding a video input to the AVWriter.");
195 self.assetWriter = nil;
196 self.videoInput = nil;
197 self.avAdaptor = nil;
// ---- Audio setup (any guard for channels > 0 is on lines missing from this chunk). ----
202 self.originalChannelCount = channels;
// Describe the incoming samples: interleaved 32-bit float linear PCM,
// one frame per packet.
207 AudioStreamBasicDescription audioFormat;
208 bzero(&audioFormat,
sizeof(audioFormat));
211 audioFormat.mFormatID = kAudioFormatLinearPCM;
212 audioFormat.mFramesPerPacket = 1;
213 audioFormat.mChannelsPerFrame = channels;
214 int bytes_per_sample =
sizeof(float);
215 audioFormat.mFormatFlags = kAudioFormatFlagIsFloat;
216 audioFormat.mBitsPerChannel = bytes_per_sample * 8;
217 audioFormat.mBytesPerPacket = bytes_per_sample * channels;
218 audioFormat.mBytesPerFrame = bytes_per_sample * channels;
220 CMFormatDescriptionRef fmt;
221 CMAudioFormatDescriptionCreate(kCFAllocatorDefault,
230 self.audio_fmt_desc = fmt;
// Channel layout: stereo for more than one channel, else mono.
232 AudioChannelLayout acl;
233 bzero( &acl,
sizeof(acl));
234 acl.mChannelLayoutTag = channels > 1 ? kAudioChannelLayoutTag_Stereo : kAudioChannelLayoutTag_Mono;
236 int audioEncoding = kAudioFormatLinearPCM;
237 NSDictionary* audioOutputSettings;
// Linear-PCM output branch.  (The `if` choosing between PCM and AAC is on a
// missing line.)  NOTE(review): AVNumberOfChannelsKey appears twice in this
// dictionary (original lines 244 and 246) — a harmless duplicate worth
// removing in a cleanup pass.
241 audioEncoding = kAudioFormatLinearPCM;
242 audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
243 [NSNumber numberWithInt: audioEncoding ], AVFormatIDKey,
244 [NSNumber numberWithInt: channels ], AVNumberOfChannelsKey,
245 [NSNumber numberWithFloat: VuoAudioSamples_sampleRate], AVSampleRateKey,
246 [NSNumber numberWithInt: channels],AVNumberOfChannelsKey,
247 [NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
248 [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
249 [NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
250 [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
251 [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
// AAC output branch; `audioBitrate` is defined on a missing line.
256 audioEncoding = kAudioFormatMPEG4AAC;
259 audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
260 [ NSNumber numberWithInt: audioEncoding ], AVFormatIDKey,
261 [ NSNumber numberWithInt: channels ], AVNumberOfChannelsKey,
262 [ NSNumber numberWithFloat: VuoAudioSamples_sampleRate], AVSampleRateKey,
263 [ NSNumber numberWithInt:audioBitrate], AVEncoderBitRateKey,
264 [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
268 self.audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
269 self.audioInput.expectsMediaDataInRealTime=YES;
270 self.audio_sample_position = 0;
272 if([
self.assetWriter canAddInput:
self.audioInput])
274 [
self.assetWriter addInput:self.audioInput];
// Audio is optional: on failure, log and continue with video only.
278 VUserLog(
"Could not add audio input.");
279 self.audioInput = nil;
// Begin the session; frame/sample presentation times are relative to zero.
284 [
self.assetWriter startWriting];
285 [
self.assetWriter startSessionAtSourceTime:kCMTimeZero];
287 self.firstFrame =
true;
/**
 * Appends @c image to the movie's video track at @c timestamp (seconds).
 *
 * If the writer isn't ready for more data: when @c blockIfNotReady is true,
 * polls until it is; otherwise the frame is dropped.
 *
 * NOTE(review): some branch/brace lines, the early returns after the error
 * logs, and the declaration of `buf` (the source pixel pointer) are on lines
 * missing from this chunk — confirm against the complete file.
 */
292 - (void) appendImage:(
VuoImage)image presentationTime:(
double)timestamp blockIfNotReady:(BOOL)blockIfNotReady
// Reject frames whose dimensions don't match the movie's.
// NOTE(review): the log message below contains a "the the" typo.
294 if(image->pixelsWide !=
self.originalWidth || image->pixelsHigh !=
self.originalHeight)
296 VUserLog(
"Error: Can't append image because it is not the same dimensions as the the current movie.");
300 if(!
self.videoInput.readyForMoreMediaData)
// Blocking path: poll every 100 ms until the writer can accept another frame.
304 VUserLog(
"Warning: The AVFoundation asset writer isn't keeping up; waiting for it to catch up before writing this video frame.");
305 while (!
self.videoInput.readyForMoreMediaData)
306 usleep(USEC_PER_SEC/10);
// Non-blocking path: drop this frame.
310 VUserLog(
"Error: The AVFoundation asset writer isn't keeping up; dropping this video frame.");
// Grab a pixel buffer from the adaptor's pool and copy the image into it.
315 CVPixelBufferRef pb = nil;
319 CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(nil, [
self.avAdaptor pixelBufferPool], &pb);
321 if(ret != kCVReturnSuccess)
323 VUserLog(
"Error: Couldn't get PixelBuffer from pool. %d", ret);
// The base address must be locked before CPU access to the buffer's bytes.
327 ret = CVPixelBufferLockBaseAddress(pb, 0);
329 if(ret != kCVReturnSuccess)
331 VUserLog(
"Error: Couldn't lock PixelBuffer base address");
335 unsigned char *bytes = (
unsigned char*)CVPixelBufferGetBaseAddress(pb);
339 VUserLog(
"Error: Couldn't get the pixel buffer base address");
// Copy row by row, flipping vertically (destination row is
// `originalHeight - y - 1`; source rows are bottom-up — presumably a GL
// texture readback) and honoring the pixel buffer's row stride.
343 unsigned int bytesPerRow = CVPixelBufferGetBytesPerRow(pb);
345 for(
unsigned long y = 0; y <
self.originalHeight; y++)
346 memcpy(bytes + bytesPerRow * (self.originalHeight - y - 1), buf + self.originalWidth * y * 4, self.originalWidth * 4);
348 ret = CVPixelBufferUnlockBaseAddress(pb, 0);
350 if(ret != kCVReturnSuccess)
352 VUserLog(
"Error: Couldn't unlock pixelbuffer base address. %d", ret);
// Force a strictly increasing presentation time by nudging the timestamp
// forward one tick at a time past the previously written frame's time.
356 CMTime presentationTime = CMTimeMakeWithSeconds(timestamp,
TIMEBASE);
358 while (CMTimeCompare(presentationTime,
self.lastImageTimestamp) <= 0)
359 presentationTime.value++;
361 self.lastImageTimestamp = presentationTime;
365 if (![
self.avAdaptor appendPixelBuffer:pb withPresentationTime:presentationTime])
367 VUserLog(
"Couldn't write frame %lld (%fs): %s", presentationTime.value, CMTimeGetSeconds(presentationTime), [[_assetWriter.error description] UTF8String]);
// Balance the pool's create above.
371 CVPixelBufferRelease(pb);
// NOTE(review): this block starts mid-method — the signature is on lines
// missing from this chunk.  From the body it appends a buffer of audio
// samples to self.audioInput; the variables `channelCount`, `sampleCount`,
// `timestamp`, `channel`, `samples`, and `status` are declared on missing
// lines.  Confirm against the complete file.
// The incoming channel count must match what the writer was configured with.
379 if(channelCount !=
self.originalChannelCount)
381 if (
self.originalChannelCount == -1)
382 VUserLog(
"Error: Attempting to write %lu audio channels to a silent movie.", channelCount);
384 VUserLog(
"Error: Attempting to write %lu audio channels to a movie with %d channels.", channelCount,
self.originalChannelCount);
// Same block-or-drop policy as the video path: poll every 100 ms, or drop.
388 if ( !
self.audioInput.readyForMoreMediaData) {
391 VUserLog(
"Warning: The AVFoundation asset writer isn't keeping up; waiting for it to catch up before writing this audio frame.");
392 while (!
self.audioInput.readyForMoreMediaData)
393 usleep(USEC_PER_SEC/10);
397 VUserLog(
"Error: The AVFoundation asset writer isn't keeping up; dropping this audio frame.");
403 CMBlockBufferRef bbuf = NULL;
404 CMSampleBufferRef sbuf = NULL;
406 size_t buflen = sampleCount * channelCount *
sizeof(float);
// Interleave the per-channel sample arrays into a single float buffer
// (frame i holds channels 0..channelCount-1 consecutively).
408 float* interleaved = (
float*)malloc(
sizeof(
float) * sampleCount * channelCount);
411 for(
int n = 0; n < channelCount; n++)
417 VUserLog(
"Error: Attempting to write a NULL audio sample buffer. Skipping this audio frame.");
426 for(
int i = 0; i < sampleCount; i++)
428 interleaved[i*channelCount + n] = channel[i];
// Wrap the interleaved samples in a CMBlockBuffer, then force a contiguous
// copy (kCMBlockBufferAlwaysCopyDataFlag) so CoreMedia owns the bytes.
433 CMBlockBufferRef tmp;
434 status = CMBlockBufferCreateWithMemoryBlock( kCFAllocatorDefault,
443 if (status != noErr) {
445 VUserLog(
"CMBlockBufferCreateWithMemoryBlock error: %s", s);
450 status = CMBlockBufferCreateContiguous(kCFAllocatorDefault, tmp, kCFAllocatorDefault, NULL, 0,
451 buflen, kCMBlockBufferAlwaysCopyDataFlag, &bbuf);
455 if (status != noErr) {
457 VUserLog(
"CMBlockBufferCreateContiguous error: %s", s);
// As with video, nudge the timestamp until it's strictly later than the
// previously written audio sample buffer's time.
462 CMTime presentationTime = CMTimeMakeWithSeconds(timestamp,
TIMEBASE);
478 while (CMTimeCompare(presentationTime,
self.lastAudioTimestamp) <= 0)
479 presentationTime.value++;
481 self.lastAudioTimestamp = presentationTime;
// Build the CMSampleBuffer from the block buffer and append it.
485 status = CMAudioSampleBufferCreateWithPacketDescriptions( kCFAllocatorDefault,
495 if (status != noErr) {
496 VUserLog(
"CMSampleBufferCreate error");
500 if ( ![
self.audioInput appendSampleBuffer:sbuf] )
502 VUserLog(
"AppendSampleBuffer error");
// NOTE(review): this block starts mid-method (signature on missing lines);
// it appears to be -finalizeRecording, which the end of this chunk calls.
// It marks the inputs finished, waits for the writer to finish, then
// releases the session objects.
513 if(
self.assetWriter )
515 [
self.videoInput markAsFinished];
517 if(
self.audioInput != nil)
518 [
self.audioInput markAsFinished];
// finishWritingWithCompletionHandler: is asynchronous; use a semaphore to
// wait synchronously for completion before tearing anything down.
520 dispatch_semaphore_t finishedWriting = dispatch_semaphore_create(0);
521 [_assetWriter finishWritingWithCompletionHandler:^{
522 dispatch_semaphore_signal(finishedWriting);
524 dispatch_semaphore_wait(finishedWriting, DISPATCH_TIME_FOREVER);
525 dispatch_release(finishedWriting);
527 if (_assetWriter.status != AVAssetWriterStatusCompleted)
528 VUserLog(
"Error: %s", [[_assetWriter.error localizedDescription] UTF8String]);
// Nil-ing the retain-properties releases the session objects (MRC file).
530 self.assetWriter = nil;
531 self.videoInput = nil;
532 self.audioInput = nil;
539 [
self finalizeRecording];