13 #include <OpenGL/OpenGL.h>
14 #include <OpenGL/CGLMacro.h>
18 #include <QuartzCore/CoreImage.h>
19 #include <Quartz/Quartz.h>
20 #include <QuartzCore/CVImageBuffer.h>
22 #include <Accelerate/Accelerate.h>
26 #include "HapInAVFoundation.h"
30 "title" :
"VuoAvPlayerObject",
34 "AVFoundation.framework",
36 "CoreVideo.framework",
37 "CoreMedia.framework",
39 "Accelerate.framework",
43 "QuartzCore.framework",
// Declares the initializer of HapInAVFoundation's AVPlayerItemHapDXTOutput
// (the class is looked up at runtime via NSClassFromString below, so this
// declaration lets us message it without linking the framework at build time).
79 - (id)initWithHapAssetTrack:(AVAssetTrack *)track;
// Initializes a track output.  For Hap-encoded tracks, dynamically loads
// HapInAVFoundation.framework and creates a Hap DXT output; otherwise defers
// to the superclass with the caller's output settings.
// NOTE(review): this listing is elided (source line numbers jump) — comments
// below describe only the visible fragments.
95 - (id)initWithTrack:(AVAssetTrack *)track outputSettings:(NSDictionary *)settings
// Inspect every format description on the track to detect a Hap codec
// (the `isHap` flag is presumably set in elided lines — confirm).
98 for (
id formatDescription in track.formatDescriptions)
100 CMFormatDescriptionRef desc = (CMFormatDescriptionRef) formatDescription;
101 CMVideoCodecType codec = CMFormatDescriptionGetMediaSubType(desc);
// For Hap, pass nil settings so AVFoundation hands us the raw (compressed)
// sample buffers instead of decoding them itself.
112 if (
self = [super initWithTrack:track outputSettings:(isHap ? nil : settings)])
// Look for HapInAVFoundation.framework inside Vuo.framework first…
116 NSBundle *f = [NSBundle bundleWithPath:[NSString stringWithFormat:@"%s/Vuo.framework/Versions/%s/Frameworks/HapInAVFoundation.framework",
117 VuoGetFrameworkPath(),
118 VUO_FRAMEWORK_VERSION_STRING]];
// …then fall back to VuoRunner.framework (elided condition presumably checks
// whether the first lookup failed — confirm).
121 f = [NSBundle bundleWithPath:[NSString stringWithFormat:@"%s/VuoRunner.framework/Versions/%s/Frameworks/HapInAVFoundation.framework",
122 VuoGetRunnerFrameworkPath(),
123 VUO_FRAMEWORK_VERSION_STRING]];
126 VUserLog(
"Error: Playing this movie requires HapInAVFoundation.framework, but I can't find it.");
// Load the bundle; on failure, prefer the underlying error's description
// since it's usually more specific than the top-level one.
134 bool status = [f loadAndReturnError:&error];
137 NSError *underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
139 error = underlyingError;
140 VUserLog(
"Error: Playing this movie requires HapInAVFoundation.framework, but it wouldn't load: %s", [[error localizedDescription] UTF8String]);
// Runtime class lookup — avoids a hard link-time dependency on the framework.
145 hapOutput = [[NSClassFromString(@"AVPlayerItemHapDXTOutput") alloc] initWithHapAssetTrack:track];
// Pulls the next sample buffer from the superclass; for Hap tracks, wraps it
// in a Hap frame object for later DXT decoding.  (The non-Hap return path is
// elided from this listing — presumably returns `buffer` directly; confirm.)
156 CMSampleBufferRef buffer = [
super copyNextSampleBuffer];
161 return [hapOutput allocFrameForHapSampleBuffer:buffer];
// Creates the Core Video OpenGL texture cache used to turn decoded
// CVPixelBuffers into GL textures (see copyNextVideoSampleBuffer).
// NOTE(review): `__block` suggests this runs inside a VuoGlContext_perform-style
// block — surrounding lines are elided; confirm.
189 __block CVReturn ret;
191 ret = CVOpenGLTextureCacheCreate(NULL, NULL, cgl_ctx, pf, NULL, &
textureCache);
// The pixel format was retained for the cache-creation call only.
193 CGLReleasePixelFormat(pf);
194 if (ret != kCVReturnSuccess)
196 VUserLog(
"Error: Couldn't create texture cache: %d", ret);
// Releases an AVAssetReader on a background queue after a short delay,
// cancelling it first if it's still reading.  The 0.2 s delay presumably gives
// in-flight copyNextSampleBuffer calls time to finish — confirm intent.
219 + (void) releaseAssetReader:(AVAssetReader*)reader
// Captured strongly by the block below so the reader stays alive until then.
221 AVAssetReader* readerToRelease = reader;
223 dispatch_after(dispatch_time(DISPATCH_TIME_NOW, .2 * NSEC_PER_SEC), dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^
225 if (readerToRelease.status == AVAssetReaderStatusReading)
226 [readerToRelease cancelReading];
// Manual-retain-release code (file predates ARC).
228 [readerToRelease release];
// Teardown fragments (method header elided): stop any pending asynchronous
// asset loading, drain queued frames, and release the frame queue.
239 [asset cancelLoading];
250 [
self clearFrameQueue];
252 [videoQueue release];
// Points the decoder at a movie URL and validates that AVFoundation can read
// it; also logs codec/alpha/bit-depth diagnostics from the first video track.
// NOTE(review): listing is elided — return statements and several branches
// are not visible here.
263 - (bool) setURL:(NSURL*)url
265 asset = [AVAsset assetWithURL:url];
// Elided branch presumably handles asset == nil — confirm.
269 VUserLog(
"VuoVideoDecoder: AvFoundation decoder could not find movie file at path: %s", [[url absoluteString] UTF8String]);
279 bool isPlayable =
true;
// DRM'd content can't be decoded via AVAssetReader.
283 bool isProtected = [asset hasProtectedContent];
285 if (!isPlayable || isProtected || ![
asset isReadable])
287 VUserLog(
"AvFoundation cannot play this asset (isPlayable=%d, hasProtectedContent=%d, isReadable=%d).",
// NOTE(review): unguarded [0] indexing — crashes if the asset has no video
// track or no format descriptions.  A guard may exist in the elided lines
// above; verify against the full source.
295 AVAssetTrack *videoTrack = [asset tracksWithMediaType:AVMediaTypeVideo][0];
296 CMFormatDescriptionRef desc = (CMFormatDescriptionRef)videoTrack.formatDescriptions[0];
297 CMVideoCodecType codec = CMFormatDescriptionGetMediaSubType(desc);
// Format-description extensions; used for diagnostics logged below.
305 CFBooleanRef containsAlphaChannel = CMFormatDescriptionGetExtension(desc, CFSTR(
"ContainsAlphaChannel"));
306 if (containsAlphaChannel == kCFBooleanTrue)
313 int bitsPerComponent = ((NSNumber *)CMFormatDescriptionGetExtension(desc, CFSTR(
"BitsPerComponent"))).intValue;
314 int depth = ((NSNumber *)CMFormatDescriptionGetExtension(desc, CFSTR(
"Depth"))).intValue;
320 VUserLog(
"codec=\"%s\" (%s) bpc=%d depth=%d hasAlpha=%d %s",
326 (
hasAlpha && alphaMode) ? alphaMode :
"");
// Asynchronous asset-loading fragment (enclosing method's header is elided).
// Loads the keys needed for playback, then initializes the asset reader and
// primes the frame queue, finally notifying the owner via readyToPlayCallback.
334 NSArray* assetKeys = @[@"playable", @"hasProtectedContent", @"tracks"];
345 [asset loadValuesAsynchronouslyForKeys: assetKeys completionHandler:^(void)
// The completion handler runs on an arbitrary AVFoundation thread, so the
// Vuo composition state must be re-attached to this thread's local storage.
347 vuoAddCompositionStateToThreadLocalStorage(compositionState);
350 NSError *error = nil;
351 bool failedLoading = false;
// Verify each requested key actually loaded.
353 for (NSString *key in assetKeys)
355 NSInteger status = [asset statusOfValueForKey:key error:&error];
358 status == AVKeyValueStatusFailed ||
359 status == AVKeyValueStatusCancelled )
361 VUserLog("AVFoundation failed loading asset.");
362 failedLoading = true;
// Ready only if every key loaded AND the reader could be configured for the
// whole duration of the asset.
366 isReady = !failedLoading && [
self setAssetReaderTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration])];
// Pre-decode a few frames so playback can start immediately.
370 for(int i = 0; i < 3; i++)
372 if(![
self copyNextVideoSampleBuffer])
374 VUserLog("AvFoundation successfully loaded but failed to extract a video buffer.");
// Guard against the owner having been torn down while loading.
381 if( self != nil && readyToPlayCallback != NULL && avDecoderCppObject != NULL )
382 readyToPlayCallback( avDecoderCppObject, isReady );
385 vuoRemoveCompositionStateFromThreadLocalStorage();
// (Re)creates the AVAssetReader restricted to `timeRange`, attaching one
// video track output (BGRA, GL-compatible) and, when audio tracks exist,
// a linear-PCM audio output matching VuoAudioSamples' format.
// Returns the result of -startReading.
// NOTE(review): elided lines presumably release/replace the previous reader
// and set the reader's timeRange — confirm.
406 - (bool) setAssetReaderTimeRange:(CMTimeRange)timeRange
408 NSError *error = nil;
418 NSArray* videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
420 if ([videoTracks count] != 0)
422 AVAssetTrack* vidtrack = [videoTracks objectAtIndex:0];
439 assetReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
// Keep the autoreleased reader alive (pre-ARC code).
441 [assetReader retain];
444 VUserLog(
"AVAssetReader failed initialization: %s", [[error localizedDescription] UTF8String]);
// BGRA + OpenGL compatibility so decoded frames can go straight into the
// CVOpenGLTextureCache (see copyNextVideoSampleBuffer).
450 NSDictionary *videoOutputSettings = @{
451 (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
452 (NSString *)kCVPixelBufferOpenGLCompatibilityKey: @(YES),
456 outputSettings:videoOutputSettings];
462 [assetReader addOutput:assetReaderVideoTrackOutput];
466 VUserLog(
"AVFoundation Video Decoder: assetReaderVideoTrackOutput cannot be added to assetReader.");
// 32-bit float, interleaved, native-endian PCM at Vuo's canonical sample rate.
472 NSDictionary* audioOutputSettings;
474 audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
475 [NSNumber numberWithInt: kAudioFormatLinearPCM ], AVFormatIDKey,
476 [NSNumber numberWithFloat: VuoAudioSamples_sampleRate], AVSampleRateKey,
477 [NSNumber numberWithInt:32], AVLinearPCMBitDepthKey,
478 [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
479 [NSNumber numberWithBool:YES], AVLinearPCMIsFloatKey,
480 [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
485 for(AVAssetTrack* track in [
asset tracksWithMediaType:AVMediaTypeAudio])
502 [assetReader addOutput:assetReaderAudioTrackOutput];
508 VUserLog(
"AVFoundation Video Decoder: AssetReaderAudioTrackOutput cannot be added to assetReader.");
515 return [assetReader startReading];
// Returns the channel count of the first format description on `track`
// that carries an AudioStreamBasicDescription.
// (The fall-through return for tracks with no usable description is elided.)
524 - (
unsigned int) getAudioChannelCount:(AVAssetTrack*) track
526 NSArray* formatDesc = track.formatDescriptions;
528 for(
unsigned int i = 0; i < [formatDesc count]; ++i)
530 CMAudioFormatDescriptionRef item = (CMAudioFormatDescriptionRef)[formatDesc objectAtIndex:i];
// May return NULL when the description has no stream-basic info.
531 const AudioStreamBasicDescription* audioDescription = CMAudioFormatDescriptionGetStreamBasicDescription (item);
533 if(audioDescription != nil)
535 return audioDescription->mChannelsPerFrame;
// Changes the playback rate.  The visible fragment re-seeks to the current
// timestamp and drops queued frames; the rate bookkeeping itself (and the
// condition under which this path runs, e.g. a direction change) is elided.
542 - (void) setPlaybackRate:(
double)rate
549 [
self seekToSecond:videoTimestamp withRange:-1 frame:NULL];
551 [
self clearFrameQueue];
// Empties the decoded-frame queue.  Each queue entry is an NSValue wrapping a
// heap-allocated VuoVideoFrame*; the per-frame cleanup (releasing the image
// and freeing the struct, presumably) sits in elided lines 572–575 — confirm.
567 - (void) clearFrameQueue
569 for(
int i = 0; i < [videoQueue count]; i++)
571 VuoVideoFrame* frame = [[videoQueue objectAtIndex:i] pointerValue];
576 [videoQueue removeAllObjects];
// Seeks to `second`.  Rebuilds the asset reader over a new time range
// (the full remaining duration when range < 0), primes one video and one
// audio buffer, and — when `frame` is non-NULL — tries to hand back the frame
// at the seek target.  Return statements are elided from this listing.
582 - (bool) seekToSecond:(
float)second withRange:(
float)range frame:(
VuoVideoFrame *)frame
// Queued frames are from the old position; discard them.
587 [
self clearFrameQueue];
// NSEC_PER_SEC as the CMTime timescale gives nanosecond seek precision.
595 CMTime cmsec = CMTimeMakeWithSeconds(second, NSEC_PER_SEC);
597 [
self setAssetReaderTimeRange: CMTimeRangeMake(cmsec, range < 0 ? [asset duration] : CMTimeMakeWithSeconds(range, NSEC_PER_SEC))];
600 [
self copyNextVideoSampleBuffer];
601 [
self copyNextAudioSampleBuffer];
// If no frame could be produced at the new position, zero the timestamp so
// the caller doesn't read garbage.
613 if (![
self nextVideoFrame:frame])
616 frame->timestamp = 0;
// Registers the C callback (and its C++ decoder object) invoked when the
// asset finishes loading (see the completion handler around line 382).
// Body is elided from this listing.
623 - (void) setPlayerCallback:(
void (*)(
void* functionPtr,
bool canPlayMedia))callback target:(
void*)decoderCppObject
// nextVideoFrame interior (method header elided): pops a frame off the queue —
// from the tail when playing backwards, the head when playing forwards —
// copies it out to the caller, and pre-seeks when the queue runs low.
654 int index =
playbackRate < 0 ? [videoQueue count] - 1 : 0;
656 NSValue* value = [videoQueue objectAtIndex:index];
// Struct copy into the caller's storage; `framePointer` presumably comes from
// [value pointerValue] in an elided line — confirm.
658 *frame = *framePointer;
660 [videoQueue removeObjectAtIndex:index];
// Reverse playback: decode the preceding chunk of the movie.
675 [
self seekToSecond:seek withRange:rewindIncrement frame:NULL];
679 return [videoQueue count] > 0;
// Audio-frame assembly interior (method header elided): keeps pulling decoded
// audio buffers until the caller's frame is full, de-interleaving one channel
// (`i`, from an elided outer loop) out of the interleaved Float32 stream.
702 while(sampleIndex < sampleCapacity - 1)
706 if(![
self copyNextAudioSampleBuffer])
// Copy `copyLength` samples of channel `i` from the interleaved buffer.
719 for(
int iterator = 0; iterator < copyLength; iterator++)
721 frame_samples[frame_sample_index] =
audioBuffer[buffer_sample_index * audioChannelCount + i];
723 frame_sample_index++;
724 buffer_sample_index++;
728 sampleIndex += copyLength;
// Free callback handed to VuoImage_makeClientOwned-style constructors
// (used at line 960 below); body is elided from this listing.
740 static void VuoAvPlayerObject_freeCallback(
VuoImage imageToFree)
// copyNextVideoSampleBuffer interior (method header elided).  Two decode
// paths: (1) Hap — upload the DXT plane(s) as compressed GL textures and,
// for YCoCg variants, run a colorspace-conversion shader; (2) ordinary
// codecs — convert the decoded CVPixelBuffer to a GL texture via the
// CVOpenGLTextureCache created earlier.  The finished frame is queued.
747 if( [
assetReader status] == AVAssetReaderStatusReading )
// --- Hap path ------------------------------------------------------------
// newHapFrame returns nil for non-Hap tracks (elided branch presumably
// falls through to the pixel-buffer path below — confirm).
752 NSObject<VuoAvTrackHapFrame> *hapFrame = [assetReaderVideoTrackOutput newHapFrame];
757 timestamp = CMTimeGetSeconds(hapFrame.presentationTime);
// Hap Q+Alpha carries two DXT planes (color + alpha); plain Hap carries one.
759 int dxtPlaneCount = hapFrame.dxtPlaneCount;
760 OSType *dxtPixelFormats = hapFrame.dxtPixelFormats;
761 void **dxtBaseAddresses = hapFrame.dxtDatas;
763 if (dxtPlaneCount > 2)
765 VUserLog(
"Error: This image has %d planes, which isn't part of the Hap spec.", dxtPlaneCount);
// Map each Hap pixel format onto the matching GL compressed internal format.
771 for (
int i = 0; i < dxtPlaneCount; ++i)
773 GLuint internalFormat;
774 unsigned int bitsPerPixel;
775 switch (dxtPixelFormats[i])
777 case kHapCVPixelFormat_RGB_DXT1:
778 internalFormat = HapTextureFormat_RGB_DXT1;
782 case kHapCVPixelFormat_RGBA_DXT5:
783 case kHapCVPixelFormat_YCoCg_DXT5:
784 internalFormat = HapTextureFormat_RGBA_DXT5;
788 case kHapCVPixelFormat_CoCgXY:
791 internalFormat = HapTextureFormat_RGBA_DXT5;
796 internalFormat = HapTextureFormat_A_RGTC1;
801 case kHapCVPixelFormat_YCoCg_DXT5_A_RGTC1:
804 internalFormat = HapTextureFormat_RGBA_DXT5;
809 internalFormat = HapTextureFormat_A_RGTC1;
814 case kHapCVPixelFormat_A_RGTC1:
815 internalFormat = HapTextureFormat_A_RGTC1;
// NoAllocation: glCompressedTexImage2D below supplies the storage.
824 GLuint texture =
VuoGlTexturePool_use(cgl_ctx, VuoGlTexturePool_NoAllocation, GL_TEXTURE_2D, internalFormat, hapFrame.dxtImgSize.width, hapFrame.dxtImgSize.height, GL_RGBA, NULL);
825 glBindTexture(GL_TEXTURE_2D, texture);
// DXT data is tightly packed; bitsPerPixel is set per-format above.
827 size_t bytesPerRow = (hapFrame.dxtImgSize.width * bitsPerPixel) / 8;
828 GLsizei dataLength = (int)(bytesPerRow * hapFrame.dxtImgSize.height);
// Client-storage hint avoids an extra copy of the compressed data.
830 glPixelStorei(GL_UNPACK_CLIENT_STORAGE_APPLE, GL_TRUE);
832 glCompressedTexImage2D(GL_TEXTURE_2D, 0, internalFormat, hapFrame.dxtImgSize.width, hapFrame.dxtImgSize.height, 0, dataLength, dxtBaseAddresses[i]);
834 glPixelStorei(GL_UNPACK_CLIENT_STORAGE_APPLE, GL_FALSE);
835 glBindTexture(GL_TEXTURE_2D, 0);
837 dxtImage[i] =
VuoImage_make(texture, internalFormat, hapFrame.dxtImgSize.width, hapFrame.dxtImgSize.height);
// Plain Hap / Hap Alpha need no conversion (body elided).
840 if (hapFrame.codecSubType == kHapCodecSubType
841 || hapFrame.codecSubType == kHapAlphaCodecSubType)
// Hap Q: convert scaled YCoCg to RGB in a fragment shader.
843 else if (hapFrame.codecSubType == kHapYCoCgCodecSubType)
847 uniform sampler2D cocgsy_src;
// -128/255: YCoCg chroma offsets baked into the DXT encoding.
848 const vec4 offsets = vec4(-0.50196078431373, -0.50196078431373, 0.0, 0.0);
849 varying vec2 fragmentTextureCoordinate;
852 vec4 CoCgSY = texture2D(cocgsy_src, vec2(fragmentTextureCoordinate.x, 1. - fragmentTextureCoordinate.y));
856 float scale = ( CoCgSY.z * ( 255.0 / 8.0 ) ) + 1.0;
858 float Co = CoCgSY.x / scale;
859 float Cg = CoCgSY.y / scale;
862 gl_FragColor = vec4(Y + Co - Cg, Y + Cg, Y - Co - Cg, 1.0);
870 image =
VuoImageRenderer_render(shader, dxtImage[0]->pixelsWide, dxtImage[0]->pixelsHigh, VuoImageColorDepth_8);
// Hap Q Alpha: same conversion plus a separate RGTC1 alpha plane.
873 else if (hapFrame.codecSubType == kHapYCoCgACodecSubType)
877 uniform sampler2D cocgsy_src;
878 uniform sampler2D alpha_src;
879 const vec4 offsets = vec4(-0.50196078431373, -0.50196078431373, 0.0, 0.0);
880 varying vec2 fragmentTextureCoordinate;
883 vec2 tc = vec2(fragmentTextureCoordinate.x, 1. - fragmentTextureCoordinate.y);
884 vec4 CoCgSY = texture2D(cocgsy_src, tc);
885 float alpha = texture2D(alpha_src, tc).r;
889 float scale = ( CoCgSY.z * ( 255.0 / 8.0 ) ) + 1.0;
891 float Co = CoCgSY.x / scale;
892 float Cg = CoCgSY.y / scale;
895 gl_FragColor = vec4(Y + Co - Cg, Y + Cg, Y - Co - Cg, alpha);
904 image =
VuoImageRenderer_render(shader, dxtImage[0]->pixelsWide, dxtImage[0]->pixelsHigh, VuoImageColorDepth_8);
// --- Non-Hap path --------------------------------------------------------
923 CMSampleBufferRef sampleBuffer = [assetReaderVideoTrackOutput copyNextSampleBuffer];
928 timestamp = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
929 duration = CMTimeGetSeconds(CMSampleBufferGetDuration(sampleBuffer));
// Get-function: `buffer` is not owned by us; only sampleBuffer is released.
933 CVPixelBufferRef buffer = (CVPixelBufferRef) CMSampleBufferGetImageBuffer(sampleBuffer);
// Elided branch presumably handles buffer == NULL — confirm.
936 CMSampleBufferInvalidate(sampleBuffer);
937 CFRelease(sampleBuffer);
941 __block CVOpenGLTextureRef texture;
942 __block CVReturn ret;
// Zero-copy (when possible) conversion using the cache built in init.
944 ret = CVOpenGLTextureCacheCreateTextureFromImage(NULL,
textureCache, buffer, NULL, &texture);
946 if (ret != kCVReturnSuccess)
949 VUserLog(
"Error: Couldn't convert CVImageBuffer to texture: %s", error);
// NOTE(review): sampleBuffer is also invalidated/released at lines 936/969 —
// the elided control flow must ensure only one of these paths runs; verify
// against the full source.
951 CMSampleBufferInvalidate(sampleBuffer);
952 CFRelease(sampleBuffer);
// CVOpenGLTextureCache yields GL_TEXTURE_RECTANGLE textures, so copy into a
// normal 2D image, flipping if Core Video says the texture is upside down.
956 CVOpenGLTextureGetName(texture),
958 CVPixelBufferGetWidth(buffer),
959 CVPixelBufferGetHeight(buffer),
960 VuoAvPlayerObject_freeCallback, NULL);
962 image =
VuoImage_makeCopy(rectImage, CVOpenGLTextureIsFlipped(texture), 0, 0,
false);
963 CVOpenGLTextureRelease(texture);
969 CMSampleBufferInvalidate(sampleBuffer);
970 CFRelease(sampleBuffer);
// Queue the finished frame; `frame` is heap-allocated in elided lines.
976 NSValue* val = [NSValue valueWithPointer:frame];
977 [videoQueue addObject:val];
981 else if ([
assetReader status] == AVAssetReaderStatusFailed)
983 VUserLog(
"Error: AVAssetReader failed: %s. %s.",
984 [[[
assetReader error] localizedDescription] UTF8String],
985 [[[
assetReader error] localizedFailureReason] UTF8String]);
// Pulls the next audio sample buffer from the reader, extracts its
// AudioBufferList (32-bit float PCM per the output settings above), and
// copies the samples into `audioBuffer` for later per-channel extraction.
// NOTE(review): the method continues past the end of this listing; comments
// cover only the visible lines.
995 - (bool) copyNextAudioSampleBuffer
997 if([
assetReader status] == AVAssetReaderStatusReading)
999 CMSampleBufferRef audioSampleBuffer = [assetReaderAudioTrackOutput copyNextSampleBuffer];
// NULL means the track is exhausted (or the reader was cancelled).
1003 if(audioSampleBuffer == NULL)
1010 audioTimestamp = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(audioSampleBuffer));
1012 CMBlockBufferRef audioBlockBuffer = CMSampleBufferGetDataBuffer( audioSampleBuffer );
// First call (with elided NULL-list args, presumably) only measures the
// required AudioBufferList size; the second call actually fills it.
1015 size_t bufferSize = 0;
1017 CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
1028 AudioBufferList* audioBufferList = malloc(bufferSize);
1030 OSStatus err = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
1037 kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
1043 VUserLog(
"AvFoundation failed extracting audio buffer: %i", err);
// Error path: free the list and the sample buffer before bailing.
1046 free(audioBufferList);
1047 CFRelease(audioSampleBuffer);
// Interleaved PCM: all channels live in mBuffers[0].
1069 const int curBuffer = 0;
1072 size_t dataByteSize = audioBufferList->mBuffers[curBuffer].mDataByteSize;
// Allocation-failure path (condition elided).
1079 free(audioBufferList);
1080 CFRelease(audioSampleBuffer);
1082 VUserLog(
"AvFoundation video decoder is out of memory.");
1089 Float32* samples = (Float32 *)audioBufferList->mBuffers[curBuffer].mData;
1090 memcpy(
audioBuffer, samples, audioBufferList->mBuffers[curBuffer].mDataByteSize);
1092 free(audioBufferList);
1093 CFRelease(audioSampleBuffer);
1101 VUserLog(
"Warning: AVFoundation reported %d audio channel%s, but actually decoded %d channel%s.",