13 #include <OpenGL/OpenGL.h>
14 #include <OpenGL/CGLMacro.h>
18 #include <QuartzCore/CoreImage.h>
19 #include <Quartz/Quartz.h>
20 #include <QuartzCore/CVImageBuffer.h>
22 #include <Accelerate/Accelerate.h>
26 #include "HapInAVFoundation.h"
30 "title" :
"VuoAvPlayerObject",
34 "AVFoundation.framework",
36 "CoreVideo.framework",
37 "CoreMedia.framework",
39 "Accelerate.framework",
43 "QuartzCore.framework",
// Declares the initializer provided by HapInAVFoundation's AVPlayerItemHapDXTOutput,
// which this file loads at runtime via NSClassFromString.
79 - (id)initWithHapAssetTrack:(AVAssetTrack *)track;
95 - (id)initWithTrack:(AVAssetTrack *)track outputSettings:(NSDictionary *)settings
// Scan the track's format descriptions to detect whether it uses a Hap codec.
98 for (
id formatDescription in track.formatDescriptions)
100 CMFormatDescriptionRef desc = (CMFormatDescriptionRef) formatDescription;
101 CMVideoCodecType codec = CMFormatDescriptionGetMediaSubType(desc);
// Hap tracks get nil output settings (frames must stay compressed for the
// Hap DXT output); all other tracks use the caller-provided settings.
112 if (
self = [super initWithTrack:track outputSettings:(isHap ? nil : settings)])
// Look for HapInAVFoundation.framework inside Vuo.framework first…
116 NSBundle *f = [NSBundle bundleWithPath:[NSString stringWithFormat:@"%s/Vuo.framework/Versions/%s/Frameworks/HapInAVFoundation.framework",
117 VuoGetFrameworkPath(),
118 VUO_FRAMEWORK_VERSION_STRING]];
// …then fall back to VuoRunner.framework.
121 f = [NSBundle bundleWithPath:[NSString stringWithFormat:@"%s/VuoRunner.framework/Versions/%s/Frameworks/HapInAVFoundation.framework",
122 VuoGetRunnerFrameworkPath(),
123 VUO_FRAMEWORK_VERSION_STRING]];
126 VUserLog(
"Error: Playing this movie requires HapInAVFoundation.framework, but I can't find it.");
// Dynamically load the framework; on failure, prefer the underlying error
// (when present) for a more specific log message.
134 bool status = [f loadAndReturnError:&error];
137 NSError *underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
139 error = underlyingError;
140 VUserLog(
"Error: Playing this movie requires HapInAVFoundation.framework, but it wouldn't load: %s", [[error localizedDescription] UTF8String]);
// The class is resolved by name because the framework was loaded at runtime.
145 hapOutput = [[NSClassFromString(@"AVPlayerItemHapDXTOutput") alloc] initWithHapAssetTrack:track];
// NOTE(review): fragment of a sample-buffer override — pulls the next
// compressed sample from the superclass and wraps it in a Hap DXT frame.
156 CMSampleBufferRef buffer = [
super copyNextSampleBuffer];
161 return [hapOutput allocFrameForHapSampleBuffer:buffer];
188 #pragma clang diagnostic push
189 #pragma clang diagnostic ignored "-Wdeprecated-declarations"
// CVOpenGLTextureCache is deprecated on macOS but still required for the
// GL-backed decode path, hence the diagnostic guards.
191 __block CVReturn ret;
193 ret = CVOpenGLTextureCacheCreate(NULL, NULL, cgl_ctx, pf, NULL, &
textureCache);
// The pixel format was only needed to create the cache; release it now.
195 CGLReleasePixelFormat(pf);
196 #pragma clang diagnostic pop
197 if (ret != kCVReturnSuccess)
199 VUserLog(
"Error: Couldn't create texture cache: %d", ret);
/**
 * Releases @c reader on a background queue after a short delay,
 * cancelling any in-progress read first.
 *
 * The delay gives any decode call currently using the reader time to finish
 * before the reader is cancelled and released (manual retain/release file).
 */
+ (void)releaseAssetReader:(AVAssetReader *)reader
{
	AVAssetReader *doomedReader = reader;
	dispatch_time_t when = dispatch_time(DISPATCH_TIME_NOW, .2 * NSEC_PER_SEC);
	dispatch_after(when, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
		if (doomedReader.status == AVAssetReaderStatusReading)
			[doomedReader cancelReading];
		[doomedReader release];
	});
}
// NOTE(review): dealloc fragment — stops any pending asset load, frees queued
// frames, and releases the video queue (this file uses manual retain/release).
242 [asset cancelLoading];
250 #pragma clang diagnostic push
251 #pragma clang diagnostic ignored "-Wdeprecated-declarations"
253 #pragma clang diagnostic pop
256 [
self clearFrameQueue];
258 [videoQueue release];
// Loads the movie at `url` and prepares it for decoding.
// Returns false if the asset can't be found, is protected, or is unreadable.
269 - (bool) setURL:(NSURL*)url
271 asset = [AVAsset assetWithURL:url];
275 VUserLog(
"VuoVideoDecoder: AvFoundation decoder could not find movie file at path: %s", [[url absoluteString] UTF8String]);
285 bool isPlayable =
true;
// DRM-protected or unreadable assets can't be decoded; bail early.
289 bool isProtected = [asset hasProtectedContent];
291 if (!isPlayable || isProtected || ![
asset isReadable])
293 VUserLog(
"AvFoundation cannot play this asset (isPlayable=%d, hasProtectedContent=%d, isReadable=%d).",
// Log diagnostic details about the first video track's codec and pixel format.
300 NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
301 if (videoTracks.count)
303 AVAssetTrack *videoTrack = videoTracks[0];
304 CMFormatDescriptionRef desc = (CMFormatDescriptionRef)videoTrack.formatDescriptions[0];
305 CMVideoCodecType codec = CMFormatDescriptionGetMediaSubType(desc);
313 CFBooleanRef containsAlphaChannel = CMFormatDescriptionGetExtension(desc, CFSTR(
"ContainsAlphaChannel"));
314 if (containsAlphaChannel == kCFBooleanTrue)
321 int bitsPerComponent = ((NSNumber *)CMFormatDescriptionGetExtension(desc, CFSTR(
"BitsPerComponent"))).intValue;
322 int depth = ((NSNumber *)CMFormatDescriptionGetExtension(desc, CFSTR(
"Depth"))).intValue;
328 VUserLog(
"codec=\"%s\" (%s) bpc=%d depth=%d hasAlpha=%d %s",
334 (
hasAlpha && alphaMode) ? alphaMode :
"");
// Asynchronously load the asset keys needed before playback can begin.
342 NSArray* assetKeys = @[@"playable", @"hasProtectedContent", @"tracks"];
353 [asset loadValuesAsynchronouslyForKeys: assetKeys completionHandler:^(void)
// The completion handler runs on an arbitrary thread, so attach the
// composition state to thread-local storage for the duration of the block.
355 vuoAddCompositionStateToThreadLocalStorage(compositionState);
358 NSError *error = nil;
359 bool failedLoading = false;
// If any requested key failed or was cancelled, treat the whole load as failed.
361 for (NSString *key in assetKeys)
363 NSInteger status = [asset statusOfValueForKey:key error:&error];
366 status == AVKeyValueStatusFailed ||
367 status == AVKeyValueStatusCancelled )
369 VUserLog("AVFoundation failed loading asset.");
370 failedLoading = true;
// Ready only if the keys loaded and an asset reader covering the full
// duration could be started.
374 isReady = !failedLoading && [
self setAssetReaderTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration])];
// Pre-buffer a few video frames to confirm decoding actually works.
378 for(int i = 0; i < 3; i++)
380 if(![
self copyNextVideoSampleBuffer])
382 VUserLog("AvFoundation successfully loaded but failed to extract a video buffer.");
// Notify the C++ decoder object (if still registered) whether playback is possible.
389 if( self != nil && readyToPlayCallback != NULL && avDecoderCppObject != NULL )
390 readyToPlayCallback( avDecoderCppObject, isReady );
393 vuoRemoveCompositionStateFromThreadLocalStorage();
// (Re)creates the AVAssetReader restricted to `timeRange`, attaching video and
// audio track outputs, then starts reading.  Returns the result of startReading.
414 - (bool) setAssetReaderTimeRange:(CMTimeRange)timeRange
416 NSError *error = nil;
426 NSArray* videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
428 if ([videoTracks count] != 0)
430 AVAssetTrack* vidtrack = [videoTracks objectAtIndex:0];
447 assetReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
// Retain since the factory method returns an autoreleased object (MRC file).
449 [assetReader retain];
452 VUserLog(
"AVAssetReader failed initialization: %s", [[error localizedDescription] UTF8String]);
// Decode video into BGRA pixel buffers that are OpenGL-compatible.
458 NSDictionary *videoOutputSettings = @{
459 (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
460 (NSString *)kCVPixelBufferOpenGLCompatibilityKey: @(YES),
464 outputSettings:videoOutputSettings];
470 [assetReader addOutput:assetReaderVideoTrackOutput];
474 VUserLog(
"AVFoundation Video Decoder: assetReaderVideoTrackOutput cannot be added to assetReader.");
// Decode audio to interleaved 32-bit float PCM at Vuo's sample rate.
480 NSDictionary* audioOutputSettings;
482 audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
483 [NSNumber numberWithInt: kAudioFormatLinearPCM ], AVFormatIDKey,
484 [NSNumber numberWithFloat: VuoAudioSamples_sampleRate], AVSampleRateKey,
485 [NSNumber numberWithInt:32], AVLinearPCMBitDepthKey,
486 [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
487 [NSNumber numberWithBool:YES], AVLinearPCMIsFloatKey,
488 [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
493 for(AVAssetTrack* track in [
asset tracksWithMediaType:AVMediaTypeAudio])
510 [assetReader addOutput:assetReaderAudioTrackOutput];
516 VUserLog(
"AVFoundation Video Decoder: AssetReaderAudioTrackOutput cannot be added to assetReader.");
523 return [assetReader startReading];
// Returns the channel count from the first usable audio format description
// on `track`.
532 - (
unsigned int) getAudioChannelCount:(AVAssetTrack*) track
534 NSArray* formatDesc = track.formatDescriptions;
536 for(
unsigned int i = 0; i < [formatDesc count]; ++i)
538 CMAudioFormatDescriptionRef item = (CMAudioFormatDescriptionRef)[formatDesc objectAtIndex:i];
539 const AudioStreamBasicDescription* audioDescription = CMAudioFormatDescriptionGetStreamBasicDescription (item);
// The stream description can be NULL for some formats; skip those.
541 if(audioDescription != nil)
543 return audioDescription->mChannelsPerFrame;
// Changes the playback rate.  NOTE(review): fragment — the visible code
// re-seeks to the current timestamp and flushes queued frames, presumably so
// the asset reader restarts in the new direction; confirm against full source.
550 - (void) setPlaybackRate:(
double)rate
557 [
self seekToSecond:videoTimestamp withRange:-1 frame:NULL];
559 [
self clearFrameQueue];
// Empties the queued video frames.  NOTE(review): the loop body that disposes
// each dequeued VuoVideoFrame is not visible in this fragment.
575 - (void) clearFrameQueue
577 for(
int i = 0; i < [videoQueue count]; i++)
579 VuoVideoFrame* frame = [[videoQueue objectAtIndex:i] pointerValue];
584 [videoQueue removeAllObjects];
// Seeks to `second`.  A negative `range` means "to the end of the asset";
// otherwise the reader is limited to `range` seconds.  If `frame` is non-NULL
// it receives the frame at the seek target (timestamp zeroed on failure).
590 - (bool) seekToSecond:(
float)second withRange:(
float)range frame:(
VuoVideoFrame *)frame
// Discard frames decoded for the previous position.
595 [
self clearFrameQueue];
603 CMTime cmsec = CMTimeMakeWithSeconds(second, NSEC_PER_SEC);
605 [
self setAssetReaderTimeRange: CMTimeRangeMake(cmsec, range < 0 ? [asset duration] : CMTimeMakeWithSeconds(range, NSEC_PER_SEC))];
// Prime one video and one audio buffer at the new position.
608 [
self copyNextVideoSampleBuffer];
609 [
self copyNextAudioSampleBuffer];
621 if (![
self nextVideoFrame:frame])
624 frame->timestamp = 0;
// Registers the C callback (and its C++ decoder object) to be invoked when the
// asset's readiness to play is determined.
631 - (void) setPlayerCallback:(
void (*)(
void* functionPtr,
bool canPlayMedia))callback target:(
void*)decoderCppObject
// NOTE(review): frame-dequeue fragment — takes the next frame from the head of
// the queue when playing forward, or the tail when playing in reverse.
662 int index =
playbackRate < 0 ? [videoQueue count] - 1 : 0;
664 NSValue* value = [videoQueue objectAtIndex:index];
666 *frame = *framePointer;
668 [videoQueue removeObjectAtIndex:index];
// When the queue runs dry (reverse playback), re-seek backward by the rewind
// increment to decode the preceding chunk — presumably; confirm in full source.
683 [
self seekToSecond:seek withRange:rewindIncrement frame:NULL];
687 return [videoQueue count] > 0;
// NOTE(review): audio accumulation fragment — keeps copying decoded samples
// into the output frame until its capacity is reached, de-interleaving one
// channel (`i`) from the interleaved decode buffer.
710 while(sampleIndex < sampleCapacity - 1)
714 if(![
self copyNextAudioSampleBuffer])
727 for(
int iterator = 0; iterator < copyLength; iterator++)
729 frame_samples[frame_sample_index] =
audioBuffer[buffer_sample_index * audioChannelCount + i];
731 frame_sample_index++;
732 buffer_sample_index++;
736 sampleIndex += copyLength;
// Callback invoked when a VuoImage wrapping a decoded frame is released.
748 static void VuoAvPlayerObject_freeCallback(
VuoImage imageToFree)
// Decodes the next video frame and appends it to videoQueue.
// Two paths: Hap (compressed DXT planes uploaded directly / converted via
// shader) and the standard CVPixelBuffer → CVOpenGLTexture path.
755 if( [
assetReader status] == AVAssetReaderStatusReading )
// --- Hap path: fetch a frame of pre-compressed DXT plane data. ---
760 NSObject<VuoAvTrackHapFrame> *hapFrame = [assetReaderVideoTrackOutput newHapFrame];
765 timestamp = CMTimeGetSeconds(hapFrame.presentationTime);
767 int dxtPlaneCount = hapFrame.dxtPlaneCount;
768 OSType *dxtPixelFormats = hapFrame.dxtPixelFormats;
769 void **dxtBaseAddresses = hapFrame.dxtDatas;
// The Hap spec defines at most 2 planes (color + alpha).
771 if (dxtPlaneCount > 2)
773 VUserLog(
"Error: This image has %d planes, which isn't part of the Hap spec.", dxtPlaneCount);
// Upload each DXT plane as a compressed GL texture.
779 for (
int i = 0; i < dxtPlaneCount; ++i)
781 GLuint internalFormat;
782 unsigned int bitsPerPixel;
// Map the Hap pixel format of this plane to a GL compressed internal format.
783 switch (dxtPixelFormats[i])
785 case kHapCVPixelFormat_RGB_DXT1:
786 internalFormat = HapTextureFormat_RGB_DXT1;
790 case kHapCVPixelFormat_RGBA_DXT5:
791 case kHapCVPixelFormat_YCoCg_DXT5:
792 internalFormat = HapTextureFormat_RGBA_DXT5;
796 case kHapCVPixelFormat_CoCgXY:
799 internalFormat = HapTextureFormat_RGBA_DXT5;
804 internalFormat = HapTextureFormat_A_RGTC1;
809 case kHapCVPixelFormat_YCoCg_DXT5_A_RGTC1:
812 internalFormat = HapTextureFormat_RGBA_DXT5;
817 internalFormat = HapTextureFormat_A_RGTC1;
822 case kHapCVPixelFormat_A_RGTC1:
823 internalFormat = HapTextureFormat_A_RGTC1;
// NoAllocation: glCompressedTexImage2D below supplies the storage.
832 GLuint texture =
VuoGlTexturePool_use(cgl_ctx, VuoGlTexturePool_NoAllocation, GL_TEXTURE_2D, internalFormat, hapFrame.dxtImgSize.width, hapFrame.dxtImgSize.height, GL_RGBA, NULL);
833 glBindTexture(GL_TEXTURE_2D, texture);
835 size_t bytesPerRow = (hapFrame.dxtImgSize.width * bitsPerPixel) / 8;
836 GLsizei dataLength = (int)(bytesPerRow * hapFrame.dxtImgSize.height);
// Client-storage hint avoids an extra copy of the compressed data.
838 glPixelStorei(GL_UNPACK_CLIENT_STORAGE_APPLE, GL_TRUE);
840 glCompressedTexImage2D(GL_TEXTURE_2D, 0, internalFormat, hapFrame.dxtImgSize.width, hapFrame.dxtImgSize.height, 0, dataLength, dxtBaseAddresses[i]);
842 glPixelStorei(GL_UNPACK_CLIENT_STORAGE_APPLE, GL_FALSE);
843 glBindTexture(GL_TEXTURE_2D, 0);
845 dxtImage[i] =
VuoImage_make(texture, internalFormat, hapFrame.dxtImgSize.width, hapFrame.dxtImgSize.height);
// Plain Hap / Hap Alpha planes are usable as-is.
848 if (hapFrame.codecSubType == kHapCodecSubType
849 || hapFrame.codecSubType == kHapAlphaCodecSubType)
// Hap Q (YCoCg) requires a shader pass to convert to RGB.
851 else if (hapFrame.codecSubType == kHapYCoCgCodecSubType)
855 uniform sampler2D cocgsy_src;
856 const vec4 offsets = vec4(-0.50196078431373, -0.50196078431373, 0.0, 0.0);
857 varying vec2 fragmentTextureCoordinate;
860 vec4 CoCgSY = texture2D(cocgsy_src, vec2(fragmentTextureCoordinate.x, 1. - fragmentTextureCoordinate.y));
864 float scale = ( CoCgSY.z * ( 255.0 / 8.0 ) ) + 1.0;
866 float Co = CoCgSY.x / scale;
867 float Cg = CoCgSY.y / scale;
870 gl_FragColor = vec4(Y + Co - Cg, Y + Cg, Y - Co - Cg, 1.0);
878 image =
VuoImageRenderer_render(shader, dxtImage[0]->pixelsWide, dxtImage[0]->pixelsHigh, VuoImageColorDepth_8);
// Hap Q Alpha: same YCoCg→RGB conversion plus a separate alpha plane.
881 else if (hapFrame.codecSubType == kHapYCoCgACodecSubType)
885 uniform sampler2D cocgsy_src;
886 uniform sampler2D alpha_src;
887 const vec4 offsets = vec4(-0.50196078431373, -0.50196078431373, 0.0, 0.0);
888 varying vec2 fragmentTextureCoordinate;
891 vec2 tc = vec2(fragmentTextureCoordinate.x, 1. - fragmentTextureCoordinate.y);
892 vec4 CoCgSY = texture2D(cocgsy_src, tc);
893 float alpha = texture2D(alpha_src, tc).r;
897 float scale = ( CoCgSY.z * ( 255.0 / 8.0 ) ) + 1.0;
899 float Co = CoCgSY.x / scale;
900 float Cg = CoCgSY.y / scale;
903 gl_FragColor = vec4(Y + Co - Cg, Y + Cg, Y - Co - Cg, alpha);
912 image =
VuoImageRenderer_render(shader, dxtImage[0]->pixelsWide, dxtImage[0]->pixelsHigh, VuoImageColorDepth_8);
// --- Standard path: copy the next decoded sample buffer. ---
931 CMSampleBufferRef sampleBuffer = [assetReaderVideoTrackOutput copyNextSampleBuffer];
936 timestamp = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
937 duration = CMTimeGetSeconds(CMSampleBufferGetDuration(sampleBuffer));
941 CVPixelBufferRef buffer = (CVPixelBufferRef) CMSampleBufferGetImageBuffer(sampleBuffer);
944 CMSampleBufferInvalidate(sampleBuffer);
945 CFRelease(sampleBuffer);
949 #pragma clang diagnostic push
950 #pragma clang diagnostic ignored "-Wdeprecated-declarations"
951 __block CVOpenGLTextureRef texture;
952 __block CVReturn ret;
// Wrap the pixel buffer in a GL texture via the (deprecated) texture cache.
954 ret = CVOpenGLTextureCacheCreateTextureFromImage(NULL,
textureCache, buffer, NULL, &texture);
956 if (ret != kCVReturnSuccess)
959 VUserLog(
"Error: Couldn't convert CVImageBuffer to texture: %s", error);
961 CMSampleBufferInvalidate(sampleBuffer);
962 CFRelease(sampleBuffer);
966 CVOpenGLTextureGetName(texture),
968 CVPixelBufferGetWidth(buffer),
969 CVPixelBufferGetHeight(buffer),
970 VuoAvPlayerObject_freeCallback, NULL);
// Copy out of the cache-owned texture (flipping if needed) so the cache
// texture can be released immediately.
972 image =
VuoImage_makeCopy(rectImage, CVOpenGLTextureIsFlipped(texture), 0, 0,
false);
973 CVOpenGLTextureRelease(texture);
978 #pragma clang diagnostic pop
980 CMSampleBufferInvalidate(sampleBuffer);
981 CFRelease(sampleBuffer);
// Enqueue the finished frame (stored by pointer; freed in clearFrameQueue).
987 NSValue* val = [NSValue valueWithPointer:frame];
988 [videoQueue addObject:val];
992 else if ([
assetReader status] == AVAssetReaderStatusFailed)
994 VUserLog(
"Error: AVAssetReader failed: %s. %s.",
995 [[[
assetReader error] localizedDescription] UTF8String],
996 [[[
assetReader error] localizedFailureReason] UTF8String]);
// Decodes the next audio sample buffer into the interleaved float
// `audioBuffer`, recording its presentation timestamp.
1006 - (bool) copyNextAudioSampleBuffer
1008 if([
assetReader status] == AVAssetReaderStatusReading)
1010 CMSampleBufferRef audioSampleBuffer = [assetReaderAudioTrackOutput copyNextSampleBuffer];
// NULL means the reader has no more audio samples in the current range.
1014 if(audioSampleBuffer == NULL)
1021 audioTimestamp = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(audioSampleBuffer));
1023 CMBlockBufferRef audioBlockBuffer = CMSampleBufferGetDataBuffer( audioSampleBuffer );
// First call sizes the AudioBufferList; second call actually fills it.
1026 size_t bufferSize = 0;
1028 CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
1039 AudioBufferList* audioBufferList = malloc(bufferSize);
1041 OSStatus err = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
1048 kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
1054 VUserLog(
"AvFoundation failed extracting audio buffer: %i", err);
// Failure path: release everything acquired so far.
1057 free(audioBufferList);
1058 CFRelease(audioBlockBuffer);
1059 CFRelease(audioSampleBuffer);
// Only the first buffer is used (samples are interleaved — see the
// AVLinearPCMIsNonInterleaved:NO output settings).
1081 const int curBuffer = 0;
1084 size_t dataByteSize = audioBufferList->mBuffers[curBuffer].mDataByteSize;
1091 free(audioBufferList);
1092 CFRelease(audioBlockBuffer);
1093 CFRelease(audioSampleBuffer);
1095 VUserLog(
"AvFoundation video decoder is out of memory.");
// Copy the decoded float samples into the decoder's working buffer.
1102 Float32* samples = (Float32 *)audioBufferList->mBuffers[curBuffer].mData;
1103 memcpy(
audioBuffer, samples, audioBufferList->mBuffers[curBuffer].mDataByteSize);
1105 free(audioBufferList);
1106 CFRelease(audioBlockBuffer);
1107 CFRelease(audioSampleBuffer);
1115 VUserLog(
"Warning: AVFoundation reported %d audio channel%s, but actually decoded %d channel%s.",