
modules/media/src/main/native/jfxmedia/platform/osx/avf/AVFAudioProcessor.mm

rev 9888 : 8145602: Remove QTKit based media player
Summary: Removed obsolete QTKit based code, updated AVFoundation code to use a later SDK
Reviewed-by: almatvee
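
For context: MTAudioProcessingTap became a public MediaToolbox.framework API in OS X 10.9, so the tap can now be built against <MediaToolbox/MediaToolbox.h> and created directly, instead of being resolved at runtime with dlsym as the removed code did. The sketch below only illustrates that public API; it is not code from this patch, the names MyInit, MyProcess, CreateAudioMix and playerItem are hypothetical, and error handling is omitted.

    #import <AVFoundation/AVFoundation.h>
    #import <MediaToolbox/MediaToolbox.h>

    // Illustrative pass-through callbacks; the patch supplies its own
    // (InitAudioTap, PrepareAudioTap, ProcessAudioTap, ...).
    static void MyInit(MTAudioProcessingTapRef tap, void *clientInfo, void **tapStorageOut) {
        *tapStorageOut = clientInfo; // per-tap storage, retrieved later with MTAudioProcessingTapGetStorage
    }

    static void MyProcess(MTAudioProcessingTapRef tap, CMItemCount numberFrames,
                          MTAudioProcessingTapFlags flags, AudioBufferList *bufferListInOut,
                          CMItemCount *numberFramesOut, MTAudioProcessingTapFlags *flagsOut) {
        // Simplest possible tap: fetch the source audio and pass it through untouched.
        MTAudioProcessingTapGetSourceAudio(tap, numberFrames, bufferListInOut,
                                           flagsOut, NULL, numberFramesOut);
    }

    static AVAudioMix *CreateAudioMix(AVAssetTrack *audioTrack, AVPlayerItem *playerItem) {
        MTAudioProcessingTapCallbacks callbacks;
        callbacks.version = kMTAudioProcessingTapCallbacksVersion_0;
        callbacks.clientInfo = NULL;
        callbacks.init = MyInit;
        callbacks.finalize = NULL;   // the optional callbacks may be NULL
        callbacks.prepare = NULL;
        callbacks.unprepare = NULL;
        callbacks.process = MyProcess;

        MTAudioProcessingTapRef tap = NULL;
        if (noErr != MTAudioProcessingTapCreate(kCFAllocatorDefault, &callbacks,
                                                kMTAudioProcessingTapCreationFlag_PreEffects,
                                                &tap)) {
            return nil;
        }

        AVMutableAudioMixInputParameters *params =
            [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:audioTrack];
        params.audioTapProcessor = tap;   // same setter the patch invokes via objc_msgSend
        CFRelease(tap);                   // the input parameters retain the tap

        AVMutableAudioMix *mix = [AVMutableAudioMix audioMix];
        mix.inputParameters = @[ params ];
        playerItem.audioMix = mix;        // attach to the item being played
        return mix;
    }

The patch follows the same shape: fill in MTAudioProcessingTapCallbacks, create the tap with kMTAudioProcessingTapCreationFlag_PreEffects, and hand it to the track's AVMutableAudioMixInputParameters.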

*** 25,102 ****
  #import "AVFAudioProcessor.h"
  #import "AVFMediaPlayer.h"
  #import <AVFoundation/AVFoundation.h>
  #import "AVFKernelProcessor.h"
  #import <CoreFoundation/CoreFoundation.h>
  #import <pthread.h>
- #import <dlfcn.h>
  #import <objc/message.h>
! /*
!  * MTAudioProcessingTap is a feature new to 10.9 but also exists in
!  * MediaToolbox.framework in 10.8. Unfortunately the SDK we build with does not
!  * have the header file needed to compile our audio tap, so we will have to
!  * supply the missing pieces here. We will use dlsym to find the
!  * MTAudioProcessingTap calls we need, this will prevent crashing on systems that
!  * don't implement it.
!  */
! extern "C" {
! #pragma pack(push, 4)
!
! // This is MTAudioProcessingTapCallbacks in MediaToolbox.framework
! struct __MTAudioTapCallbacks {
!     int version;
!     void *clientInfo;
!     void (*init)(CFTypeRef tapRef, void *clientInfo, void **tapStorageOut);
!     void (*finalize)(CFTypeRef tapRef);
!     void (*prepare)(CFTypeRef tapRef, CMItemCount maxFrames, const AudioStreamBasicDescription *processingFormat);
!     void (*unprepare)(CFTypeRef tapRef);
!     void (*process)(CFTypeRef tapRef,
!                     CMItemCount numberFramesIn, uint32_t flagsIn,
!                     AudioBufferList *bufferListInOut,
!                     CMItemCount *numberFramesOut, uint32_t *flagsOut);
! };
!
! #pragma pack(pop)
! };
!
! typedef OSStatus (*AudioTapCreateProc)(CFAllocatorRef allocator,
!                                        const __MTAudioTapCallbacks *callbacks,
!                                        uint32_t flags,
!                                        CFTypeRef *tapOut);
! AudioTapCreateProc gAudioTapCreate = NULL;
!
! typedef void *(*AudioTapGetStorageProc)(CFTypeRef tap);
! AudioTapGetStorageProc gAudioTapGetStorage = NULL;
!
! typedef OSStatus (*AudioTapGetSourceAudioProc)(CFTypeRef tap,
!                                                CMItemCount numberFrames,
!                                                AudioBufferList *bufferListInOut,
!                                                uint32_t *flagsOut,
!                                                CMTimeRange *timeRangeOut,
!                                                CMItemCount *numberFramesOut);
! AudioTapGetSourceAudioProc gAudioTapGetSourceAudio = NULL;
!
! pthread_mutex_t gAVFTapProcsLock = PTHREAD_MUTEX_INITIALIZER;
!
! static void InitAudioTap(CFTypeRef tapRef, void *clientInfo, void **tapStorageOut);
! static void FinalizeAudioTap(CFTypeRef tapRef);
! static void PrepareAudioTap(CFTypeRef tapRef,
!                             CMItemCount maxFrames,
!                             const AudioStreamBasicDescription *processingFormat);
! static void UnprepareAudioTap(CFTypeRef tapRef);
! static void ProcessAudioTap(CFTypeRef tapRef, CMItemCount numberFrames,
!                             uint32_t /*MTAudioProcessingTapFlags*/ flags, AudioBufferList *bufferListInOut, CMItemCount *numberFramesOut,
!                             uint32_t /*MTAudioProcessingTapFlags*/ *flagsOut);
  static OSStatus AVFTapRenderCallback(void *inRefCon,
                                       AudioUnitRenderActionFlags *ioActionFlags,
                                       const AudioTimeStamp *inTimeStamp,
                                       UInt32 inBusNumber,
--- 25,53 ----
  #import "AVFAudioProcessor.h"
  #import "AVFMediaPlayer.h"
  #import <AVFoundation/AVFoundation.h>
+ #import <MediaToolbox/MediaToolbox.h>
  #import "AVFKernelProcessor.h"
  #import <CoreFoundation/CoreFoundation.h>
  #import <pthread.h>
  #import <objc/message.h>
! static void InitAudioTap(MTAudioProcessingTapRef tapRef, void *clientInfo, void **tapStorageOut);
! static void FinalizeAudioTap(MTAudioProcessingTapRef tapRef);
! static void PrepareAudioTap(MTAudioProcessingTapRef tapRef, CMItemCount maxFrames, const AudioStreamBasicDescription *processingFormat);
! static void UnprepareAudioTap(MTAudioProcessingTapRef tapRef);
! static void ProcessAudioTap(MTAudioProcessingTapRef tapRef, CMItemCount numberFrames,
!                             MTAudioProcessingTapFlags flags, AudioBufferList *bufferListInOut, CMItemCount *numberFramesOut,
!                             MTAudioProcessingTapFlags *flagsOut);
  static OSStatus AVFTapRenderCallback(void *inRefCon,
                                       AudioUnitRenderActionFlags *ioActionFlags,
                                       const AudioTimeStamp *inTimeStamp,
                                       UInt32 inBusNumber,
*** 128,163 ****
      AVFSoundLevelUnitPtr audioSLU;
      AVFAudioSpectrumUnitPtr audioSpectrum;
      AVFAudioEqualizerPtr audioEQ;
  };
- static bool FindAudioTap() {
-     static bool checkPerformed = false;
-
-     pthread_mutex_lock(&gAVFTapProcsLock);
-     if (!checkPerformed) {
-         if (!gAudioTapCreate) {
-             gAudioTapCreate = (AudioTapCreateProc)
-                 dlsym(RTLD_DEFAULT, "MTAudioProcessingTapCreate");
-         }
-         if (!gAudioTapGetStorage) {
-             gAudioTapGetStorage = (AudioTapGetStorageProc)
-                 dlsym(RTLD_DEFAULT, "MTAudioProcessingTapGetStorage");
-         }
-         if (!gAudioTapGetSourceAudio) {
-             gAudioTapGetSourceAudio = (AudioTapGetSourceAudioProc)
-                 dlsym(RTLD_DEFAULT, "MTAudioProcessingTapGetSourceAudio");
-         }
-         checkPerformed = true;
-     }
-     pthread_mutex_unlock(&gAVFTapProcsLock);
-
-     return (gAudioTapCreate != NULL)
-         && (gAudioTapGetStorage != NULL)
-         && (gAudioTapGetSourceAudio != NULL);
- }
-
  @implementation AVFAudioProcessor
  - (id) init {
      if ((self = [super init]) != nil) {
          _soundLevelUnit = AVFSoundLevelUnitPtr(new AVFSoundLevelUnit());
--- 79,88 ----
*** 188,221 ****
  - (AVAudioMix*) mixer {
      if (!self.audioTrack) {
          return nil;
      }
-     if (!FindAudioTap()) {
-         NSLog(@"Audio tap is not available, cannot post-process audio");
-         return nil;
-     }
      if (!_mixer) {
          AVMutableAudioMix *mixer = [AVMutableAudioMix audioMix];
          if (mixer) {
              AVMutableAudioMixInputParameters *audioMixInputParameters =
                  [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:self.audioTrack];
              if (audioMixInputParameters &&
                  [audioMixInputParameters respondsToSelector:@selector(setAudioTapProcessor:)]) {
!                 __MTAudioTapCallbacks callbacks;
!                 callbacks.version = 0; // kMTAudioProcessingTapCallbacksVersion_0
!                 callbacks.clientInfo = (__bridge void *)self,
                  callbacks.init = InitAudioTap;
                  callbacks.finalize = FinalizeAudioTap;
                  callbacks.prepare = PrepareAudioTap;
                  callbacks.unprepare = UnprepareAudioTap;
                  callbacks.process = ProcessAudioTap;
!                 CFTypeRef audioProcessingTap;
!                 if (noErr == gAudioTapCreate(kCFAllocatorDefault, &callbacks,
!                                              1, // kMTAudioProcessingTapCreationFlag_PreEffects
                                               &audioProcessingTap)) {
                      objc_msgSend(audioMixInputParameters,
                                   @selector(setAudioTapProcessor:),
                                   audioProcessingTap);
--- 113,142 ----
  - (AVAudioMix*) mixer {
      if (!self.audioTrack) {
          return nil;
      }
      if (!_mixer) {
          AVMutableAudioMix *mixer = [AVMutableAudioMix audioMix];
          if (mixer) {
              AVMutableAudioMixInputParameters *audioMixInputParameters =
                  [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:self.audioTrack];
              if (audioMixInputParameters &&
                  [audioMixInputParameters respondsToSelector:@selector(setAudioTapProcessor:)]) {
!                 MTAudioProcessingTapCallbacks callbacks;
!                 callbacks.version = kMTAudioProcessingTapCallbacksVersion_0;
!                 callbacks.clientInfo = (__bridge void *)self;
                  callbacks.init = InitAudioTap;
                  callbacks.finalize = FinalizeAudioTap;
                  callbacks.prepare = PrepareAudioTap;
                  callbacks.unprepare = UnprepareAudioTap;
                  callbacks.process = ProcessAudioTap;
!                 MTAudioProcessingTapRef audioProcessingTap;
!                 if (noErr == MTAudioProcessingTapCreate(kCFAllocatorDefault, &callbacks,
!                                              kMTAudioProcessingTapCreationFlag_PreEffects,
                                               &audioProcessingTap)) {
                      objc_msgSend(audioMixInputParameters,
                                   @selector(setAudioTapProcessor:),
                                   audioProcessingTap);
*** 245,255 ****
          }
      }
  }
  @end
! void InitAudioTap(CFTypeRef tapRef, void *clientInfo, void **tapStorageOut) {
      // retain the AU kernels so they don't get freed while we're running
      AVFAudioProcessor *processor = (__bridge AVFAudioProcessor *)clientInfo;
      if (processor) {
          AVFTapContext *context = new AVFTapContext(processor.soundLevelUnit,
--- 166,176 ----
          }
      }
  }
  @end
! void InitAudioTap(MTAudioProcessingTapRef tapRef, void *clientInfo, void **tapStorageOut) {
      // retain the AU kernels so they don't get freed while we're running
      AVFAudioProcessor *processor = (__bridge AVFAudioProcessor *)clientInfo;
      if (processor) {
          AVFTapContext *context = new AVFTapContext(processor.soundLevelUnit,
*** 257,275 ****
                                                     processor.audioEqualizer);
          *tapStorageOut = context;
      }
  }
! void FinalizeAudioTap(CFTypeRef tapRef) {
!     // NULL check is for safety, this should never be called if we don't have all
!     // the audio tap functions
!     if (!gAudioTapGetStorage) {
!         // should not happen
!         return;
!     }
!     AVFTapContext *context = (AVFTapContext*)gAudioTapGetStorage(tapRef);
      if (context) {
          delete context;
      }
  }
--- 178,190 ----
                                                     processor.audioEqualizer);
          *tapStorageOut = context;
      }
  }
! void FinalizeAudioTap(MTAudioProcessingTapRef tapRef) {
!     AVFTapContext *context = (AVFTapContext*)MTAudioProcessingTapGetStorage(tapRef);
      if (context) {
          delete context;
      }
  }
*** 327,345 ****
          AudioComponentInstanceNew(audioComponent, &audioUnit);
      }
      return audioUnit;
  }
! void PrepareAudioTap(CFTypeRef tapRef,
                       CMItemCount maxFrames,
                       const AudioStreamBasicDescription *processingFormat) {
!     if (!gAudioTapGetStorage) {
!         // should not happen
!         return;
!     }
!     AVFTapContext *context = (AVFTapContext*)gAudioTapGetStorage(tapRef);
      // Validate the audio format before we enable the processor
      // Failures here should rarely, if ever, happen so leave the NSLogs in for
      // easier diagnosis in the field
--- 242,256 ----
          AudioComponentInstanceNew(audioComponent, &audioUnit);
      }
      return audioUnit;
  }
! void PrepareAudioTap(MTAudioProcessingTapRef tapRef,
                       CMItemCount maxFrames,
                       const AudioStreamBasicDescription *processingFormat) {
!     AVFTapContext *context = (AVFTapContext*)MTAudioProcessingTapGetStorage(tapRef);
      // Validate the audio format before we enable the processor
      // Failures here should rarely, if ever, happen so leave the NSLogs in for
      // easier diagnosis in the field
*** 457,473 ****
                               &renderCB, sizeof(renderCB));
      }
      context->totalFrames = 0;
  }
! void UnprepareAudioTap(CFTypeRef tapRef) {
!     if (!gAudioTapGetStorage) {
!         // should not happen
!         return;
!     }
!     AVFTapContext *context = (AVFTapContext*)gAudioTapGetStorage(tapRef);
      context->renderUnit = NULL;
      if (context->spectrumUnit) {
          AudioUnitUninitialize(context->spectrumUnit);
          AudioComponentInstanceDispose(context->spectrumUnit);
--- 368,380 ----
                               &renderCB, sizeof(renderCB));
      }
      context->totalFrames = 0;
  }
! void UnprepareAudioTap(MTAudioProcessingTapRef tapRef) {
!     AVFTapContext *context = (AVFTapContext*)MTAudioProcessingTapGetStorage(tapRef);
      context->renderUnit = NULL;
      if (context->spectrumUnit) {
          AudioUnitUninitialize(context->spectrumUnit);
          AudioComponentInstanceDispose(context->spectrumUnit);
*** 483,504 ****
          AudioComponentInstanceDispose(context->eqUnit);
          context->eqUnit = NULL;
      }
  }
! void ProcessAudioTap(CFTypeRef tapRef,
                       CMItemCount numberFrames,
                       uint32_t flags,
                       AudioBufferList *bufferListInOut,
                       CMItemCount *numberFramesOut,
                       uint32_t *flagsOut) {
!     if (!gAudioTapGetStorage) {
!         // should not happen
!         return;
!     }
!     AVFTapContext *context = (AVFTapContext*)gAudioTapGetStorage(tapRef);
      OSStatus status = noErr;
      if (context->renderUnit) {
          AudioTimeStamp audioTimeStamp;
          audioTimeStamp.mSampleTime = context->totalFrames;
--- 390,407 ----
          AudioComponentInstanceDispose(context->eqUnit);
          context->eqUnit = NULL;
      }
  }
! void ProcessAudioTap(MTAudioProcessingTapRef tapRef,
                       CMItemCount numberFrames,
                       uint32_t flags,
                       AudioBufferList *bufferListInOut,
                       CMItemCount *numberFramesOut,
                       uint32_t *flagsOut) {
!     AVFTapContext *context = (AVFTapContext*)MTAudioProcessingTapGetStorage(tapRef);
      OSStatus status = noErr;
      if (context->renderUnit) {
          AudioTimeStamp audioTimeStamp;
          audioTimeStamp.mSampleTime = context->totalFrames;
*** 514,539 ****
              return;
          }
          context->totalFrames += numberFrames;
          *numberFramesOut = numberFrames;
      } else {
!         if (gAudioTapGetSourceAudio) {
!             gAudioTapGetSourceAudio(tapRef, numberFrames, bufferListInOut,
                                      flagsOut, NULL, numberFramesOut);
          }
-     }
  }
  static OSStatus AVFTapRenderCallback(void *inRefCon,
                                       AudioUnitRenderActionFlags *ioActionFlags,
                                       const AudioTimeStamp *inTimeStamp,
                                       UInt32 inBusNumber,
                                       UInt32 inNumberFrames,
                                       AudioBufferList *ioData) {
!     if (!gAudioTapGetSourceAudio) {
!         // should not happen
!         return noErr;
!     }
!     CFTypeRef tapRef = static_cast<CFTypeRef>(inRefCon);
!     return gAudioTapGetSourceAudio(tapRef, inNumberFrames, ioData, NULL, NULL, NULL);
  }
--- 417,436 ----
              return;
          }
          context->totalFrames += numberFrames;
          *numberFramesOut = numberFrames;
      } else {
!         MTAudioProcessingTapGetSourceAudio(tapRef, numberFrames, bufferListInOut,
                                      flagsOut, NULL, numberFramesOut);
          }
  }
  static OSStatus AVFTapRenderCallback(void *inRefCon,
                                       AudioUnitRenderActionFlags *ioActionFlags,
                                       const AudioTimeStamp *inTimeStamp,
                                       UInt32 inBusNumber,
                                       UInt32 inNumberFrames,
                                       AudioBufferList *ioData) {
!     MTAudioProcessingTapRef tapRef = static_cast<MTAudioProcessingTapRef>(inRefCon);
!     return MTAudioProcessingTapGetSourceAudio(tapRef, inNumberFrames, ioData, NULL, NULL, NULL);
  }
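
All of the tap callbacks above share state through the tap's storage slot: whatever pointer InitAudioTap writes to tapStorageOut is what MTAudioProcessingTapGetStorage returns in prepare, process, unprepare and finalize. A minimal outline of that pattern, assuming a hypothetical MyTapState struct in place of AVFTapContext:

    #import <MediaToolbox/MediaToolbox.h>

    struct MyTapState {            // hypothetical stand-in for AVFTapContext
        UInt64 totalFrames;
    };

    static void StorageInit(MTAudioProcessingTapRef tap, void *clientInfo, void **tapStorageOut) {
        *tapStorageOut = new MyTapState();   // allocated once per tap
    }

    static void StorageFinalize(MTAudioProcessingTapRef tap) {
        // The only place the state is freed, mirroring how FinalizeAudioTap deletes the AVFTapContext.
        delete static_cast<MyTapState*>(MTAudioProcessingTapGetStorage(tap));
    }

    static void StorageProcess(MTAudioProcessingTapRef tap, CMItemCount numberFrames,
                               MTAudioProcessingTapFlags flags, AudioBufferList *bufferListInOut,
                               CMItemCount *numberFramesOut, MTAudioProcessingTapFlags *flagsOut) {
        MyTapState *state = static_cast<MyTapState*>(MTAudioProcessingTapGetStorage(tap));
        MTAudioProcessingTapGetSourceAudio(tap, numberFrames, bufferListInOut,
                                           flagsOut, NULL, numberFramesOut);
        state->totalFrames += *numberFramesOut;   // frame bookkeeping, as ProcessAudioTap does
    }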