modules/javafx.media/src/main/native/jfxmedia/platform/osx/avf/AVFAudioProcessor.mm

rev 10028 : 8156563: JavaFX Ensemble8 media sample hang and crash
Reviewed-by: almatvee, kcr

@@ -1,7 +1,7 @@
 /*
- * Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2014, 2016, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License version 2 only, as
  * published by the Free Software Foundation.  Oracle designates this

@@ -52,59 +52,77 @@
                                      const AudioTimeStamp *inTimeStamp,
                                      UInt32 inBusNumber,
                                      UInt32 inNumberFrames,
                                      AudioBufferList *ioData);
 
-typedef struct AVFTapContext {
-    BOOL enabled;
-    AVFAudioProcessor *processor; // we want this object retained, so don't use __bridge to set this!
+class AVFTapContext {
+public:
+    AVFTapContext(AVFSoundLevelUnitPtr slu, AVFAudioSpectrumUnitPtr spectrum, AVFAudioEqualizerPtr eq) :
+        audioSLU(slu),
+        audioSpectrum(spectrum),
+        audioEQ(eq)
+    {
+    }
+
+    ~AVFTapContext() {
+        // AudioUnits have already been deallocated by now
+        // shared_ptrs get freed automatically
+    }
 
-    AudioUnit delayUnit;
     AudioUnit spectrumUnit;
     AudioUnit volumeUnit;
     AudioUnit eqUnit;
 
     AudioUnit renderUnit; // the last unit in our chain
     CMItemCount totalFrames;
-} AVFTapContext;
+
+    // Hold on to these while we're running
+    AVFSoundLevelUnitPtr audioSLU;
+    AVFAudioSpectrumUnitPtr audioSpectrum;
+    AVFAudioEqualizerPtr audioEQ;
+};
 
 @implementation AVFAudioProcessor
 
-- (id) initWithPlayer:(AVFMediaPlayer*)player assetTrack:(AVAssetTrack *)assetTrack {
+- (id) init {
     if ((self = [super init]) != nil) {
-        _player = player;
-
-        // Create a mixer for this asset track
-        [self createMixerWithTrack:assetTrack];
-        if (_mixer) {
-            _player.playerItem.audioMix = _mixer;
-        }
-
-        _soundLevelUnit = new AVFSoundLevelUnit();
-        _audioSpectrum = NULL;
-        _audioEqualizer = NULL;
+        _soundLevelUnit = AVFSoundLevelUnitPtr(new AVFSoundLevelUnit());
+        _audioSpectrum = AVFAudioSpectrumUnitPtr(new AVFAudioSpectrumUnit());
+        _audioEqualizer = AVFAudioEqualizerPtr(new AVFAudioEqualizer());
+
+        _volume = 1.0f;
+        _balance = 0.0f;
+        _audioDelay = 0LL;
     }
     return self;
 }
 
 - (void) dealloc {
-    if (_soundLevelUnit) {
-        delete _soundLevelUnit;
-        _soundLevelUnit = NULL;
-    }
+    _soundLevelUnit = nullptr;
+    _audioSpectrum = nullptr;
+    _audioEqualizer = nullptr;
+}
 
-    // We don't own these objects
-    _audioSpectrum = NULL;
-    _audioEqualizer = NULL;
+- (void) setAudioTrack:(AVAssetTrack *)track {
+    if (track != _audioTrack) {
+        // reset the audio mixer if it's already been created
+        // this theoretically should never happen...
+        _mixer = nil;
+    }
+    _audioTrack = track;
 }
 
-- (void) createMixerWithTrack:(AVAssetTrack*)audioTrack {
+- (AVAudioMix*) mixer {
+    if (!self.audioTrack) {
+        return nil;
+    }
+
     if (!_mixer) {
         AVMutableAudioMix *mixer = [AVMutableAudioMix audioMix];
         if (mixer) {
             AVMutableAudioMixInputParameters *audioMixInputParameters =
-                [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:audioTrack];
+                [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:self.audioTrack];
             if (audioMixInputParameters &&
                 [audioMixInputParameters respondsToSelector:@selector(setAudioTapProcessor:)]) {
                 MTAudioProcessingTapCallbacks callbacks;
 
                 callbacks.version = kMTAudioProcessingTapCallbacksVersion_0;

@@ -123,60 +141,54 @@
                     objc_msgSend(audioMixInputParameters,
                                  @selector(setAudioTapProcessor:),
                                  audioProcessingTap);
 
                     CFRelease(audioProcessingTap); // owned by the mixer now
-
                     mixer.inputParameters = @[audioMixInputParameters];
 
                     _mixer = mixer;
                 }
             }
         }
     }
+    return _mixer;
 }
 
 - (void) setVolume:(float)volume {
     _volume = volume;
-    if (_soundLevelUnit) {
+    if (_soundLevelUnit != nullptr) {
         _soundLevelUnit->setVolume(volume);
     }
 }
 
 - (void) setBalance:(float)balance {
     _balance = balance;
-    if (_soundLevelUnit) {
+    if (_soundLevelUnit != nullptr) {
         _soundLevelUnit->setBalance(balance);
     }
 }
 
 @end
 
 void InitAudioTap(MTAudioProcessingTapRef tapRef, void *clientInfo, void **tapStorageOut)
 {
-    AVFAudioProcessor *processor = (__bridge AVFAudioProcessor*)clientInfo;
-
-    AVFTapContext *context = (AVFTapContext*)calloc(1, sizeof(AVFTapContext));
-    if (context) {
-        context->enabled = NO;
-            // processor should be retained, else we can crash when closing the media player
-        context->processor = processor;
+    // retain the AU kernels so they don't get freed while we're running
+    AVFAudioProcessor *processor = (__bridge AVFAudioProcessor *)clientInfo;
+    if (processor) {
+        AVFTapContext *context = new AVFTapContext(processor.soundLevelUnit,
+                                                   processor.audioSpectrum,
+                                                   processor.audioEqualizer);
         *tapStorageOut = context;
-
-        processor.tapStorage = context;
     }
 }
 
 void FinalizeAudioTap(MTAudioProcessingTapRef tapRef)
 {
     AVFTapContext *context = (AVFTapContext*)MTAudioProcessingTapGetStorage(tapRef);
 
     if (context) {
-        context->processor.tapStorage = NULL;
-        context->processor = NULL;
-
-        free(context);
+        delete context;
     }
 }
 
 static OSStatus SetupAudioUnit(AudioUnit unit,
                                const AudioStreamBasicDescription *processingFormat,

@@ -255,57 +267,43 @@
         NSLog(@"AVFAudioProcessor needs native endian packed float samples!!");
         return;
     }
 
     // Create our chain of audio processing units
-    context->delayUnit = FindAudioUnit(kAudioUnitType_Effect,
-                                       kAudioUnitSubType_SampleDelay,
-                                       kAudioUnitManufacturer_Apple);
-    if (context->delayUnit) {
-        OSStatus status = SetupAudioUnit(context->delayUnit, processingFormat, (UInt32)maxFrames);
-        if (noErr != status) {
-            NSLog(@"Error setting up delay unit: %d", status);
-            AudioComponentInstanceDispose(context->delayUnit);
-            context->delayUnit = NULL;
-        }
-    }
-
     context->eqUnit = NULL;
-    if (context->processor.audioEqualizer) {
-        context->eqUnit = NewKernelProcessorUnit(context->processor.audioEqualizer);
+    if (context->audioEQ != nullptr) {
+        context->eqUnit = NewKernelProcessorUnit(static_pointer_cast<AVFKernelProcessor>(context->audioEQ));
         if (context->eqUnit) {
             OSStatus status = SetupAudioUnit(context->eqUnit,
                                              processingFormat,
                                              (UInt32)maxFrames);
             if (noErr != status) {
                 NSLog(@"Error creating audio equalizer unit: %d", status);
-                // Don't delete the instance, that will happen when we dispose the unit
                 AudioComponentInstanceDispose(context->eqUnit);
                 context->eqUnit = NULL;
             }
         }
     }
 
     context->spectrumUnit = NULL;
-    if (context->processor.audioSpectrum) {
-        context->spectrumUnit = NewKernelProcessorUnit(context->processor.audioSpectrum);
+    if (context->audioSpectrum != nullptr) {
+        context->spectrumUnit = NewKernelProcessorUnit(static_pointer_cast<AVFKernelProcessor>(context->audioSpectrum));
         if (context->spectrumUnit) {
             OSStatus status = SetupAudioUnit(context->spectrumUnit,
                                              processingFormat,
                                              (UInt32)maxFrames);
             if (noErr != status) {
                 NSLog(@"Error creating audio spectrum unit: %d", status);
-                // Don't delete the instance, that will happen when we dispose the unit
                 AudioComponentInstanceDispose(context->spectrumUnit);
                 context->spectrumUnit = NULL;
             }
         }
     }
 
     context->volumeUnit = NULL;
-    if (context->processor.soundLevelUnit) {
-        context->volumeUnit = NewKernelProcessorUnit(context->processor.soundLevelUnit);
+    if (context->audioSLU != nullptr) {
+        context->volumeUnit = NewKernelProcessorUnit(static_pointer_cast<AVFKernelProcessor>(context->audioSLU));
         if (context->volumeUnit) {
             OSStatus status = SetupAudioUnit(context->volumeUnit,
                                              processingFormat,
                                              (UInt32)maxFrames);
             if (noErr != status) {

@@ -321,29 +319,19 @@
      * The last unit in the chain will be the unit we call to render, it will
      * pull through the graph until we get to the first, which will fetch samples
      * via the render proc we install.
      *
      * The graph will look like this:
-     *    (render proc) -> delayUnit -> eqUnit -> spectrumUnit -> volUnit
+     *    (render proc) -> eqUnit -> spectrumUnit -> volUnit
      *
      * This will allow the EQ settings to affect the spectrum output, but not
      * the volume or balance.
      */
     AudioUnit firstUnit = NULL;
     context->renderUnit = NULL;
 
     // Set initial settings
-    if (context->delayUnit) {
-        if (context->renderUnit) {
-            // Connect renderUnit output to this input
-            ConnectAudioUnits(context->renderUnit, context->delayUnit);
-        }
-        context->renderUnit = context->delayUnit;
-        if (!firstUnit) {
-            firstUnit = context->delayUnit;
-        }
-    }
     if (context->eqUnit) {
         if (context->renderUnit) {
             ConnectAudioUnits(context->renderUnit, context->eqUnit);
         }
         context->renderUnit = context->eqUnit;

@@ -379,24 +367,17 @@
                              kAudioUnitProperty_SetRenderCallback,
                              kAudioUnitScope_Input, 0,
                              &renderCB, sizeof(renderCB));
     }
     context->totalFrames = 0;
-    context->enabled = YES;
 }
 
 void UnprepareAudioTap(MTAudioProcessingTapRef tapRef)
 {
     AVFTapContext *context = (AVFTapContext*)MTAudioProcessingTapGetStorage(tapRef);
-    context->enabled = NO;
     context->renderUnit = NULL;
 
-    if (context->delayUnit) {
-        AudioUnitUninitialize(context->delayUnit);
-        AudioComponentInstanceDispose(context->delayUnit);
-        context->delayUnit = NULL;
-    }
     if (context->spectrumUnit) {
         AudioUnitUninitialize(context->spectrumUnit);
         AudioComponentInstanceDispose(context->spectrumUnit);
         context->spectrumUnit = NULL;
     }
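
The render proc installed above via kAudioUnitProperty_SetRenderCallback is what lets the tail unit pull samples back through the whole chain to the tap. A hedged sketch matching the AURenderCallback declaration at the top of the file (AVFTapRenderCallback is a hypothetical name, and passing the tap ref as inRefCon is an assumption):

    static OSStatus AVFTapRenderCallback(void *inRefCon,
                                         AudioUnitRenderActionFlags *ioActionFlags,
                                         const AudioTimeStamp *inTimeStamp,
                                         UInt32 inBusNumber,
                                         UInt32 inNumberFrames,
                                         AudioBufferList *ioData)
    {
        // The head of the chain fetches its input straight from the tap;
        // the flags, time range and actual frame count outputs are not needed here.
        MTAudioProcessingTapRef tap = (MTAudioProcessingTapRef)inRefCon;
        return MTAudioProcessingTapGetSourceAudio(tap, inNumberFrames, ioData,
                                                  NULL, NULL, NULL);
    }
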