@@ -108,6 +108,9 @@ void removeSubWindow (void* nsWindow, Component* comp)
     [hostWindow removeChildWindow: pluginWindow];
     comp->removeFromDesktop();
     [hostWindow release];
+    for (int i = 20; --i >= 0;)
+        MessageManager::getInstance()->runDispatchLoopUntil (1);
 }
 static bool isJuceWindow (WindowRef w) throw()
@@ -735,8 +735,6 @@ public:
 #if MACOS_10_4_OR_EARLIER
   #include <GLUT/glut.h>
-  typedef int NSInteger;
-  typedef unsigned int NSUInteger;
 #endif
 #endif   // __JUCE_MAC_NATIVEINCLUDES_JUCEHEADER__
@@ -47555,7 +47553,8 @@ BEGIN_JUCE_NAMESPACE
 static const tchar* const tableColumnPropertyTag = T("_tableColumnID");
-class TableListRowComp   : public Component
+class TableListRowComp   : public Component,
+                           public TooltipClient
 {
 public:
     TableListRowComp (TableListBox& owner_)
@@ -47759,6 +47758,19 @@ public:
             owner.getModel()->cellDoubleClicked (row, columnId, e);
     }
+    const String getTooltip()
+    {
+        int x, y;
+        getMouseXYRelative (x, y);
+        const int columnId = owner.getHeader()->getColumnIdAtX (x);
+        if (columnId != 0 && owner.getModel() != 0)
+            return owner.getModel()->getCellTooltip (row, columnId);
+        return String::empty;
+    }
     juce_UseDebuggingNewOperator
 private:
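The two hunks above make each TableListRowComp a TooltipClient that forwards to the new TableListBoxModel::getCellTooltip() callback. A rough sketch of a model that feeds it; the class and its contents are hypothetical, and only the getCellTooltip() override relates to this patch:

    // Hypothetical model - only getCellTooltip() is new API from this patch.
    class ExampleTableModel  : public TableListBoxModel
    {
    public:
        int getNumRows()                                            { return 10; }
        void paintRowBackground (Graphics&, int, int, int, bool)    {}

        void paintCell (Graphics& g, int row, int columnId, int width, int height, bool)
        {
            g.drawText (String (row) + String (":") + String (columnId),
                        2, 0, width - 4, height, Justification::centredLeft, true);
        }

        // Called (via TooltipClient::getTooltip() on the row component above)
        // for whichever cell is currently under the mouse.
        const String getCellTooltip (int rowNumber, int columnId)
        {
            return String ("Cell ") + String (rowNumber) + String (", column ") + String (columnId);
        }
    };

A TooltipWindow instance still has to exist on the desktop for the returned text to be shown; the patch only supplies the per-cell text.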
@@ -244743,11 +244755,12 @@ public:
     AudioIODevice* createDevice (const String& outputDeviceName,
                                  const String& inputDeviceName)
     {
+        // ASIO can't open two different devices for input and output - they must be the same one.
         jassert (inputDeviceName == outputDeviceName || outputDeviceName.isEmpty() || inputDeviceName.isEmpty());
-        (void) inputDeviceName;
         jassert (hasScanned); // need to call scanForDevices() before doing this
-        const int index = deviceNames.indexOf (outputDeviceName);
+        const int index = deviceNames.indexOf (outputDeviceName.isNotEmpty() ? outputDeviceName
+                                                                             : inputDeviceName);
         if (index >= 0)
         {
@@ -258363,33 +258376,76 @@ void PlatformUtilities::addItemToDock (const File& file)
 #if ! JUCE_ONLY_BUILD_CORE_LIBRARY
-bool AlertWindow::showNativeDialogBox (const String& title,
-                                       const String& bodyText,
-                                       bool isOkCancel)
+END_JUCE_NAMESPACE
+@interface JuceAlertBoxDelegate : NSObject
+{
+@public
+    bool clickedOk;
+}
+- (void) alertView: (UIAlertView*) alertView clickedButtonAtIndex: (NSInteger) buttonIndex;
+@end
+@implementation JuceAlertBoxDelegate
+- (void) alertView: (UIAlertView*) alertView clickedButtonAtIndex: (NSInteger) buttonIndex
+{
+    clickedOk = (buttonIndex == 0);
+    alertView.hidden = true;
+}
+@end
+BEGIN_JUCE_NAMESPACE
+// (This function is used directly by other bits of code)
+bool juce_iPhoneShowModalAlert (const String& title,
+                                const String& bodyText,
+                                NSString* okButtonText,
+                                NSString* cancelButtonText)
 {
     const ScopedAutoReleasePool pool;
-    UIAlertView *alert = [[[UIAlertView alloc] initWithTitle: juceStringToNS (title)
-                                                     message: juceStringToNS (title)
-                                                    delegate: nil
-                                           cancelButtonTitle: @"OK"
-                                           otherButtonTitles: (isOkCancel ? @"Cancel" : nil), nil] autorelease];
-    alert.cancelButtonIndex = alert.firstOtherButtonIndex;
+    JuceAlertBoxDelegate* callback = [[JuceAlertBoxDelegate alloc] init];
+    UIAlertView* alert = [[UIAlertView alloc] initWithTitle: juceStringToNS (title)
+                                                    message: juceStringToNS (bodyText)
+                                                   delegate: callback
+                                          cancelButtonTitle: okButtonText
+                                          otherButtonTitles: cancelButtonText, nil];
+    [alert retain];
     [alert show];
-    // xxx need to use a delegate to find which button was clicked
-    return false;
+    while (! alert.hidden && alert.superview != nil)
+        [[NSRunLoop mainRunLoop] runUntilDate: [NSDate dateWithTimeIntervalSinceNow: 0.01]];
+    const bool result = callback->clickedOk;
+    [alert release];
+    [callback release];
+    return result;
+}
+bool AlertWindow::showNativeDialogBox (const String& title,
+                                       const String& bodyText,
+                                       bool isOkCancel)
+{
+    return juce_iPhoneShowModalAlert (title, bodyText,
+                                      @"OK",
+                                      isOkCancel ? @"Cancel" : nil);
 }
 bool DragAndDropContainer::performExternalDragDropOfFiles (const StringArray& files, const bool canMoveFiles)
 {
-    jassertfalse    // not implemented!
+    jassertfalse    // no such thing on the iphone!
     return false;
 }
 bool DragAndDropContainer::performExternalDragDropOfText (const String& text)
 {
-    jassertfalse    // not implemented!
+    jassertfalse    // no such thing on the iphone!
     return false;
 }
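The replacement above makes the native iPhone alert genuinely modal: juce_iPhoneShowModalAlert() pumps the main NSRunLoop until the UIAlertView's delegate hides the view, and AlertWindow::showNativeDialogBox() now just forwards to it with stock button titles. A hedged usage sketch (Objective-C++ context assumed; the strings are invented for illustration):

    // Stock OK/Cancel box - returns true when the user taps OK
    // (button index 0 in the delegate callback above).
    if (AlertWindow::showNativeDialogBox (T("Delete recording?"),
                                          T("This can't be undone."),
                                          true))
    {
        // ...user confirmed...
    }

    // The lower-level helper takes custom button labels:
    const bool shouldResume = juce_iPhoneShowModalAlert (T("Audio interrupted"),
                                                         T("Another application grabbed the audio hardware."),
                                                         @"Resume", @"Cancel");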
@@ -261169,37 +261225,43 @@ bool WebBrowserComponent::pageAboutToLoad (const String& url)
 // compiled on its own).
 #if JUCE_INCLUDED_FILE
-class IPhoneAudioIODevice  : public AudioIODeviceType
+class IPhoneAudioIODevice  : public AudioIODevice
 {
 public:
-    IPhoneAudioIODevice (const String& deviceName, const bool isInput_)
+    IPhoneAudioIODevice (const String& deviceName)
         : AudioIODevice (deviceName, T("Audio")),
-          isInput (isInput_),
-          isOpen_ (false),
-          isStarted (false)
+          audioUnit (0),
+          isRunning (false),
+          callback (0),
+          actualBufferSize (0),
+          floatData (1, 2)
     {
+        numInputChannels = 2;
+        numOutputChannels = 2;
+        preferredBufferSize = 0;
+        AudioSessionInitialize (0, 0, interruptionListenerStatic, this);
+        updateDeviceInfo();
     }
-    ~IPhoneAudioIODeviceType()
+    ~IPhoneAudioIODevice()
     {
+        close();
     }
     const StringArray getOutputChannelNames()
     {
         StringArray s;
-        if (! isInput)
-        {
-            s.add ("Left");
-            s.add ("Right");
-        }
+        s.add ("Left");
+        s.add ("Right");
         return s;
     }
     const StringArray getInputChannelNames()
     {
         StringArray s;
-        if (isInput)
+        if (audioInputIsAvailable)
         {
             s.add ("Left");
             s.add ("Right");
@@ -261209,134 +261271,150 @@ public:
     int getNumSampleRates()
     {
-        return sampleRates.size();
+        return 1;
     }
     double getSampleRate (int index)
     {
-        return sampleRates [index];
+        return sampleRate;
     }
     int getNumBufferSizesAvailable()
     {
-        return bufferSizes.size();
+        return 1;
     }
     int getBufferSizeSamples (int index)
     {
-        return bufferSizes [index];
+        return getDefaultBufferSize();
     }
     int getDefaultBufferSize()
     {
-        for (int i = 0; i < getNumBufferSizesAvailable(); ++i)
-            if (getBufferSizeSamples(i) >= 512)
-                return getBufferSizeSamples(i);
-        return 512;
+        return 1024;
    }
     const String open (const BitArray& inputChannels,
                        const BitArray& outputChannels,
                        double sampleRate,
-                       int bufferSizeSamples)
+                       int bufferSize)
     {
-        isOpen_ = true;
-        if (bufferSizeSamples <= 0)
-            bufferSizeSamples = getDefaultBufferSize();
+        close();
         lastError = String::empty;
+        preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;
-        isOpen_ = lastError.isEmpty();
+        // xxx set up channel mapping
+        activeOutputChans = outputChannels;
+        activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
+        numOutputChannels = activeOutputChans.countNumberOfSetBits();
+        monoOutputChannelNumber = activeOutputChans.findNextSetBit (0);
+        activeInputChans = inputChannels;
+        activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
+        numInputChannels = activeInputChans.countNumberOfSetBits();
+        monoInputChannelNumber = activeInputChans.findNextSetBit (0);
+        AudioSessionSetActive (true);
+        UInt32 audioCategory = kAudioSessionCategory_PlayAndRecord;
+        AudioSessionSetProperty (kAudioSessionProperty_AudioCategory, sizeof (audioCategory), &audioCategory);
+        AudioSessionAddPropertyListener (kAudioSessionProperty_AudioRouteChange, propertyChangedStatic, this);
+        fixAudioRouteIfSetToReceiver();
+        updateDeviceInfo();
+        Float32 bufferDuration = preferredBufferSize / sampleRate;
+        AudioSessionSetProperty (kAudioSessionProperty_PreferredHardwareIOBufferDuration, sizeof (bufferDuration), &bufferDuration);
+        actualBufferSize = preferredBufferSize;
+        prepareFloatBuffers();
+        isRunning = true;
+        propertyChanged (0, 0, 0);  // creates and starts the AU
+        lastError = audioUnit != 0 ? String::empty
+                                   : T("Couldn't open the device");
         return lastError;
     }
     void close()
     {
-        isOpen_ = false;
+        if (isRunning)
+        {
+            isRunning = false;
+            AudioSessionSetActive (false);
+            if (audioUnit != 0)
+            {
+                AudioComponentInstanceDispose (audioUnit);
+                audioUnit = 0;
+            }
+        }
     }
     bool isOpen()
     {
-        return isOpen_;
+        return isRunning;
     }
     int getCurrentBufferSizeSamples()
     {
-        return internal != 0 ? internal->getBufferSize() : 512;
+        return actualBufferSize;
     }
     double getCurrentSampleRate()
     {
-        return internal != 0 ? internal->getSampleRate() : 0;
+        return sampleRate;
     }
     int getCurrentBitDepth()
     {
-        return 32;  // no way to find out, so just assume it's high..
+        return 16;
     }
     const BitArray getActiveOutputChannels() const
     {
-        return internal != 0 ? internal->activeOutputChans : BitArray();
+        return activeOutputChans;
     }
     const BitArray getActiveInputChannels() const
     {
-        BitArray chans;
-        if (internal != 0)
-        {
-            chans = internal->activeInputChans;
-            if (internal->inputDevice != 0)
-                chans.orWith (internal->inputDevice->activeInputChans);
-        }
-        return chans;
+        return activeInputChans;
     }
     int getOutputLatencyInSamples()
     {
-        if (internal == 0)
-            return 0;
-        // this seems like a good guess at getting the latency right - comparing
-        // this with a round-trip measurement, it gets it to within a few millisecs
-        // for the built-in mac soundcard
-        return internal->outputLatency + internal->getBufferSize() * 2;
+        return 0; //xxx
     }
     int getInputLatencyInSamples()
     {
-        if (internal == 0)
-            return 0;
-        return internal->inputLatency + internal->getBufferSize() * 2;
+        return 0; //xxx
     }
-    void start (AudioIODeviceCallback* callback)
+    void start (AudioIODeviceCallback* callback_)
     {
-        if (internal != 0 && ! isStarted)
+        if (isRunning && callback != callback_)
         {
-            if (callback != 0)
-                callback->audioDeviceAboutToStart (this);
+            if (callback_ != 0)
+                callback_->audioDeviceAboutToStart (this);
-            isStarted = true;
-            internal->start (callback);
+            callbackLock.enter();
+            callback = callback_;
+            callbackLock.exit();
        }
     }
     void stop()
     {
-        if (isStarted && internal != 0)
+        if (isRunning)
        {
-            AudioIODeviceCallback* const lastCallback = internal->callback;
-            isStarted = false;
-            internal->stop (true);
+            callbackLock.enter();
+            AudioIODeviceCallback* const lastCallback = callback;
+            callback = 0;
+            callbackLock.exit();
             if (lastCallback != 0)
                 lastCallback->audioDeviceStopped();
@@ -261345,10 +261423,7 @@ public:
     bool isPlaying()
     {
-        if (internal->callback == 0)
-            isStarted = false;
-        return isStarted;
+        return isRunning && callback != 0;
     }
     const String getLastError()
@@ -261356,36 +261431,286 @@ public:
         return lastError;
     }
-    int inputIndex, outputIndex;
-    juce_UseDebuggingNewOperator
 private:
-    CoreAudioInternal* internal;
-    bool isOpen_, isStarted;
+    CriticalSection callbackLock;
+    Float64 sampleRate;
+    int numInputChannels, numOutputChannels;
+    int preferredBufferSize;
+    int actualBufferSize;
+    bool isRunning;
     String lastError;
-    static OSStatus hardwareListenerProc (AudioDeviceID /*inDevice*/, UInt32 /*inLine*/, const AudioObjectPropertyAddress* pa, void* inClientData)
+    AudioStreamBasicDescription format;
+    AudioUnit audioUnit;
+    UInt32 audioInputIsAvailable;
+    AudioIODeviceCallback* callback;
+    BitArray activeOutputChans, activeInputChans;
+    AudioSampleBuffer floatData;
+    float* inputChannels[3];
+    float* outputChannels[3];
+    bool monoInputChannelNumber, monoOutputChannelNumber;
+    void prepareFloatBuffers()
     {
-        CoreAudioInternal* const intern = (CoreAudioInternal*) inClientData;
+        floatData.setSize (numInputChannels + numOutputChannels, actualBufferSize);
+        zerostruct (inputChannels);
+        zerostruct (outputChannels);
-        switch (pa->mSelector)
+        for (int i = 0; i < numInputChannels; ++i)
+            inputChannels[i] = floatData.getSampleData (i);
+        for (int i = 0; i < numOutputChannels; ++i)
+            outputChannels[i] = floatData.getSampleData (i + numInputChannels);
+    }
+    OSStatus process (AudioUnitRenderActionFlags* ioActionFlags, const AudioTimeStamp* inTimeStamp,
+                      UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList* ioData) throw()
+    {
+        OSStatus err = noErr;
+        if (audioInputIsAvailable)
+            err = AudioUnitRender (audioUnit, ioActionFlags, inTimeStamp, 1, inNumberFrames, ioData);
+        const ScopedLock sl (callbackLock);
+        if (callback != 0)
         {
-            case kAudioHardwarePropertyDevices:
-                intern->deviceDetailsChanged();
-                break;
+            if (audioInputIsAvailable && numInputChannels > 0)
+            {
+                short* shortData = (short*) ioData->mBuffers[0].mData;
-            case kAudioHardwarePropertyDefaultOutputDevice:
-            case kAudioHardwarePropertyDefaultInputDevice:
-            case kAudioHardwarePropertyDefaultSystemOutputDevice:
-                break;
+                if (numInputChannels >= 2)
+                {
+                    for (UInt32 i = 0; i < inNumberFrames; ++i)
+                    {
+                        inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
+                        inputChannels[1][i] = *shortData++ * (1.0f / 32768.0f);
+                    }
+                }
+                else
+                {
+                    if (monoInputChannelNumber > 0)
+                        ++shortData;
+                    for (UInt32 i = 0; i < inNumberFrames; ++i)
+                    {
+                        inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
+                        ++shortData;
+                    }
+                }
+            }
+            else
+            {
+                zeromem (inputChannels[0], sizeof (float) * inNumberFrames);
+                zeromem (inputChannels[1], sizeof (float) * inNumberFrames);
+            }
+            callback->audioDeviceIOCallback ((const float**) inputChannels, numInputChannels,
+                                             outputChannels, numOutputChannels,
+                                             (int) inNumberFrames);
+            short* shortData = (short*) ioData->mBuffers[0].mData;
+            int n = 0;
+            if (numOutputChannels >= 2)
+            {
+                for (UInt32 i = 0; i < inNumberFrames; ++i)
+                {
+                    shortData [n++] = (short) (outputChannels[0][i] * 32767.0f);
+                    shortData [n++] = (short) (outputChannels[1][i] * 32767.0f);
+                }
+            }
+            else if (numOutputChannels == 1)
+            {
+                for (UInt32 i = 0; i < inNumberFrames; ++i)
+                {
+                    const short s = (short) (outputChannels[monoOutputChannelNumber][i] * 32767.0f);
+                    shortData [n++] = s;
+                    shortData [n++] = s;
+                }
+            }
+            else
+            {
+                zeromem (ioData->mBuffers[0].mData, 2 * sizeof (short) * inNumberFrames);
+            }
+        }
+        else
+        {
+            zeromem (ioData->mBuffers[0].mData, 2 * sizeof (short) * inNumberFrames);
         }
-        return noErr;
+        return err;
     }
-    CoreAudioIODevice (const CoreAudioIODevice&);
-    const CoreAudioIODevice& operator= (const CoreAudioIODevice&);
+    void updateDeviceInfo() throw()
+    {
+        UInt32 size = sizeof (sampleRate);
+        AudioSessionGetProperty (kAudioSessionProperty_CurrentHardwareSampleRate, &size, &sampleRate);
+        size = sizeof (audioInputIsAvailable);
+        AudioSessionGetProperty (kAudioSessionProperty_AudioInputAvailable, &size, &audioInputIsAvailable);
+    }
+    void propertyChanged (AudioSessionPropertyID inID, UInt32 inDataSize, const void* inPropertyValue)
+    {
+        if (! isRunning)
+            return;
+        if (inPropertyValue != 0)
+        {
+            CFDictionaryRef routeChangeDictionary = (CFDictionaryRef) inPropertyValue;
+            CFNumberRef routeChangeReasonRef = (CFNumberRef) CFDictionaryGetValue (routeChangeDictionary,
+                                                                                   CFSTR (kAudioSession_AudioRouteChangeKey_Reason));
+            SInt32 routeChangeReason;
+            CFNumberGetValue (routeChangeReasonRef, kCFNumberSInt32Type, &routeChangeReason);
+            if (routeChangeReason == kAudioSessionRouteChangeReason_OldDeviceUnavailable)
+                fixAudioRouteIfSetToReceiver();
+        }
+        updateDeviceInfo();
+        createAudioUnit();
+        AudioSessionSetActive (true);
+        if (audioUnit != 0)
+        {
+            UInt32 formatSize = sizeof (format);
+            AudioUnitGetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &formatSize);
+            Float32 bufferDuration = preferredBufferSize / sampleRate;
+            UInt32 bufferDurationSize = sizeof (bufferDuration);
+            AudioSessionGetProperty (kAudioSessionProperty_CurrentHardwareIOBufferDuration, &bufferDurationSize, &bufferDurationSize);
+            actualBufferSize = (int) (sampleRate * bufferDuration + 0.5);
+            AudioOutputUnitStart (audioUnit);
+        }
+    }
+    void interruptionListener (UInt32 inInterruption)
+    {
+        if (inInterruption == kAudioSessionBeginInterruption)
+        {
+            isRunning = false;
+            AudioOutputUnitStop (audioUnit);
+            if (juce_iPhoneShowModalAlert ("Audio Interrupted",
+                                           "This could have been interrupted by another application or by unplugging a headset",
+                                           @"Resume",
+                                           @"Cancel"))
+            {
+                isRunning = true;
+                propertyChanged (0, 0, 0);
+            }
+        }
+        if (inInterruption == kAudioSessionEndInterruption)
+        {
+            isRunning = true;
+            AudioSessionSetActive (true);
+            AudioOutputUnitStart (audioUnit);
+        }
+    }
+    static OSStatus processStatic (void* inRefCon, AudioUnitRenderActionFlags* ioActionFlags, const AudioTimeStamp* inTimeStamp,
+                                   UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList* ioData)
+    {
+        return ((IPhoneAudioIODevice*) inRefCon)->process (ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, ioData);
+    }
+    static void propertyChangedStatic (void* inClientData, AudioSessionPropertyID inID, UInt32 inDataSize, const void* inPropertyValue)
+    {
+        ((IPhoneAudioIODevice*) inClientData)->propertyChanged (inID, inDataSize, inPropertyValue);
+    }
+    static void interruptionListenerStatic (void* inClientData, UInt32 inInterruption)
+    {
+        ((IPhoneAudioIODevice*) inClientData)->interruptionListener (inInterruption);
+    }
+    void resetFormat (const int numChannels)
+    {
+        memset (&format, 0, sizeof (format));
+        format.mFormatID = kAudioFormatLinearPCM;
+        format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
+        format.mBitsPerChannel = 8 * sizeof (short);
+        format.mChannelsPerFrame = 2;
+        format.mFramesPerPacket = 1;
+        format.mBytesPerFrame = format.mBytesPerPacket = 2 * sizeof (short);
+    }
+    bool createAudioUnit()
+    {
+        if (audioUnit != 0)
+        {
+            AudioComponentInstanceDispose (audioUnit);
+            audioUnit = 0;
+        }
+        resetFormat (2);
+        AudioComponentDescription desc;
+        desc.componentType = kAudioUnitType_Output;
+        desc.componentSubType = kAudioUnitSubType_RemoteIO;
+        desc.componentManufacturer = kAudioUnitManufacturer_Apple;
+        desc.componentFlags = 0;
+        desc.componentFlagsMask = 0;
+        AudioComponent comp = AudioComponentFindNext (0, &desc);
+        AudioComponentInstanceNew (comp, &audioUnit);
+        if (audioUnit == 0)
+            return false;
+        const UInt32 one = 1;
+        AudioUnitSetProperty (audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof (one));
+        AudioChannelLayout layout;
+        layout.mChannelBitmap = 0;
+        layout.mNumberChannelDescriptions = 0;
+        layout.mChannelLayoutTag = kAudioChannelLayoutTag_StereoHeadphones;
+        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Input, 0, &layout, sizeof (layout));
+        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Output, 0, &layout, sizeof (layout));
+        AURenderCallbackStruct inputProc;
+        inputProc.inputProc = processStatic;
+        inputProc.inputProcRefCon = this;
+        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inputProc, sizeof (inputProc));
+        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &format, sizeof (format));
+        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, sizeof (format));
+        AudioUnitInitialize (audioUnit);
+        return true;
+    }
+    // If the routing is set to go through the receiver (i.e. the speaker, but quiet), this re-routes it
+    // to make it loud. Needed because by default when using an input + output, the output is kept quiet.
+    static void fixAudioRouteIfSetToReceiver() throw()
+    {
+        CFStringRef audioRoute = 0;
+        UInt32 propertySize = sizeof (audioRoute);
+        if (AudioSessionGetProperty (kAudioSessionProperty_AudioRoute, &propertySize, &audioRoute) == noErr)
+        {
+            NSString* route = (NSString*) audioRoute;
+            //printf ("audio route: %s\n", [route cString]);
+            if ([route hasPrefix: @"Receiver"])
+            {
+                UInt32 audioRouteOverride = kAudioSessionOverrideAudioRoute_Speaker;
+                AudioSessionSetProperty (kAudioSessionProperty_OverrideAudioRoute, sizeof (audioRouteOverride), &audioRouteOverride);
+            }
+            CFRelease (audioRoute);
+        }
+    }
+    IPhoneAudioIODevice (const IPhoneAudioIODevice&);
+    const IPhoneAudioIODevice& operator= (const IPhoneAudioIODevice&);
 };
 class IPhoneAudioIODeviceType  : public AudioIODeviceType
@@ -261393,8 +261718,7 @@ class IPhoneAudioIODeviceType : public AudioIODeviceType
 public:
     IPhoneAudioIODeviceType()
-        : AudioIODeviceType (T("iPhone Audio")),
-          hasScanned (false)
+        : AudioIODeviceType (T("iPhone Audio"))
     {
     }
@@ -261409,6 +261733,7 @@ public:
     const StringArray getDeviceNames (const bool wantInputNames) const
     {
         StringArray s;
+        s.add (wantInputNames ? "Microphone" : "Speaker");
         return s;
     }
@@ -261419,20 +261744,19 @@ public:
     int getIndexOfDevice (AudioIODevice* device, const bool asInput) const
     {
-        return 0;
+        return device != 0 ? 0 : -1;
     }
-    bool hasSeparateInputsAndOutputs() const    { return true; }
+    bool hasSeparateInputsAndOutputs() const    { return false; }
     AudioIODevice* createDevice (const String& outputDeviceName,
                                  const String& inputDeviceName)
     {
-        if (outputDeviceName.isNotEmpty() && inputDeviceName.isNotEmpty())
-            return new CoreAudioIODevice (deviceName,
-                                          inputIds [inputIndex],
-                                          inputIndex,
-                                          outputIds [outputIndex],
-                                          outputIndex);
+        if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
+        {
+            return new IPhoneAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
+                                                                          : inputDeviceName);
+        }
         return 0;
     }
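Taken together, the new IPhoneAudioIODevice and the updated IPhoneAudioIODeviceType expose one combined input/output device, reported as "Speaker" and "Microphone". A minimal sketch of driving it directly through the classes in this patch; a real application would normally go through AudioDeviceManager, and the callback and driver function below are hypothetical:

    // Hypothetical render callback: just clears the output buffers.
    class SilenceCallback  : public AudioIODeviceCallback
    {
    public:
        void audioDeviceAboutToStart (AudioIODevice*)   {}
        void audioDeviceStopped()                       {}

        void audioDeviceIOCallback (const float** /*inputChannelData*/, int /*numInputChannels*/,
                                    float** outputChannelData, int numOutputChannels, int numSamples)
        {
            // The device delivers de-interleaved floats, already converted by
            // process() above from the interleaved 16-bit RemoteIO stream.
            for (int chan = 0; chan < numOutputChannels; ++chan)
                zeromem (outputChannelData[chan], sizeof (float) * numSamples);
        }
    };

    // Hypothetical driver function, not part of the patch.
    void runIPhoneAudioSketch()
    {
        IPhoneAudioIODeviceType deviceType;
        deviceType.scanForDevices();

        AudioIODevice* const device = deviceType.createDevice (T("Speaker"), T("Microphone"));

        BitArray inputs, outputs;
        inputs.setRange (0, 2, true);
        outputs.setRange (0, 2, true);

        SilenceCallback callback;

        if (device != 0 && device->open (inputs, outputs, 44100.0, 0).isEmpty())
        {
            device->start (&callback);
            // ... run for a while ...
            device->stop();
            device->close();
        }

        delete device;
    }

Because hasSeparateInputsAndOutputs() now returns false, callers are expected to pass the same (or only one) device name for input and output, which is exactly what createDevice() above accepts.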
@@ -39225,7 +39225,7 @@ private:
 #define __JUCE_GLYPHARRANGEMENT_JUCEHEADER__
 /**
-    An glyph from a particular font, with a particular size, style,
+    A glyph from a particular font, with a particular size, style,
     typeface and position.
     @see GlyphArrangement, Font
@@ -47015,6 +47015,10 @@ public:
     */
     virtual int getColumnAutoSizeWidth (int columnId);
+    /** Returns a tooltip for a particular cell in the table.
+    */
+    virtual const String getCellTooltip (int rowNumber, int columnId);
     /** Override this to be informed when rows are selected or deselected.
         @see ListBox::selectedRowsChanged()
@@ -53668,7 +53672,7 @@ private:
 /********* End of inlined file: juce_ActiveXControlComponent.h *********/
   typedef ActiveXControlComponent QTCompBaseClass;
-#else
+#elif JUCE_MAC
   typedef NSViewComponent QTCompBaseClass;
 #endif
@@ -38,7 +38,8 @@ BEGIN_JUCE_NAMESPACE
 //==============================================================================
 static const tchar* const tableColumnPropertyTag = T("_tableColumnID");
-class TableListRowComp   : public Component
+class TableListRowComp   : public Component,
+                           public TooltipClient
 {
 public:
     TableListRowComp (TableListBox& owner_)
@@ -242,6 +243,19 @@ public:
             owner.getModel()->cellDoubleClicked (row, columnId, e);
     }
+    const String getTooltip()
+    {
+        int x, y;
+        getMouseXYRelative (x, y);
+        const int columnId = owner.getHeader()->getColumnIdAtX (x);
+        if (columnId != 0 && owner.getModel() != 0)
+            return owner.getModel()->getCellTooltip (row, columnId);
+        return String::empty;
+    }
     juce_UseDebuggingNewOperator
 private:
@@ -144,6 +144,10 @@ public:
     */
     virtual int getColumnAutoSizeWidth (int columnId);
+    /** Returns a tooltip for a particular cell in the table.
+    */
+    virtual const String getCellTooltip (int rowNumber, int columnId);
     //==============================================================================
     /** Override this to be informed when rows are selected or deselected.
         @see ListBox::selectedRowsChanged()
@@ -233,8 +233,8 @@ public:
                 comp->setBounds (pos);
             }
         }
-        if ((! keep)
+        if ((! keep)
             && Component::isMouseButtonDownAnywhere()
             && (comp == Component::getComponentUnderMouse()
                 || comp->isParentOf (Component::getComponentUnderMouse())))
@@ -225,7 +225,7 @@ void MenuBarComponent::showMenu (int index)
     if (m.lookAndFeel == 0)
         m.setLookAndFeel (&getLookAndFeel());
     currentPopup = m.createMenuComponent (x, getScreenY(),
                                           w, getHeight(),
                                           0, w, 0, 0,
@@ -33,7 +33,7 @@
 #if JUCE_WINDOWS
   #include "juce_ActiveXControlComponent.h"
   typedef ActiveXControlComponent QTCompBaseClass;
-#else
+#elif JUCE_MAC
   #include "juce_NSViewComponent.h"
   typedef NSViewComponent QTCompBaseClass;
 #endif
@@ -32,7 +32,7 @@
 //==============================================================================
 /**
-    An glyph from a particular font, with a particular size, style,
+    A glyph from a particular font, with a particular size, style,
     typeface and position.
     @see GlyphArrangement, Font
@@ -28,39 +28,44 @@
 #if JUCE_INCLUDED_FILE
-class IPhoneAudioIODevice  : public AudioIODeviceType
+//==================================================================================================
+class IPhoneAudioIODevice  : public AudioIODevice
 {
 public:
     //==============================================================================
-    IPhoneAudioIODevice (const String& deviceName, const bool isInput_)
+    IPhoneAudioIODevice (const String& deviceName)
         : AudioIODevice (deviceName, T("Audio")),
-          isInput (isInput_),
-          isOpen_ (false),
-          isStarted (false)
+          audioUnit (0),
+          isRunning (false),
+          callback (0),
+          actualBufferSize (0),
+          floatData (1, 2)
     {
+        numInputChannels = 2;
+        numOutputChannels = 2;
+        preferredBufferSize = 0;
+        AudioSessionInitialize (0, 0, interruptionListenerStatic, this);
+        updateDeviceInfo();
     }
-    ~IPhoneAudioIODeviceType()
+    ~IPhoneAudioIODevice()
    {
+        close();
     }
     const StringArray getOutputChannelNames()
     {
         StringArray s;
-        if (! isInput)
-        {
-            s.add ("Left");
-            s.add ("Right");
-        }
+        s.add ("Left");
+        s.add ("Right");
         return s;
     }
     const StringArray getInputChannelNames()
     {
         StringArray s;
-        if (isInput)
+        if (audioInputIsAvailable)
         {
             s.add ("Left");
             s.add ("Right");
@@ -70,137 +75,150 @@ public:
     int getNumSampleRates()
     {
-        return sampleRates.size();
+        return 1;
     }
     double getSampleRate (int index)
     {
-        return sampleRates [index];
+        return sampleRate;
     }
     int getNumBufferSizesAvailable()
     {
-        return bufferSizes.size();
+        return 1;
     }
     int getBufferSizeSamples (int index)
     {
-        return bufferSizes [index];
+        return getDefaultBufferSize();
     }
     int getDefaultBufferSize()
     {
-        for (int i = 0; i < getNumBufferSizesAvailable(); ++i)
-            if (getBufferSizeSamples(i) >= 512)
-                return getBufferSizeSamples(i);
-        return 512;
+        return 1024;
     }
     const String open (const BitArray& inputChannels,
                        const BitArray& outputChannels,
                        double sampleRate,
-                       int bufferSizeSamples)
+                       int bufferSize)
     {
-        isOpen_ = true;
-        if (bufferSizeSamples <= 0)
-            bufferSizeSamples = getDefaultBufferSize();
+        close();
         lastError = String::empty;
+        preferredBufferSize = (bufferSize <= 0) ? getDefaultBufferSize() : bufferSize;
+        // xxx set up channel mapping
+        activeOutputChans = outputChannels;
+        activeOutputChans.setRange (2, activeOutputChans.getHighestBit(), false);
+        numOutputChannels = activeOutputChans.countNumberOfSetBits();
+        monoOutputChannelNumber = activeOutputChans.findNextSetBit (0);
+        activeInputChans = inputChannels;
+        activeInputChans.setRange (2, activeInputChans.getHighestBit(), false);
+        numInputChannels = activeInputChans.countNumberOfSetBits();
+        monoInputChannelNumber = activeInputChans.findNextSetBit (0);
+        AudioSessionSetActive (true);
+        UInt32 audioCategory = kAudioSessionCategory_PlayAndRecord;
+        AudioSessionSetProperty (kAudioSessionProperty_AudioCategory, sizeof (audioCategory), &audioCategory);
+        AudioSessionAddPropertyListener (kAudioSessionProperty_AudioRouteChange, propertyChangedStatic, this);
+        fixAudioRouteIfSetToReceiver();
+        updateDeviceInfo();
-        isOpen_ = lastError.isEmpty();
+        Float32 bufferDuration = preferredBufferSize / sampleRate;
+        AudioSessionSetProperty (kAudioSessionProperty_PreferredHardwareIOBufferDuration, sizeof (bufferDuration), &bufferDuration);
+        actualBufferSize = preferredBufferSize;
+        prepareFloatBuffers();
+        isRunning = true;
+        propertyChanged (0, 0, 0);  // creates and starts the AU
+        lastError = audioUnit != 0 ? String::empty
+                                   : T("Couldn't open the device");
         return lastError;
     }
     void close()
     {
-        isOpen_ = false;
+        if (isRunning)
+        {
+            isRunning = false;
+            AudioSessionSetActive (false);
+            if (audioUnit != 0)
+            {
+                AudioComponentInstanceDispose (audioUnit);
+                audioUnit = 0;
+            }
+        }
     }
     bool isOpen()
     {
-        return isOpen_;
+        return isRunning;
     }
     int getCurrentBufferSizeSamples()
     {
-        return internal != 0 ? internal->getBufferSize() : 512;
+        return actualBufferSize;
     }
     double getCurrentSampleRate()
     {
-        return internal != 0 ? internal->getSampleRate() : 0;
+        return sampleRate;
     }
     int getCurrentBitDepth()
     {
-        return 32;  // no way to find out, so just assume it's high..
+        return 16;
     }
     const BitArray getActiveOutputChannels() const
     {
-        return internal != 0 ? internal->activeOutputChans : BitArray();
+        return activeOutputChans;
     }
     const BitArray getActiveInputChannels() const
     {
-        BitArray chans;
-        if (internal != 0)
-        {
-            chans = internal->activeInputChans;
-            if (internal->inputDevice != 0)
-                chans.orWith (internal->inputDevice->activeInputChans);
-        }
-        return chans;
+        return activeInputChans;
     }
     int getOutputLatencyInSamples()
     {
-        if (internal == 0)
-            return 0;
-        // this seems like a good guess at getting the latency right - comparing
-        // this with a round-trip measurement, it gets it to within a few millisecs
-        // for the built-in mac soundcard
-        return internal->outputLatency + internal->getBufferSize() * 2;
+        return 0; //xxx
     }
     int getInputLatencyInSamples()
     {
-        if (internal == 0)
-            return 0;
-        return internal->inputLatency + internal->getBufferSize() * 2;
+        return 0; //xxx
     }
-    void start (AudioIODeviceCallback* callback)
+    void start (AudioIODeviceCallback* callback_)
     {
-        if (internal != 0 && ! isStarted)
+        if (isRunning && callback != callback_)
         {
-            if (callback != 0)
-                callback->audioDeviceAboutToStart (this);
+            if (callback_ != 0)
+                callback_->audioDeviceAboutToStart (this);
-            isStarted = true;
-            internal->start (callback);
+            callbackLock.enter();
+            callback = callback_;
+            callbackLock.exit();
         }
     }
     void stop()
     {
-        if (isStarted && internal != 0)
+        if (isRunning)
         {
-            AudioIODeviceCallback* const lastCallback = internal->callback;
-            isStarted = false;
-            internal->stop (true);
+            callbackLock.enter();
+            AudioIODeviceCallback* const lastCallback = callback;
+            callback = 0;
+            callbackLock.exit();
             if (lastCallback != 0)
                 lastCallback->audioDeviceStopped();
@@ -209,10 +227,7 @@ public:
     bool isPlaying()
     {
-        if (internal->callback == 0)
-            isStarted = false;
-        return isStarted;
+        return isRunning && callback != 0;
     }
     const String getLastError()
@@ -220,36 +235,290 @@ public:
         return lastError;
     }
-    int inputIndex, outputIndex;
-    juce_UseDebuggingNewOperator
 private:
-    CoreAudioInternal* internal;
-    bool isOpen_, isStarted;
+    //==================================================================================================
+    CriticalSection callbackLock;
+    Float64 sampleRate;
+    int numInputChannels, numOutputChannels;
+    int preferredBufferSize;
+    int actualBufferSize;
+    bool isRunning;
     String lastError;
-    static OSStatus hardwareListenerProc (AudioDeviceID /*inDevice*/, UInt32 /*inLine*/, const AudioObjectPropertyAddress* pa, void* inClientData)
+    AudioStreamBasicDescription format;
+    AudioUnit audioUnit;
+    UInt32 audioInputIsAvailable;
+    AudioIODeviceCallback* callback;
+    BitArray activeOutputChans, activeInputChans;
+    AudioSampleBuffer floatData;
+    float* inputChannels[3];
+    float* outputChannels[3];
+    bool monoInputChannelNumber, monoOutputChannelNumber;
+    void prepareFloatBuffers()
     {
-        CoreAudioInternal* const intern = (CoreAudioInternal*) inClientData;
+        floatData.setSize (numInputChannels + numOutputChannels, actualBufferSize);
+        zerostruct (inputChannels);
+        zerostruct (outputChannels);
+        for (int i = 0; i < numInputChannels; ++i)
+            inputChannels[i] = floatData.getSampleData (i);
+        for (int i = 0; i < numOutputChannels; ++i)
+            outputChannels[i] = floatData.getSampleData (i + numInputChannels);
+    }
+    //==================================================================================================
+    OSStatus process (AudioUnitRenderActionFlags* ioActionFlags, const AudioTimeStamp* inTimeStamp,
+                      UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList* ioData) throw()
+    {
+        OSStatus err = noErr;
+        if (audioInputIsAvailable)
+            err = AudioUnitRender (audioUnit, ioActionFlags, inTimeStamp, 1, inNumberFrames, ioData);
+        const ScopedLock sl (callbackLock);
-        switch (pa->mSelector)
+        if (callback != 0)
         {
-            case kAudioHardwarePropertyDevices:
-                intern->deviceDetailsChanged();
-                break;
-            case kAudioHardwarePropertyDefaultOutputDevice:
-            case kAudioHardwarePropertyDefaultInputDevice:
-            case kAudioHardwarePropertyDefaultSystemOutputDevice:
-                break;
+            if (audioInputIsAvailable && numInputChannels > 0)
+            {
+                short* shortData = (short*) ioData->mBuffers[0].mData;
+                if (numInputChannels >= 2)
+                {
+                    for (UInt32 i = 0; i < inNumberFrames; ++i)
+                    {
+                        inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
+                        inputChannels[1][i] = *shortData++ * (1.0f / 32768.0f);
+                    }
+                }
+                else
+                {
+                    if (monoInputChannelNumber > 0)
+                        ++shortData;
+                    for (UInt32 i = 0; i < inNumberFrames; ++i)
+                    {
+                        inputChannels[0][i] = *shortData++ * (1.0f / 32768.0f);
+                        ++shortData;
+                    }
+                }
+            }
+            else
+            {
+                zeromem (inputChannels[0], sizeof (float) * inNumberFrames);
+                zeromem (inputChannels[1], sizeof (float) * inNumberFrames);
+            }
+            callback->audioDeviceIOCallback ((const float**) inputChannels, numInputChannels,
+                                             outputChannels, numOutputChannels,
+                                             (int) inNumberFrames);
+            short* shortData = (short*) ioData->mBuffers[0].mData;
+            int n = 0;
+            if (numOutputChannels >= 2)
+            {
+                for (UInt32 i = 0; i < inNumberFrames; ++i)
+                {
+                    shortData [n++] = (short) (outputChannels[0][i] * 32767.0f);
+                    shortData [n++] = (short) (outputChannels[1][i] * 32767.0f);
+                }
+            }
+            else if (numOutputChannels == 1)
+            {
+                for (UInt32 i = 0; i < inNumberFrames; ++i)
+                {
+                    const short s = (short) (outputChannels[monoOutputChannelNumber][i] * 32767.0f);
+                    shortData [n++] = s;
+                    shortData [n++] = s;
+                }
+            }
+            else
+            {
+                zeromem (ioData->mBuffers[0].mData, 2 * sizeof (short) * inNumberFrames);
+            }
+        }
+        else
+        {
+            zeromem (ioData->mBuffers[0].mData, 2 * sizeof (short) * inNumberFrames);
         }
-        return noErr;
+        return err;
     }
-    CoreAudioIODevice (const CoreAudioIODevice&);
-    const CoreAudioIODevice& operator= (const CoreAudioIODevice&);
+    void updateDeviceInfo() throw()
+    {
+        UInt32 size = sizeof (sampleRate);
+        AudioSessionGetProperty (kAudioSessionProperty_CurrentHardwareSampleRate, &size, &sampleRate);
+        size = sizeof (audioInputIsAvailable);
+        AudioSessionGetProperty (kAudioSessionProperty_AudioInputAvailable, &size, &audioInputIsAvailable);
+    }
+    void propertyChanged (AudioSessionPropertyID inID, UInt32 inDataSize, const void* inPropertyValue)
+    {
+        if (! isRunning)
+            return;
+        if (inPropertyValue != 0)
+        {
+            CFDictionaryRef routeChangeDictionary = (CFDictionaryRef) inPropertyValue;
+            CFNumberRef routeChangeReasonRef = (CFNumberRef) CFDictionaryGetValue (routeChangeDictionary,
+                                                                                   CFSTR (kAudioSession_AudioRouteChangeKey_Reason));
+            SInt32 routeChangeReason;
+            CFNumberGetValue (routeChangeReasonRef, kCFNumberSInt32Type, &routeChangeReason);
+            if (routeChangeReason == kAudioSessionRouteChangeReason_OldDeviceUnavailable)
+                fixAudioRouteIfSetToReceiver();
+        }
+        updateDeviceInfo();
+        createAudioUnit();
+        AudioSessionSetActive (true);
+        if (audioUnit != 0)
+        {
+            UInt32 formatSize = sizeof (format);
+            AudioUnitGetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, &formatSize);
+            Float32 bufferDuration = preferredBufferSize / sampleRate;
+            UInt32 bufferDurationSize = sizeof (bufferDuration);
+            AudioSessionGetProperty (kAudioSessionProperty_CurrentHardwareIOBufferDuration, &bufferDurationSize, &bufferDurationSize);
+            actualBufferSize = (int) (sampleRate * bufferDuration + 0.5);
+            AudioOutputUnitStart (audioUnit);
+        }
+    }
+    void interruptionListener (UInt32 inInterruption)
+    {
+        if (inInterruption == kAudioSessionBeginInterruption)
+        {
+            isRunning = false;
+            AudioOutputUnitStop (audioUnit);
+            if (juce_iPhoneShowModalAlert ("Audio Interrupted",
+                                           "This could have been interrupted by another application or by unplugging a headset",
+                                           @"Resume",
+                                           @"Cancel"))
+            {
+                isRunning = true;
+                propertyChanged (0, 0, 0);
+            }
+        }
+        if (inInterruption == kAudioSessionEndInterruption)
+        {
+            isRunning = true;
+            AudioSessionSetActive (true);
+            AudioOutputUnitStart (audioUnit);
+        }
+    }
+    //==================================================================================================
+    static OSStatus processStatic (void* inRefCon, AudioUnitRenderActionFlags* ioActionFlags, const AudioTimeStamp* inTimeStamp,
+                                   UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList* ioData)
+    {
+        return ((IPhoneAudioIODevice*) inRefCon)->process (ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, ioData);
+    }
+    static void propertyChangedStatic (void* inClientData, AudioSessionPropertyID inID, UInt32 inDataSize, const void* inPropertyValue)
+    {
+        ((IPhoneAudioIODevice*) inClientData)->propertyChanged (inID, inDataSize, inPropertyValue);
+    }
+    static void interruptionListenerStatic (void* inClientData, UInt32 inInterruption)
+    {
+        ((IPhoneAudioIODevice*) inClientData)->interruptionListener (inInterruption);
+    }
+    //==================================================================================================
+    void resetFormat (const int numChannels)
+    {
+        memset (&format, 0, sizeof (format));
+        format.mFormatID = kAudioFormatLinearPCM;
+        format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
+        format.mBitsPerChannel = 8 * sizeof (short);
+        format.mChannelsPerFrame = 2;
+        format.mFramesPerPacket = 1;
+        format.mBytesPerFrame = format.mBytesPerPacket = 2 * sizeof (short);
+    }
+    bool createAudioUnit()
+    {
+        if (audioUnit != 0)
+        {
+            AudioComponentInstanceDispose (audioUnit);
+            audioUnit = 0;
+        }
+        resetFormat (2);
+        AudioComponentDescription desc;
+        desc.componentType = kAudioUnitType_Output;
+        desc.componentSubType = kAudioUnitSubType_RemoteIO;
+        desc.componentManufacturer = kAudioUnitManufacturer_Apple;
+        desc.componentFlags = 0;
+        desc.componentFlagsMask = 0;
+        AudioComponent comp = AudioComponentFindNext (0, &desc);
+        AudioComponentInstanceNew (comp, &audioUnit);
+        if (audioUnit == 0)
+            return false;
+        const UInt32 one = 1;
+        AudioUnitSetProperty (audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof (one));
+        AudioChannelLayout layout;
+        layout.mChannelBitmap = 0;
+        layout.mNumberChannelDescriptions = 0;
+        layout.mChannelLayoutTag = kAudioChannelLayoutTag_StereoHeadphones;
+        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Input, 0, &layout, sizeof (layout));
+        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Output, 0, &layout, sizeof (layout));
+        AURenderCallbackStruct inputProc;
+        inputProc.inputProc = processStatic;
+        inputProc.inputProcRefCon = this;
+        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &inputProc, sizeof (inputProc));
+        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &format, sizeof (format));
+        AudioUnitSetProperty (audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &format, sizeof (format));
+        AudioUnitInitialize (audioUnit);
+        return true;
+    }
+    // If the routing is set to go through the receiver (i.e. the speaker, but quiet), this re-routes it
+    // to make it loud. Needed because by default when using an input + output, the output is kept quiet.
+    static void fixAudioRouteIfSetToReceiver() throw()
+    {
+        CFStringRef audioRoute = 0;
+        UInt32 propertySize = sizeof (audioRoute);
+        if (AudioSessionGetProperty (kAudioSessionProperty_AudioRoute, &propertySize, &audioRoute) == noErr)
+        {
+            NSString* route = (NSString*) audioRoute;
+            //printf ("audio route: %s\n", [route cString]);
+            if ([route hasPrefix: @"Receiver"])
+            {
+                UInt32 audioRouteOverride = kAudioSessionOverrideAudioRoute_Speaker;
+                AudioSessionSetProperty (kAudioSessionProperty_OverrideAudioRoute, sizeof (audioRouteOverride), &audioRouteOverride);
+            }
+            CFRelease (audioRoute);
+        }
+    }
+    IPhoneAudioIODevice (const IPhoneAudioIODevice&);
+    const IPhoneAudioIODevice& operator= (const IPhoneAudioIODevice&);
 };
@@ -259,8 +528,7 @@ class IPhoneAudioIODeviceType : public AudioIODeviceType
 public:
     //==============================================================================
     IPhoneAudioIODeviceType()
-        : AudioIODeviceType (T("iPhone Audio")),
-          hasScanned (false)
+        : AudioIODeviceType (T("iPhone Audio"))
     {
     }
@@ -276,6 +544,7 @@ public:
     const StringArray getDeviceNames (const bool wantInputNames) const
     {
         StringArray s;
+        s.add (wantInputNames ? "Microphone" : "Speaker");
         return s;
     }
@@ -286,20 +555,19 @@ public:
     int getIndexOfDevice (AudioIODevice* device, const bool asInput) const
     {
-        return 0;
+        return device != 0 ? 0 : -1;
     }
-    bool hasSeparateInputsAndOutputs() const    { return true; }
+    bool hasSeparateInputsAndOutputs() const    { return false; }
     AudioIODevice* createDevice (const String& outputDeviceName,
                                  const String& inputDeviceName)
     {
-        if (outputDeviceName.isNotEmpty() && inputDeviceName.isNotEmpty())
-            return new CoreAudioIODevice (deviceName,
-                                          inputIds [inputIndex],
-                                          inputIndex,
-                                          outputIds [outputIndex],
-                                          outputIndex);
+        if (outputDeviceName.isNotEmpty() || inputDeviceName.isNotEmpty())
+        {
+            return new IPhoneAudioIODevice (outputDeviceName.isNotEmpty() ? outputDeviceName
+                                                                          : inputDeviceName);
+        }
         return 0;
     }
@@ -95,34 +95,77 @@ void PlatformUtilities::addItemToDock (const File& file)
 //==============================================================================
 #if ! JUCE_ONLY_BUILD_CORE_LIBRARY
-bool AlertWindow::showNativeDialogBox (const String& title,
-                                       const String& bodyText,
-                                       bool isOkCancel)
+END_JUCE_NAMESPACE
+@interface JuceAlertBoxDelegate : NSObject
+{
+@public
+    bool clickedOk;
+}
+- (void) alertView: (UIAlertView*) alertView clickedButtonAtIndex: (NSInteger) buttonIndex;
+@end
+@implementation JuceAlertBoxDelegate
+- (void) alertView: (UIAlertView*) alertView clickedButtonAtIndex: (NSInteger) buttonIndex
+{
+    clickedOk = (buttonIndex == 0);
+    alertView.hidden = true;
+}
+@end
+BEGIN_JUCE_NAMESPACE
+// (This function is used directly by other bits of code)
+bool juce_iPhoneShowModalAlert (const String& title,
+                                const String& bodyText,
+                                NSString* okButtonText,
+                                NSString* cancelButtonText)
 {
     const ScopedAutoReleasePool pool;
-    UIAlertView *alert = [[[UIAlertView alloc] initWithTitle: juceStringToNS (title)
-                                                     message: juceStringToNS (title)
-                                                    delegate: nil
-                                           cancelButtonTitle: @"OK"
-                                           otherButtonTitles: (isOkCancel ? @"Cancel" : nil), nil] autorelease];
-    alert.cancelButtonIndex = alert.firstOtherButtonIndex;
+    JuceAlertBoxDelegate* callback = [[JuceAlertBoxDelegate alloc] init];
+    UIAlertView* alert = [[UIAlertView alloc] initWithTitle: juceStringToNS (title)
+                                                    message: juceStringToNS (bodyText)
+                                                   delegate: callback
+                                          cancelButtonTitle: okButtonText
+                                          otherButtonTitles: cancelButtonText, nil];
+    [alert retain];
     [alert show];
-    // xxx need to use a delegate to find which button was clicked
-    return false;
+    while (! alert.hidden && alert.superview != nil)
+        [[NSRunLoop mainRunLoop] runUntilDate: [NSDate dateWithTimeIntervalSinceNow: 0.01]];
+    const bool result = callback->clickedOk;
+    [alert release];
+    [callback release];
+    return result;
+}
+bool AlertWindow::showNativeDialogBox (const String& title,
+                                       const String& bodyText,
+                                       bool isOkCancel)
+{
+    return juce_iPhoneShowModalAlert (title, bodyText,
+                                      @"OK",
+                                      isOkCancel ? @"Cancel" : nil);
 }
 //==============================================================================
 bool DragAndDropContainer::performExternalDragDropOfFiles (const StringArray& files, const bool canMoveFiles)
 {
-    jassertfalse    // not implemented!
+    jassertfalse    // no such thing on the iphone!
     return false;
 }
 bool DragAndDropContainer::performExternalDragDropOfText (const String& text)
 {
-    jassertfalse    // not implemented!
+    jassertfalse    // no such thing on the iphone!
     return false;
 }
@@ -74,8 +74,6 @@
 #if MACOS_10_4_OR_EARLIER
   #include <GLUT/glut.h>
-  typedef int NSInteger;
-  typedef unsigned int NSUInteger;
 #endif
 #endif   // __JUCE_MAC_NATIVEINCLUDES_JUCEHEADER__
@@ -1816,11 +1816,12 @@ public:
     AudioIODevice* createDevice (const String& outputDeviceName,
                                  const String& inputDeviceName)
     {
+        // ASIO can't open two different devices for input and output - they must be the same one.
         jassert (inputDeviceName == outputDeviceName || outputDeviceName.isEmpty() || inputDeviceName.isEmpty());
-        (void) inputDeviceName;
         jassert (hasScanned); // need to call scanForDevices() before doing this
-        const int index = deviceNames.indexOf (outputDeviceName);
+        const int index = deviceNames.indexOf (outputDeviceName.isNotEmpty() ? outputDeviceName
+                                                                             : inputDeviceName);
         if (index >= 0)
         {