The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes

/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2017 - ROLI Ltd.

   JUCE is an open source library subject to commercial or open-source
   licensing.

   By using JUCE, you agree to the terms of both the JUCE 5 End-User License
   Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
   27th April 2017).

   End User License Agreement: www.juce.com/juce-5-licence
   Privacy Policy: www.juce.com/juce-5-privacy-policy

   Or: You may also use this code under the terms of the GPL v3 (see
   www.gnu.org/licenses).

   JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
   EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
   DISCLAIMED.

  ==============================================================================
*/
#include "../../juce_core/system/juce_TargetPlatform.h"
#include "../utility/juce_CheckSettingMacros.h"

#if JucePlugin_Build_AUv3

#import <CoreAudioKit/CoreAudioKit.h>
#import <AudioToolbox/AudioToolbox.h>
#import <AVFoundation/AVFoundation.h>

#if JUCE_MAC
 #if (! defined MAC_OS_X_VERSION_MIN_REQUIRED) || (! defined MAC_OS_X_VERSION_10_11) || (MAC_OS_X_VERSION_MIN_REQUIRED < MAC_OS_X_VERSION_10_11)
  #error AUv3 needs Deployment Target OS X 10.11 or higher to compile
 #endif
 #if (defined MAC_OS_X_VERSION_10_13) && (MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_13)
  #define JUCE_AUV3_MIDI_OUTPUT_SUPPORTED 1
  #define JUCE_AUV3_VIEW_CONFIG_SUPPORTED 1
 #endif
#endif

#if JUCE_IOS
 #if (! defined __IPHONE_OS_VERSION_MIN_REQUIRED) || (! defined __IPHONE_9_0) || (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_9_0)
  #error AUv3 needs Deployment Target iOS 9.0 or higher to compile
 #endif
 #if (defined __IPHONE_11_0) && (__IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_11_0)
  #define JUCE_AUV3_MIDI_OUTPUT_SUPPORTED 1
  #define JUCE_AUV3_VIEW_CONFIG_SUPPORTED 1
 #endif
#endif

#ifndef __OBJC2__
 #error AUv3 needs Objective-C 2 support (compile with 64-bit)
#endif

#define JUCE_CORE_INCLUDE_OBJC_HELPERS 1

#include "../utility/juce_IncludeSystemHeaders.h"
#include "../utility/juce_IncludeModuleHeaders.h"
#include "../../juce_graphics/native/juce_mac_CoreGraphicsHelpers.h"
#include "../../juce_audio_basics/native/juce_mac_CoreAudioLayouts.h"
#include "../../juce_audio_processors/format_types/juce_LegacyAudioParameter.cpp"
#include "../../juce_audio_processors/format_types/juce_AU_Shared.h"

#define JUCE_VIEWCONTROLLER_OBJC_NAME(x) JUCE_JOIN_MACRO (x, FactoryAUv3)

#if JUCE_IOS
 #define JUCE_IOS_MAC_VIEW UIView
#else
 #define JUCE_IOS_MAC_VIEW NSView
#endif

#define JUCE_AUDIOUNIT_OBJC_NAME(x) JUCE_JOIN_MACRO (x, AUv3)

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wnullability-completeness"

using namespace juce;
struct AudioProcessorHolder : public ReferenceCountedObject
{
    AudioProcessorHolder() {}
    AudioProcessorHolder (AudioProcessor* p) : processor (p) {}

    AudioProcessor& operator*() noexcept { return *processor; }
    AudioProcessor* operator->() noexcept { return processor.get(); }
    AudioProcessor* get() noexcept { return processor.get(); }

    struct ViewConfig
    {
        double width;
        double height;
        bool hostHasMIDIController;
    };

    std::unique_ptr<ViewConfig> viewConfiguration;

    using Ptr = ReferenceCountedObjectPtr<AudioProcessorHolder>;

private:
    std::unique_ptr<AudioProcessor> processor;

    AudioProcessorHolder& operator= (AudioProcessor*) = delete;
    AudioProcessorHolder (AudioProcessorHolder&) = delete;
    AudioProcessorHolder& operator= (AudioProcessorHolder&) = delete;
};

//==============================================================================
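// The AUAudioUnit subclass is generated at runtime (see the Class struct further
// down) and keeps a pointer to a JuceAudioUnitv3Base in the "cppObject" instance
// variable. Each Objective-C selector is forwarded to the matching virtual method
// below, so the AUv3 behaviour can be implemented in ordinary C++.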
class JuceAudioUnitv3Base
{
public:
    JuceAudioUnitv3Base (const AudioComponentDescription& descr,
                         AudioComponentInstantiationOptions options,
                         NSError** error)
       #pragma clang diagnostic push
       #pragma clang diagnostic ignored "-Wobjc-method-access"
        : au ([audioUnitObjCClass.createInstance() initWithComponentDescription: descr
                                                                         options: options
                                                                           error: error
                                                                       juceClass: this])
       #pragma clang diagnostic pop
    {}

    JuceAudioUnitv3Base (AUAudioUnit* audioUnit) : au (audioUnit)
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());
        initialiseJuce_GUI();
    }

    virtual ~JuceAudioUnitv3Base() {}

    //==============================================================================
    AUAudioUnit* getAudioUnit() noexcept { return au; }

    //==============================================================================
    virtual void reset() = 0;

    //==============================================================================
    virtual AUAudioUnitPreset* getCurrentPreset() = 0;
    virtual void setCurrentPreset(AUAudioUnitPreset*) = 0;
    virtual NSArray<AUAudioUnitPreset*>* getFactoryPresets() = 0;

    virtual NSDictionary<NSString*, id>* getFullState()
    {
        objc_super s = { getAudioUnit(), [AUAudioUnit class] };
        return ObjCMsgSendSuper<NSDictionary<NSString*, id>*> (&s, @selector (fullState));
    }

    virtual void setFullState (NSDictionary<NSString*, id>* state)
    {
        objc_super s = { getAudioUnit(), [AUAudioUnit class] };
        ObjCMsgSendSuper<void, NSDictionary<NSString*, id>*> (&s, @selector (setFullState:), state);
    }

    virtual AUParameterTree* getParameterTree() = 0;
    virtual NSArray<NSNumber*>* parametersForOverviewWithCount (int) = 0;

    //==============================================================================
    virtual NSTimeInterval getLatency() = 0;
    virtual NSTimeInterval getTailTime() = 0;

    //==============================================================================
    virtual AUAudioUnitBusArray* getInputBusses() = 0;
    virtual AUAudioUnitBusArray* getOutputBusses() = 0;
    virtual NSArray<NSNumber*>* getChannelCapabilities() = 0;
    virtual bool shouldChangeToFormat (AVAudioFormat*, AUAudioUnitBus*) = 0;

    //==============================================================================
    virtual int getVirtualMIDICableCount() = 0;
    virtual bool getSupportsMPE() = 0;
    virtual NSArray<NSString*>* getMIDIOutputNames() = 0;

    //==============================================================================
    virtual AUInternalRenderBlock getInternalRenderBlock() = 0;
    virtual bool getCanProcessInPlace() { return false; }
    virtual bool getRenderingOffline() = 0;
    virtual void setRenderingOffline (bool offline) = 0;

    virtual bool getShouldBypassEffect()
    {
        objc_super s = { getAudioUnit(), [AUAudioUnit class] };
        return (ObjCMsgSendSuper<BOOL> (&s, @selector (shouldBypassEffect)) == YES);
    }

    virtual void setShouldBypassEffect (bool shouldBypass)
    {
        objc_super s = { getAudioUnit(), [AUAudioUnit class] };
        ObjCMsgSendSuper<void, BOOL> (&s, @selector (setShouldBypassEffect:), shouldBypass ? YES : NO);
    }

    //==============================================================================
    virtual NSString* getContextName() const = 0;
    virtual void setContextName (NSString*) = 0;

    virtual bool allocateRenderResourcesAndReturnError (NSError **outError)
    {
        objc_super s = { getAudioUnit(), [AUAudioUnit class] };
        return (ObjCMsgSendSuper<BOOL, NSError**> (&s, @selector (allocateRenderResourcesAndReturnError:), outError) == YES);
    }

    virtual void deallocateRenderResources()
    {
        objc_super s = { getAudioUnit(), [AUAudioUnit class] };
        ObjCMsgSendSuper<void> (&s, @selector (deallocateRenderResources));
    }

    //==============================================================================
   #if JUCE_AUV3_VIEW_CONFIG_SUPPORTED
    virtual NSIndexSet* getSupportedViewConfigurations (NSArray<AUAudioUnitViewConfiguration*>*) = 0;
    virtual void selectViewConfiguration (AUAudioUnitViewConfiguration*) = 0;
   #endif

private:
    struct Class : public ObjCClass<AUAudioUnit>
    {
        Class() : ObjCClass<AUAudioUnit> ("AUAudioUnit_")
        {
            addIvar<JuceAudioUnitv3Base*> ("cppObject");

           #pragma clang diagnostic push
           #pragma clang diagnostic ignored "-Wundeclared-selector"
            addMethod (@selector (initWithComponentDescription:options:error:juceClass:),
                       initWithComponentDescriptionAndJuceClass, "@@:",
                       @encode (AudioComponentDescription),
                       @encode (AudioComponentInstantiationOptions), "^@@");
           #pragma clang diagnostic pop

            addMethod (@selector (initWithComponentDescription:options:error:),
                       initWithComponentDescription, "@@:",
                       @encode (AudioComponentDescription),
                       @encode (AudioComponentInstantiationOptions), "^@");

            addMethod (@selector (dealloc), dealloc, "v@:");

            //==============================================================================
            addMethod (@selector (reset), reset, "v@:");

            //==============================================================================
            addMethod (@selector (currentPreset), getCurrentPreset, "@@:");
            addMethod (@selector (setCurrentPreset:), setCurrentPreset, "v@:@");
            addMethod (@selector (factoryPresets), getFactoryPresets, "@@:");
            addMethod (@selector (fullState), getFullState, "@@:");
            addMethod (@selector (setFullState:), setFullState, "v@:@");
            addMethod (@selector (parameterTree), getParameterTree, "@@:");
            addMethod (@selector (parametersForOverviewWithCount:), parametersForOverviewWithCount, "@@:", @encode (NSInteger));

            //==============================================================================
            addMethod (@selector (latency), getLatency, @encode (NSTimeInterval), "@:");
            addMethod (@selector (tailTime), getTailTime, @encode (NSTimeInterval), "@:");

            //==============================================================================
            addMethod (@selector (inputBusses), getInputBusses, "@@:");
            addMethod (@selector (outputBusses), getOutputBusses, "@@:");
            addMethod (@selector (channelCapabilities), getChannelCapabilities, "@@:");
            addMethod (@selector (shouldChangeToFormat:forBus:), shouldChangeToFormat, "B@:@@");

            //==============================================================================
            addMethod (@selector (virtualMIDICableCount), getVirtualMIDICableCount, @encode (NSInteger), "@:");
            addMethod (@selector (supportsMPE), getSupportsMPE, @encode (BOOL), "@:");

           #if JUCE_AUV3_MIDI_OUTPUT_SUPPORTED
            addMethod (@selector (MIDIOutputNames), getMIDIOutputNames, "@@:");
           #endif

            //==============================================================================
            addMethod (@selector (internalRenderBlock), getInternalRenderBlock, @encode (AUInternalRenderBlock), "@:");
            addMethod (@selector (canProcessInPlace), getCanProcessInPlace, @encode (BOOL), "@:");
            addMethod (@selector (isRenderingOffline), getRenderingOffline, @encode (BOOL), "@:");
            addMethod (@selector (setRenderingOffline:), setRenderingOffline, "v@:", @encode (BOOL));
            addMethod (@selector (shouldBypassEffect), getShouldBypassEffect, @encode (BOOL), "@:");
            addMethod (@selector (setShouldBypassEffect:), setShouldBypassEffect, "v@:", @encode (BOOL));
            addMethod (@selector (allocateRenderResourcesAndReturnError:), allocateRenderResourcesAndReturnError, "B@:^@");
            addMethod (@selector (deallocateRenderResources), deallocateRenderResources, "v@:");

            //==============================================================================
            addMethod (@selector (contextName), getContextName, "@@:");
            addMethod (@selector (setContextName:), setContextName, "v@:@");

            //==============================================================================
           #if JUCE_AUV3_VIEW_CONFIG_SUPPORTED
            addMethod (@selector (supportedViewConfigurations:), getSupportedViewConfigurations, "@@:@");
            addMethod (@selector (selectViewConfiguration:), selectViewConfiguration, "v@:@");
           #endif

            registerClass();
        }

        //==============================================================================
        static JuceAudioUnitv3Base* _this (id self) { return getIvar<JuceAudioUnitv3Base*> (self, "cppObject"); }
        static void setThis (id self, JuceAudioUnitv3Base* cpp) { object_setInstanceVariable (self, "cppObject", cpp); }

        //==============================================================================
        static id initWithComponentDescription (id _self, SEL, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error)
        {
            AUAudioUnit* self = _self;

            objc_super s = { self, [AUAudioUnit class] };
            self = ObjCMsgSendSuper<AUAudioUnit*, AudioComponentDescription,
                                    AudioComponentInstantiationOptions, NSError**> (&s, @selector(initWithComponentDescription:options:error:), descr, options, error);

            JuceAudioUnitv3Base* juceAU = JuceAudioUnitv3Base::create (self, descr, options, error);

            setThis (self, juceAU);
            return self;
        }

        static id initWithComponentDescriptionAndJuceClass (id _self, SEL, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error, JuceAudioUnitv3Base* juceAU)
        {
            AUAudioUnit* self = _self;

            objc_super s = { self, [AUAudioUnit class] };
            self = ObjCMsgSendSuper<AUAudioUnit*, AudioComponentDescription,
                                    AudioComponentInstantiationOptions, NSError**> (&s, @selector(initWithComponentDescription:options:error:), descr, options, error);

            setThis (self, juceAU);
            return self;
        }
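
        // AUv3 instances may be released from an arbitrary thread, but the wrapped
        // JUCE objects must be destroyed on the message thread. When dealloc is not
        // called on the message thread, deletion is posted as a CallbackMessage and
        // the calling thread blocks on a WaitableEvent until it has completed.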
        static void dealloc (id self, SEL)
        {
            if (! MessageManager::getInstance()->isThisTheMessageThread())
            {
                WaitableEvent deletionEvent;

                struct AUDeleter : public CallbackMessage
                {
                    AUDeleter (id selfToDelete, WaitableEvent& event)
                        : parentSelf (selfToDelete), parentDeletionEvent (event)
                    {
                    }

                    void messageCallback() override
                    {
                        delete _this (parentSelf);
                        parentDeletionEvent.signal();
                    }

                    id parentSelf;
                    WaitableEvent& parentDeletionEvent;
                };

                (new AUDeleter (self, deletionEvent))->post();
                deletionEvent.wait (-1);
            }
            else
            {
                delete _this (self);
            }
        }

        //==============================================================================
        static void reset (id self, SEL) { _this (self)->reset(); }

        //==============================================================================
        static AUAudioUnitPreset* getCurrentPreset (id self, SEL) { return _this (self)->getCurrentPreset(); }
        static void setCurrentPreset (id self, SEL, AUAudioUnitPreset* preset) { return _this (self)->setCurrentPreset (preset); }
        static NSArray<AUAudioUnitPreset*>* getFactoryPresets (id self, SEL) { return _this (self)->getFactoryPresets(); }
        static NSDictionary<NSString*, id>* getFullState (id self, SEL) { return _this (self)->getFullState(); }
        static void setFullState (id self, SEL, NSDictionary<NSString *, id>* state) { return _this (self)->setFullState (state); }
        static AUParameterTree* getParameterTree (id self, SEL) { return _this (self)->getParameterTree(); }
        static NSArray<NSNumber*>* parametersForOverviewWithCount (id self, SEL, NSInteger count) { return _this (self)->parametersForOverviewWithCount (static_cast<int> (count)); }

        //==============================================================================
        static NSTimeInterval getLatency (id self, SEL) { return _this (self)->getLatency(); }
        static NSTimeInterval getTailTime (id self, SEL) { return _this (self)->getTailTime(); }

        //==============================================================================
        static AUAudioUnitBusArray* getInputBusses (id self, SEL) { return _this (self)->getInputBusses(); }
        static AUAudioUnitBusArray* getOutputBusses (id self, SEL) { return _this (self)->getOutputBusses(); }
        static NSArray<NSNumber*>* getChannelCapabilities (id self, SEL) { return _this (self)->getChannelCapabilities(); }
        static BOOL shouldChangeToFormat (id self, SEL, AVAudioFormat* format, AUAudioUnitBus* bus) { return _this (self)->shouldChangeToFormat (format, bus) ? YES : NO; }

        //==============================================================================
        static NSInteger getVirtualMIDICableCount (id self, SEL) { return _this (self)->getVirtualMIDICableCount(); }
        static BOOL getSupportsMPE (id self, SEL) { return _this (self)->getSupportsMPE() ? YES : NO; }
        static NSArray<NSString*>* getMIDIOutputNames (id self, SEL) { return _this (self)->getMIDIOutputNames(); }

        //==============================================================================
        static AUInternalRenderBlock getInternalRenderBlock (id self, SEL) { return _this (self)->getInternalRenderBlock(); }
        static BOOL getCanProcessInPlace (id self, SEL) { return _this (self)->getCanProcessInPlace() ? YES : NO; }
        static BOOL getRenderingOffline (id self, SEL) { return _this (self)->getRenderingOffline() ? YES : NO; }
        static void setRenderingOffline (id self, SEL, BOOL renderingOffline) { _this (self)->setRenderingOffline (renderingOffline); }
        static BOOL allocateRenderResourcesAndReturnError (id self, SEL, NSError** error) { return _this (self)->allocateRenderResourcesAndReturnError (error) ? YES : NO; }
        static void deallocateRenderResources (id self, SEL) { _this (self)->deallocateRenderResources(); }
        static BOOL getShouldBypassEffect (id self, SEL) { return _this (self)->getShouldBypassEffect() ? YES : NO; }
        static void setShouldBypassEffect (id self, SEL, BOOL shouldBypass) { _this (self)->setShouldBypassEffect (shouldBypass); }

        //==============================================================================
        static NSString* getContextName (id self, SEL) { return _this (self)->getContextName(); }
        static void setContextName (id self, SEL, NSString* str) { return _this (self)->setContextName (str); }

        //==============================================================================
       #if JUCE_AUV3_VIEW_CONFIG_SUPPORTED
        static NSIndexSet* getSupportedViewConfigurations (id self, SEL, NSArray<AUAudioUnitViewConfiguration*>* configs) { return _this (self)->getSupportedViewConfigurations (configs); }
        static void selectViewConfiguration (id self, SEL, AUAudioUnitViewConfiguration* config) { _this (self)->selectViewConfiguration (config); }
       #endif
    };

    static JuceAudioUnitv3Base* create (AUAudioUnit*, AudioComponentDescription, AudioComponentInstantiationOptions, NSError**);

    //==============================================================================
    static Class audioUnitObjCClass;

protected:
    AUAudioUnit* au;
};

//==============================================================================
JuceAudioUnitv3Base::Class JuceAudioUnitv3Base::audioUnitObjCClass;
//==============================================================================
//=========================== The actual AudioUnit =============================
//==============================================================================
class JuceAudioUnitv3 : public JuceAudioUnitv3Base,
                        public AudioProcessorListener,
                        public AudioPlayHead,
                        private AudioProcessorParameter::Listener
{
public:
    JuceAudioUnitv3 (const AudioProcessorHolder::Ptr& processor,
                     const AudioComponentDescription& descr,
                     AudioComponentInstantiationOptions options,
                     NSError** error)
        : JuceAudioUnitv3Base (descr, options, error),
          processorHolder (processor),
          mapper (*processorHolder->get())
    {
        init();
    }

    JuceAudioUnitv3 (AUAudioUnit* audioUnit, AudioComponentDescription, AudioComponentInstantiationOptions, NSError**)
        : JuceAudioUnitv3Base (audioUnit),
          processorHolder (new AudioProcessorHolder (createPluginFilterOfType (AudioProcessor::wrapperType_AudioUnitv3))),
          mapper (*processorHolder->get())
    {
        init();
    }

    ~JuceAudioUnitv3()
    {
        auto& processor = getAudioProcessor();
        processor.removeListener (this);

        if (bypassParam != nullptr)
            bypassParam->removeListener (this);

        removeEditor (processor);

        if (editorObserverToken != nullptr)
        {
            [paramTree.get() removeParameterObserver: editorObserverToken];
            editorObserverToken = nullptr;
        }
    }

    //==============================================================================
    void init()
    {
        inParameterChangedCallback = false;

        AudioProcessor& processor = getAudioProcessor();
        const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];

       #ifdef JucePlugin_PreferredChannelConfigurations
        short configs[][2] = {JucePlugin_PreferredChannelConfigurations};
        const int numConfigs = sizeof (configs) / sizeof (short[2]);

        jassert (numConfigs > 0 && (configs[0][0] > 0 || configs[0][1] > 0));
        processor.setPlayConfigDetails (configs[0][0], configs[0][1], kDefaultSampleRate, static_cast<int> (maxFrames));

        Array<AUChannelInfo> channelInfos;

        for (int i = 0; i < numConfigs; ++i)
        {
            AUChannelInfo channelInfo;
            channelInfo.inChannels = configs[i][0];
            channelInfo.outChannels = configs[i][1];

            channelInfos.add (channelInfo);
        }
       #else
        Array<AUChannelInfo> channelInfos = AudioUnitHelpers::getAUChannelInfo (processor);
       #endif

        processor.setPlayHead (this);
        totalInChannels = processor.getTotalNumInputChannels();
        totalOutChannels = processor.getTotalNumOutputChannels();

        {
            channelCapabilities.reset ([[NSMutableArray<NSNumber*> alloc] init]);

            for (int i = 0; i < channelInfos.size(); ++i)
            {
                AUChannelInfo& info = channelInfos.getReference (i);

                [channelCapabilities.get() addObject: [NSNumber numberWithInteger: info.inChannels]];
                [channelCapabilities.get() addObject: [NSNumber numberWithInteger: info.outChannels]];
            }
        }

        editorObserverToken = nullptr;
        internalRenderBlock = CreateObjCBlock (this, &JuceAudioUnitv3::renderCallback);

        processor.setRateAndBufferSizeDetails (kDefaultSampleRate, static_cast<int> (maxFrames));
        processor.prepareToPlay (kDefaultSampleRate, static_cast<int> (maxFrames));
        processor.addListener (this);

        addParameters();
        addPresets();

        addAudioUnitBusses (true);
        addAudioUnitBusses (false);
    }

    AudioProcessor& getAudioProcessor() const noexcept { return **processorHolder; }

    //==============================================================================
    void reset() override
    {
        midiMessages.clear();
        lastTimeStamp.mSampleTime = std::numeric_limits<Float64>::max();
    }

    //==============================================================================
    AUAudioUnitPreset* getCurrentPreset() override
    {
        const int n = static_cast<int> ([factoryPresets.get() count]);
        const int idx = static_cast<int> (getAudioProcessor().getCurrentProgram());

        if (idx < n)
            return [factoryPresets.get() objectAtIndex:static_cast<unsigned int> (idx)];

        return nullptr;
    }

    void setCurrentPreset(AUAudioUnitPreset* preset) override
    {
        const int n = static_cast<int> ([factoryPresets.get() count]);
        const int idx = static_cast<int> ([preset number]);

        if (isPositiveAndBelow (idx, n))
            getAudioProcessor().setCurrentProgram (idx);
    }

    NSArray<AUAudioUnitPreset*>* getFactoryPresets() override
    {
        return factoryPresets.get();
    }
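
    // Full-state handling: the plug-in's current program state is stored in the
    // superclass's fullState dictionary as NSData under JUCE_STATE_DICTIONARY_KEY,
    // and read back (after stripping the kAUPresetDataKey entry) in setFullState().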
    NSDictionary<NSString*, id>* getFullState() override
    {
        NSMutableDictionary<NSString*, id>* retval = [[NSMutableDictionary<NSString*, id> alloc] init];

        {
            NSDictionary<NSString*, id>* superRetval = JuceAudioUnitv3Base::getFullState();

            if (superRetval != nullptr)
                [retval addEntriesFromDictionary:superRetval];
        }

        juce::MemoryBlock state;
        getAudioProcessor().getCurrentProgramStateInformation (state);

        if (state.getSize() > 0)
        {
            NSData* ourState = [[NSData alloc] initWithBytes: state.getData()
                                                      length: state.getSize()];

            NSString* nsKey = [[NSString alloc] initWithUTF8String: JUCE_STATE_DICTIONARY_KEY];

            [retval setObject: ourState
                       forKey: nsKey];

            [nsKey release];
            [ourState release];
        }

        return [retval autorelease];
    }

    void setFullState (NSDictionary<NSString*, id>* state) override
    {
        if (state == nullptr)
            return;

        NSMutableDictionary<NSString*, id>* modifiedState = [[NSMutableDictionary<NSString*, id> alloc] init];
        [modifiedState addEntriesFromDictionary: state];

        NSString* nsPresetKey = [[NSString alloc] initWithUTF8String: kAUPresetDataKey];
        [modifiedState removeObjectForKey: nsPresetKey];
        [nsPresetKey release];

        JuceAudioUnitv3Base::setFullState (modifiedState);

        NSString* nsKey = [[NSString alloc] initWithUTF8String: JUCE_STATE_DICTIONARY_KEY];
        NSObject* obj = [modifiedState objectForKey: nsKey];
        [nsKey release];

        if (obj != nullptr)
        {
            if ([obj isKindOfClass:[NSData class]])
            {
                NSData* data = reinterpret_cast<NSData*> (obj);
                const int numBytes = static_cast<int> ([data length]);
                const juce::uint8* const rawBytes = reinterpret_cast<const juce::uint8* const> ([data bytes]);

                if (numBytes > 0)
                    getAudioProcessor().setCurrentProgramStateInformation (rawBytes, numBytes);
            }
        }

        [modifiedState release];
    }

    AUParameterTree* getParameterTree() override
    {
        return paramTree.get();
    }

    NSArray<NSNumber*>* parametersForOverviewWithCount (int count) override
    {
        const int n = static_cast<int> ([overviewParams.get() count]);

        if (count >= n)
            return overviewParams.get();

        NSMutableArray<NSNumber*>* retval = [[NSMutableArray<NSNumber*>alloc] initWithArray: overviewParams.get()];
        [retval removeObjectsInRange: NSMakeRange (static_cast<unsigned int> (count), static_cast<unsigned int> (n - count))];

        return [retval autorelease];
    }

    //==============================================================================
    NSTimeInterval getLatency() override
    {
        auto& p = getAudioProcessor();
        return p.getLatencySamples() / p.getSampleRate();
    }

    NSTimeInterval getTailTime() override
    {
        return getAudioProcessor().getTailLengthSeconds();
    }

    //==============================================================================
    AUAudioUnitBusArray* getInputBusses() override { return inputBusses.get(); }
    AUAudioUnitBusArray* getOutputBusses() override { return outputBusses.get(); }
    NSArray<NSNumber*>* getChannelCapabilities() override { return channelCapabilities.get(); }
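
    // Called when the host asks to change a bus format: the new channel count or
    // layout is accepted only if the corresponding JUCE bus reports it as supported
    // (or, with JucePlugin_PreferredChannelConfigurations, if it matches one of the
    // preferred configurations).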
    bool shouldChangeToFormat (AVAudioFormat* format, AUAudioUnitBus* auBus) override
    {
        const bool isInput = ([auBus busType] == AUAudioUnitBusTypeInput);
        const int busIdx = static_cast<int> ([auBus index]);
        const int newNumChannels = static_cast<int> ([format channelCount]);

        AudioProcessor& processor = getAudioProcessor();

        if (AudioProcessor::Bus* bus = processor.getBus (isInput, busIdx))
        {
           #ifdef JucePlugin_PreferredChannelConfigurations
            ignoreUnused (bus);

            short configs[][2] = {JucePlugin_PreferredChannelConfigurations};

            if (! AudioUnitHelpers::isLayoutSupported (processor, isInput, busIdx, newNumChannels, configs))
                return false;
           #else
            const AVAudioChannelLayout* layout = [format channelLayout];
            const AudioChannelLayoutTag layoutTag = (layout != nullptr ? [layout layoutTag] : 0);

            if (layoutTag != 0)
            {
                AudioChannelSet newLayout = CoreAudioLayouts::fromCoreAudio (layoutTag);

                if (newLayout.size() != newNumChannels)
                    return false;

                if (! bus->isLayoutSupported (newLayout))
                    return false;
            }
            else
            {
                if (! bus->isNumberOfChannelsSupported (newNumChannels))
                    return false;
            }
           #endif

            return true;
        }

        return false;
    }

    //==============================================================================
    int getVirtualMIDICableCount() override
    {
       #if JucePlugin_WantsMidiInput
        return 1;
       #else
        return 0;
       #endif
    }

    bool getSupportsMPE() override
    {
        return getAudioProcessor().supportsMPE();
    }

    NSArray<NSString*>* getMIDIOutputNames() override
    {
       #if JucePlugin_ProducesMidiOutput
        return @[@"MIDI Out"];
       #else
        return @[];
       #endif
    }

    //==============================================================================
    AUInternalRenderBlock getInternalRenderBlock() override { return internalRenderBlock; }
    bool getRenderingOffline() override { return getAudioProcessor().isNonRealtime(); }

    void setRenderingOffline (bool offline) override
    {
        auto& processor = getAudioProcessor();
        auto isCurrentlyNonRealtime = processor.isNonRealtime();

        if (isCurrentlyNonRealtime != offline)
        {
            ScopedLock callbackLock (processor.getCallbackLock());

            processor.setNonRealtime (offline);
            processor.prepareToPlay (processor.getSampleRate(), processor.getBlockSize());
        }
    }

    bool getShouldBypassEffect() override
    {
        if (bypassParam != nullptr)
            return (bypassParam->getValue() != 0.0f);

        return JuceAudioUnitv3Base::getShouldBypassEffect();
    }

    void setShouldBypassEffect (bool shouldBypass) override
    {
        if (bypassParam != nullptr)
            bypassParam->setValue (shouldBypass ? 1.0f : 0.0f);

        JuceAudioUnitv3Base::setShouldBypassEffect (shouldBypass);
    }

    //==============================================================================
    NSString* getContextName() const override { return juceStringToNS (contextName); }

    void setContextName (NSString* str) override
    {
        if (str != nullptr)
        {
            AudioProcessor::TrackProperties props;
            props.name = nsStringToJuce (str);

            getAudioProcessor().updateTrackProperties (props);
        }
    }

    //==============================================================================
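    // Called by the host before rendering starts. This queries the format the host
    // has negotiated on every AU bus, applies the resulting BusesLayout to the JUCE
    // processor, allocates the bus buffers and channel mapping, caches the host's
    // musical-context and transport-state blocks, and prepares the processor.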
    bool allocateRenderResourcesAndReturnError (NSError **outError) override
    {
        AudioProcessor& processor = getAudioProcessor();
        const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];

        if (! JuceAudioUnitv3Base::allocateRenderResourcesAndReturnError (outError))
            return false;

        if (outError != nullptr)
            *outError = nullptr;

        AudioProcessor::BusesLayout layouts;
        for (int dir = 0; dir < 2; ++dir)
        {
            const bool isInput = (dir == 0);
            const int n = AudioUnitHelpers::getBusCount (&processor, isInput);
            Array<AudioChannelSet>& channelSets = (isInput ? layouts.inputBuses : layouts.outputBuses);

            AUAudioUnitBusArray* auBuses = (isInput ? [getAudioUnit() inputBusses] : [getAudioUnit() outputBusses]);
            jassert ([auBuses count] == static_cast<NSUInteger> (n));

            for (int busIdx = 0; busIdx < n; ++busIdx)
            {
                AudioProcessor::Bus* bus = processor.getBus (isInput, busIdx);
                AVAudioFormat* format = [[auBuses objectAtIndexedSubscript:static_cast<NSUInteger> (busIdx)] format];

                AudioChannelSet newLayout;
                const AVAudioChannelLayout* layout = [format channelLayout];
                const AudioChannelLayoutTag layoutTag = (layout != nullptr ? [layout layoutTag] : 0);

                if (layoutTag != 0)
                    newLayout = CoreAudioLayouts::fromCoreAudio (layoutTag);
                else
                    newLayout = bus->supportedLayoutWithChannels (static_cast<int> ([format channelCount]));

                if (newLayout.isDisabled())
                    return false;

                channelSets.add (newLayout);
            }
        }

       #ifdef JucePlugin_PreferredChannelConfigurations
        short configs[][2] = {JucePlugin_PreferredChannelConfigurations};

        if (! AudioProcessor::containsLayout (layouts, configs))
        {
            if (outError != nullptr)
                *outError = [NSError errorWithDomain:NSOSStatusErrorDomain code:kAudioUnitErr_FormatNotSupported userInfo:nullptr];

            return false;
        }
       #endif

        if (! AudioUnitHelpers::setBusesLayout (&getAudioProcessor(), layouts))
        {
            if (outError != nullptr)
                *outError = [NSError errorWithDomain:NSOSStatusErrorDomain code:kAudioUnitErr_FormatNotSupported userInfo:nullptr];

            return false;
        }

        totalInChannels = processor.getTotalNumInputChannels();
        totalOutChannels = processor.getTotalNumOutputChannels();

        allocateBusBuffer (true);
        allocateBusBuffer (false);

        mapper.alloc();

        audioBuffer.prepare (totalInChannels, totalOutChannels, static_cast<int> (maxFrames));

        double sampleRate = (jmax (AudioUnitHelpers::getBusCount (&processor, true), AudioUnitHelpers::getBusCount (&processor, false)) > 0 ?
                             [[[([inputBusses.get() count] > 0 ? inputBusses.get() : outputBusses.get()) objectAtIndexedSubscript: 0] format] sampleRate] : 44100.0);

        processor.setRateAndBufferSizeDetails (sampleRate, static_cast<int> (maxFrames));
        processor.prepareToPlay (sampleRate, static_cast<int> (maxFrames));

        zeromem (&lastAudioHead, sizeof (lastAudioHead));
        hostMusicalContextCallback = [getAudioUnit() musicalContextBlock];
        hostTransportStateCallback = [getAudioUnit() transportStateBlock];

        reset();

        return true;
    }

    void deallocateRenderResources() override
    {
        hostMusicalContextCallback = nullptr;
        hostTransportStateCallback = nullptr;

        getAudioProcessor().releaseResources();
        audioBuffer.release();

        inBusBuffers. clear();
        outBusBuffers.clear();

        mapper.release();

        JuceAudioUnitv3Base::deallocateRenderResources();
    }

    //==============================================================================
   #if JUCE_AUV3_VIEW_CONFIG_SUPPORTED
    NSIndexSet* getSupportedViewConfigurations (NSArray<AUAudioUnitViewConfiguration*>* configs) override
    {
        auto supportedViewIndices = [[NSMutableIndexSet alloc] init];
        auto n = [configs count];

        if (auto* editor = getAudioProcessor().createEditorIfNeeded())
        {
            // If you hit this assertion then your plug-in's editor is reporting that it doesn't support
            // any host MIDI controller configurations!
            jassert (editor->supportsHostMIDIControllerPresence (true) || editor->supportsHostMIDIControllerPresence (false));

            for (auto i = 0u; i < n; ++i)
            {
                if (auto* viewConfiguration = [configs objectAtIndex:i])
                {
                    if (editor->supportsHostMIDIControllerPresence ([viewConfiguration hostHasController] == YES))
                    {
                        auto* constrainer = editor->getConstrainer();
                        auto height = (int) [viewConfiguration height];
                        auto width = (int) [viewConfiguration width];

                        if (height <= constrainer->getMaximumHeight() && height >= constrainer->getMinimumHeight()
                             && width <= constrainer->getMaximumWidth() && width >= constrainer->getMinimumWidth())
                            [supportedViewIndices addIndex: i];
                    }
                }
            }
        }

        return [supportedViewIndices autorelease];
    }

    void selectViewConfiguration (AUAudioUnitViewConfiguration* config) override
    {
        processorHolder->viewConfiguration.reset (new AudioProcessorHolder::ViewConfig { [config width], [config height], [config hostHasController] == YES });
    }
   #endif
    //==============================================================================
    void audioProcessorChanged (AudioProcessor* processor) override
    {
        ignoreUnused (processor);

        [au willChangeValueForKey: @"allParameterValues"];
        addPresets();
        [au didChangeValueForKey: @"allParameterValues"];
    }

    void audioProcessorParameterChanged (AudioProcessor*, int idx, float newValue) override
    {
        if (inParameterChangedCallback.get())
        {
            inParameterChangedCallback = false;
            return;
        }

        if (isPositiveAndBelow (idx, juceParameters.getNumParameters()))
        {
            if (AUParameter* param = [paramTree.get() parameterWithAddress: getAUParameterAddressForIndex (idx)])
            {
                if (editorObserverToken != nullptr)
                    [param setValue: newValue originator: editorObserverToken];
                else
                    [param setValue: newValue];
            }
        }
    }

    //==============================================================================
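    // AudioPlayHead implementation: position, tempo and time-signature information
    // is pulled from the host's AUHostMusicalContextBlock and AUHostTransportStateBlock
    // (when the host provides them) and combined with the timestamp of the current
    // render call.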
    bool getCurrentPosition (CurrentPositionInfo& info) override
    {
        bool musicContextCallSucceeded = false;
        bool transportStateCallSucceeded = false;

        info = lastAudioHead;
        info.timeInSamples = (int64) (lastTimeStamp.mSampleTime + 0.5);
        info.timeInSeconds = info.timeInSamples / getAudioProcessor().getSampleRate();

        switch (lastTimeStamp.mSMPTETime.mType)
        {
            case kSMPTETimeType2398:     info.frameRate = AudioPlayHead::fps23976;    break;
            case kSMPTETimeType24:       info.frameRate = AudioPlayHead::fps24;       break;
            case kSMPTETimeType25:       info.frameRate = AudioPlayHead::fps25;       break;
            case kSMPTETimeType2997:     info.frameRate = AudioPlayHead::fps2997;     break;
            case kSMPTETimeType2997Drop: info.frameRate = AudioPlayHead::fps2997drop; break;
            case kSMPTETimeType30Drop:   info.frameRate = AudioPlayHead::fps30drop;   break;
            case kSMPTETimeType30:       info.frameRate = AudioPlayHead::fps30;       break;
            case kSMPTETimeType60Drop:   info.frameRate = AudioPlayHead::fps60drop;   break;
            case kSMPTETimeType60:       info.frameRate = AudioPlayHead::fps60;       break;
            default:                     info.frameRate = AudioPlayHead::fpsUnknown;  break;
        }

        double num;
        NSInteger den;
        NSInteger outDeltaSampleOffsetToNextBeat;
        double outCurrentMeasureDownBeat, bpm;
        double ppqPosition;

        if (hostMusicalContextCallback != nullptr)
        {
            AUHostMusicalContextBlock musicalContextCallback = hostMusicalContextCallback;

            if (musicalContextCallback (&bpm, &num, &den, &ppqPosition, &outDeltaSampleOffsetToNextBeat, &outCurrentMeasureDownBeat))
            {
                musicContextCallSucceeded = true;

                info.timeSigNumerator = (int) num;
                info.timeSigDenominator = (int) den;
                info.ppqPositionOfLastBarStart = outCurrentMeasureDownBeat;
                info.bpm = bpm;
                info.ppqPosition = ppqPosition;
            }
        }

        double outCurrentSampleInTimeLine, outCycleStartBeat = 0, outCycleEndBeat = 0;
        AUHostTransportStateFlags flags;

        if (hostTransportStateCallback != nullptr)
        {
            AUHostTransportStateBlock transportStateCallback = hostTransportStateCallback;

            if (transportStateCallback (&flags, &outCurrentSampleInTimeLine, &outCycleStartBeat, &outCycleEndBeat))
            {
                transportStateCallSucceeded = true;

                info.timeInSamples = (int64) (outCurrentSampleInTimeLine + 0.5);
                info.timeInSeconds = info.timeInSamples / getAudioProcessor().getSampleRate();
                info.isPlaying = ((flags & AUHostTransportStateMoving) != 0);
                info.isLooping = ((flags & AUHostTransportStateCycling) != 0);
                info.isRecording = ((flags & AUHostTransportStateRecording) != 0);
                info.ppqLoopStart = outCycleStartBeat;
                info.ppqLoopEnd = outCycleEndBeat;
            }
        }

        if (musicContextCallSucceeded && transportStateCallSucceeded)
            lastAudioHead = info;

        return true;
    }

    //==============================================================================
    static void removeEditor (AudioProcessor& processor)
    {
        ScopedLock editorLock (processor.getCallbackLock());

        if (AudioProcessorEditor* editor = processor.getActiveEditor())
        {
            processor.editorBeingDeleted (editor);
            delete editor;
        }
    }

private:
    //==============================================================================
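    // Wraps an AudioBufferList for one AU bus. prepare() points the list at the
    // host-supplied buffers when their layout matches (avoiding a copy), and falls
    // back to the internal scratch buffer otherwise.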
    struct BusBuffer
    {
        BusBuffer (AUAudioUnitBus* bus, int maxFramesPerBuffer)
            : auBus (bus),
              maxFrames (maxFramesPerBuffer),
              numberOfChannels (static_cast<int> ([[auBus format] channelCount])),
              isInterleaved ([[auBus format] isInterleaved])
        {
            alloc();
        }

        //==============================================================================
        void alloc()
        {
            const int numBuffers = isInterleaved ? 1 : numberOfChannels;
            int bytes = static_cast<int> (sizeof (AudioBufferList))
                          + ((numBuffers - 1) * static_cast<int> (sizeof (::AudioBuffer)));
            jassert (bytes > 0);

            bufferListStorage.calloc (static_cast<size_t> (bytes));
            bufferList = reinterpret_cast<AudioBufferList*> (bufferListStorage.getData());

            const int bufferChannels = isInterleaved ? numberOfChannels : 1;
            scratchBuffer.setSize (numBuffers, bufferChannels * maxFrames);
        }

        void dealloc()
        {
            bufferList = nullptr;
            bufferListStorage.free();
            scratchBuffer.setSize (0, 0);
        }

        //==============================================================================
        int numChannels() const noexcept { return numberOfChannels; }
        bool interleaved() const noexcept { return isInterleaved; }
        AudioBufferList* get() const noexcept { return bufferList; }

        //==============================================================================
        void prepare (UInt32 nFrames, const AudioBufferList* other = nullptr) noexcept
        {
            const int numBuffers = isInterleaved ? 1 : numberOfChannels;
            const bool isCompatible = isCompatibleWith (other);

            bufferList->mNumberBuffers = static_cast<UInt32> (numBuffers);

            for (int i = 0; i < numBuffers; ++i)
            {
                const UInt32 bufferChannels = static_cast<UInt32> (isInterleaved ? numberOfChannels : 1);
                bufferList->mBuffers[i].mNumberChannels = bufferChannels;
                bufferList->mBuffers[i].mData = (isCompatible ? other->mBuffers[i].mData
                                                              : scratchBuffer.getWritePointer (i));
                bufferList->mBuffers[i].mDataByteSize = nFrames * bufferChannels * sizeof (float);
            }
        }

        //==============================================================================
        bool isCompatibleWith (const AudioBufferList* other) const noexcept
        {
            if (other == nullptr)
                return false;

            if (other->mNumberBuffers > 0)
            {
                const bool otherInterleaved = AudioUnitHelpers::isAudioBufferInterleaved (*other);
                const int otherChannels = static_cast<int> (otherInterleaved ? other->mBuffers[0].mNumberChannels
                                                                             : other->mNumberBuffers);

                return otherInterleaved == isInterleaved
                        && numberOfChannels == otherChannels;
            }

            return numberOfChannels == 0;
        }

    private:
        AUAudioUnitBus* auBus;
        HeapBlock<char> bufferListStorage;
        AudioBufferList* bufferList = nullptr;
        int maxFrames, numberOfChannels;
        bool isInterleaved;
        AudioBuffer<float> scratchBuffer;
    };

    //==============================================================================
    void addAudioUnitBusses (bool isInput)
    {
        std::unique_ptr<NSMutableArray<AUAudioUnitBus*>, NSObjectDeleter> array ([[NSMutableArray<AUAudioUnitBus*> alloc] init]);
        AudioProcessor& processor = getAudioProcessor();
        const int n = AudioUnitHelpers::getBusCount (&processor, isInput);

        for (int i = 0; i < n; ++i)
        {
            std::unique_ptr<AUAudioUnitBus, NSObjectDeleter> audioUnitBus;

            {
                std::unique_ptr<AVAudioFormat, NSObjectDeleter> defaultFormat ([[AVAudioFormat alloc] initStandardFormatWithSampleRate: kDefaultSampleRate
                                                                                                                               channels: static_cast<AVAudioChannelCount> (processor.getChannelCountOfBus (isInput, i))]);

                audioUnitBus.reset ([[AUAudioUnitBus alloc] initWithFormat: defaultFormat.get()
                                                                     error: nullptr]);
            }

            [array.get() addObject: audioUnitBus.get()];
        }

        (isInput ? inputBusses : outputBusses).reset ([[AUAudioUnitBusArray alloc] initWithAudioUnit: au
                                                                                              busType: (isInput ? AUAudioUnitBusTypeInput : AUAudioUnitBusTypeOutput)
                                                                                               busses: array.get()]);
    }

    // When parameters are discrete we need to use integer values.
    float getMaximumParameterValue (AudioProcessorParameter* juceParam)
    {
       #if JUCE_FORCE_LEGACY_PARAMETER_AUTOMATION_TYPE
        ignoreUnused (juceParam);
        return 1.0f;
       #else
        return juceParam->isDiscrete() ? (float) (juceParam->getNumSteps() - 1) : 1.0f;
       #endif
    }
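
    // Builds the AUParameterTree: each JUCE parameter becomes an AUParameter whose
    // address comes from generateAUParameterAddress(), with observer/provider blocks
    // wired back to this class so host and plug-in parameter changes stay in sync.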
    void addParameters()
    {
        std::unique_ptr<NSMutableArray<AUParameterNode*>, NSObjectDeleter> params ([[NSMutableArray<AUParameterNode*> alloc] init]);

        overviewParams.reset ([[NSMutableArray<NSNumber*> alloc] init]);

        auto& processor = getAudioProcessor();
        juceParameters.update (processor, forceLegacyParamIDs);

        const int n = juceParameters.getNumParameters();

        for (int idx = 0; idx < n; ++idx)
        {
            auto* juceParam = juceParameters.getParamForIndex (idx);

            const String identifier (idx);
            const String name = juceParam->getName (512);

            AudioUnitParameterUnit unit = kAudioUnitParameterUnit_Generic;
            AudioUnitParameterOptions flags = (UInt32) (kAudioUnitParameterFlag_IsWritable
                                                      | kAudioUnitParameterFlag_IsReadable
                                                      | kAudioUnitParameterFlag_HasCFNameString
                                                      | kAudioUnitParameterFlag_ValuesHaveStrings);

            if (! forceLegacyParamIDs)
                flags |= (UInt32) kAudioUnitParameterFlag_IsHighResolution;

            // set whether the param is automatable (unnamed parameters aren't allowed to be automated)
            if (name.isEmpty() || ! juceParam->isAutomatable())
                flags |= kAudioUnitParameterFlag_NonRealTime;

            const bool isParameterDiscrete = juceParam->isDiscrete();

            if (! isParameterDiscrete)
                flags |= kAudioUnitParameterFlag_CanRamp;

            if (juceParam->isMetaParameter())
                flags |= kAudioUnitParameterFlag_IsGlobalMeta;

            std::unique_ptr<NSMutableArray, NSObjectDeleter> valueStrings;

            // is this a meter?
            if (((juceParam->getCategory() & 0xffff0000) >> 16) == 2)
            {
                flags &= ~kAudioUnitParameterFlag_IsWritable;
                flags |= kAudioUnitParameterFlag_MeterReadOnly | kAudioUnitParameterFlag_DisplayLogarithmic;
                unit = kAudioUnitParameterUnit_LinearGain;
            }
            else
            {
                if (! forceLegacyParamIDs)
                {
                    if (juceParam->isDiscrete())
                    {
                        unit = juceParam->isBoolean() ? kAudioUnitParameterUnit_Boolean : kAudioUnitParameterUnit_Indexed;
                        auto maxValue = getMaximumParameterValue (juceParam);
                        auto numSteps = juceParam->getNumSteps();

                        // Some hosts can't handle the huge numbers of discrete parameter values created when
                        // using the default number of steps.
                        jassert (numSteps != AudioProcessor::getDefaultNumParameterSteps());

                        valueStrings.reset ([NSMutableArray new]);

                        for (int i = 0; i < numSteps; ++i)
                            [valueStrings.get() addObject: juceStringToNS (juceParam->getText ((float) i / maxValue, 0))];
                    }
                }
            }

            AUParameterAddress address = generateAUParameterAddress (juceParam);

           #if ! JUCE_FORCE_LEGACY_PARAMETER_AUTOMATION_TYPE
            // Consider yourself very unlucky if you hit this assertion. The hash codes of your
            // parameter ids are not unique.
            jassert (! paramMap.contains (static_cast<int64> (address)));

            paramAddresses.add (address);
            paramMap.set (static_cast<int64> (address), idx);
           #endif

            // create methods in AUParameterTree return unretained objects (!) -> see Apple header AUAudioUnitImplementation.h
            std::unique_ptr<AUParameter, NSObjectDeleter> param ([[AUParameterTree createParameterWithIdentifier: juceStringToNS (identifier)
                                                                                                             name: juceStringToNS (name)
                                                                                                          address: address
                                                                                                              min: 0.0f
                                                                                                              max: getMaximumParameterValue (juceParam)
                                                                                                             unit: unit
                                                                                                         unitName: nullptr
                                                                                                            flags: flags
                                                                                                     valueStrings: valueStrings.get()
                                                                                              dependentParameters: nullptr] retain]);

            [param.get() setValue: juceParam->getDefaultValue()];

            [params.get() addObject: param.get()];
            [overviewParams.get() addObject: [NSNumber numberWithUnsignedLongLong:address]];
        }

        // create methods in AUParameterTree return unretained objects (!) -> see Apple header AUAudioUnitImplementation.h
        paramTree.reset ([[AUParameterTree createTreeWithChildren: params.get()] retain]);

        paramObserver = CreateObjCBlock (this, &JuceAudioUnitv3::valueChangedFromHost);
        paramProvider = CreateObjCBlock (this, &JuceAudioUnitv3::getValue);
        stringFromValueProvider = CreateObjCBlock (this, &JuceAudioUnitv3::stringFromValue);
        valueFromStringProvider = CreateObjCBlock (this, &JuceAudioUnitv3::valueFromString);

        [paramTree.get() setImplementorValueObserver: paramObserver];
        [paramTree.get() setImplementorValueProvider: paramProvider];
        [paramTree.get() setImplementorStringFromValueCallback: stringFromValueProvider];
        [paramTree.get() setImplementorValueFromStringCallback: valueFromStringProvider];

        if (processor.hasEditor())
        {
            editorParamObserver = CreateObjCBlock (this, &JuceAudioUnitv3::valueChangedForObserver);
            editorObserverToken = [paramTree.get() tokenByAddingParameterObserver: editorParamObserver];
        }

        if ((bypassParam = processor.getBypassParameter()) != nullptr)
            bypassParam->addListener (this);
    }

    void setAudioProcessorParameter (AudioProcessorParameter* juceParam, float value)
    {
        if (value != juceParam->getValue())
        {
            juceParam->setValue (value);

            inParameterChangedCallback = true;
            juceParam->sendValueChangedMessageToListeners (value);
        }
    }

    void addPresets()
    {
        factoryPresets.reset ([[NSMutableArray<AUAudioUnitPreset*> alloc] init]);

        const int n = getAudioProcessor().getNumPrograms();

        for (int idx = 0; idx < n; ++idx)
        {
            String name = getAudioProcessor().getProgramName (idx);

            std::unique_ptr<AUAudioUnitPreset, NSObjectDeleter> preset ([[AUAudioUnitPreset alloc] init]);
            [preset.get() setName: juceStringToNS (name)];
            [preset.get() setNumber: static_cast<NSInteger> (idx)];

            [factoryPresets.get() addObject: preset.get()];
        }
    }

    //==============================================================================
    void allocateBusBuffer (bool isInput)
    {
        OwnedArray<BusBuffer>& busBuffers = isInput ? inBusBuffers : outBusBuffers;
        busBuffers.clear();

        const int n = AudioUnitHelpers::getBusCount (&getAudioProcessor(), isInput);
        const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];

        for (int busIdx = 0; busIdx < n; ++busIdx)
            busBuffers.add (new BusBuffer ([(isInput ? inputBusses.get() : outputBusses.get()) objectAtIndexedSubscript: static_cast<unsigned int> (busIdx)],
                                           static_cast<int> (maxFrames)));
    }

    //==============================================================================
    void processEvents (const AURenderEvent *__nullable realtimeEventListHead, int numParams, AUEventSampleTime startTime)
    {
        ignoreUnused (numParams);

        for (const AURenderEvent* event = realtimeEventListHead; event != nullptr; event = event->head.next)
        {
            switch (event->head.eventType)
            {
                case AURenderEventMIDI:
                {
                    const AUMIDIEvent& midiEvent = event->MIDI;
                    midiMessages.addEvent (midiEvent.data, midiEvent.length, static_cast<int> (midiEvent.eventSampleTime - startTime));
                }
                break;

                case AURenderEventParameter:
                case AURenderEventParameterRamp:
                {
                    const AUParameterEvent& paramEvent = event->parameter;

                    if (auto* p = getJuceParameterForAUAddress (paramEvent.parameterAddress))
                        setAudioProcessorParameter (p, paramEvent.value);
                }
                break;

                default:
                    break;
            }
        }
    }
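
    // The render block called by the host on the audio thread. The sequence is:
    // apply queued parameter/MIDI events, prepare the per-bus AudioBufferLists,
    // pull the inputs, point the shared audio buffer at the bus data, run
    // processBlock(), emit any MIDI output, then copy the result to the requested
    // output bus.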
  1069. AUAudioUnitStatus renderCallback (AudioUnitRenderActionFlags* actionFlags, const AudioTimeStamp* timestamp, AUAudioFrameCount frameCount,
  1070. NSInteger outputBusNumber, AudioBufferList* outputData, const AURenderEvent *__nullable realtimeEventListHead,
  1071. AURenderPullInputBlock __nullable pullInputBlock)
  1072. {
  1073. auto& processor = getAudioProcessor();
  1074. jassert (static_cast<int> (frameCount) <= getAudioProcessor().getBlockSize());
  1075. // process params
  1076. const int numParams = juceParameters.getNumParameters();
  1077. processEvents (realtimeEventListHead, numParams, static_cast<AUEventSampleTime> (timestamp->mSampleTime));
  1078. if (lastTimeStamp.mSampleTime != timestamp->mSampleTime)
  1079. {
  1080. lastTimeStamp = *timestamp;
  1081. const int numInputBuses = inBusBuffers. size();
  1082. const int numOutputBuses = outBusBuffers.size();
  1083. // prepare buffers
  1084. {
  1085. for (int busIdx = 0; busIdx < numOutputBuses; ++busIdx)
  1086. {
  1087. BusBuffer& busBuffer = *outBusBuffers[busIdx];
  1088. const bool canUseDirectOutput =
  1089. (busIdx == outputBusNumber && outputData != nullptr && outputData->mNumberBuffers > 0);
  1090. busBuffer.prepare (frameCount, canUseDirectOutput ? outputData : nullptr);
  1091. }
  1092. for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
  1093. {
  1094. BusBuffer& busBuffer = *inBusBuffers[busIdx];
  1095. busBuffer.prepare (frameCount, busIdx < numOutputBuses ? outBusBuffers[busIdx]->get() : nullptr);
  1096. }
  1097. audioBuffer.reset();
  1098. }
  1099. // pull inputs
  1100. {
  1101. for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
  1102. {
  1103. BusBuffer& busBuffer = *inBusBuffers[busIdx];
  1104. AudioBufferList* buffer = busBuffer.get();
  1105. if (pullInputBlock == nullptr || pullInputBlock (actionFlags, timestamp, frameCount, busIdx, buffer) != noErr)
  1106. AudioUnitHelpers::clearAudioBuffer (*buffer);
  1107. if (actionFlags != nullptr && (*actionFlags & kAudioUnitRenderAction_OutputIsSilence) != 0)
  1108. AudioUnitHelpers::clearAudioBuffer (*buffer);
  1109. }
  1110. }
  1111. // set buffer pointer to minimize copying
  1112. {
  1113. int chIdx = 0;
  1114. for (int busIdx = 0; busIdx < numOutputBuses; ++busIdx)
  1115. {
  1116. BusBuffer& busBuffer = *outBusBuffers[busIdx];
  1117. AudioBufferList* buffer = busBuffer.get();
  1118. const bool interleaved = busBuffer.interleaved();
  1119. const int numChannels = busBuffer.numChannels();
  1120. const int* outLayoutMap = mapper.get (false, busIdx);
  1121. for (int ch = 0; ch < numChannels; ++ch)
  1122. audioBuffer.setBuffer (chIdx++, interleaved ? nullptr : static_cast<float*> (buffer->mBuffers[outLayoutMap[ch]].mData));
  1123. }
  1124. // use input pointers on remaining channels
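// (note that busIdx is not advanced in the loop header: getOffsetInBusBufferForAbsoluteChannelIndex()
// takes it by reference and moves it on to the bus containing the current absolute channel)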
  1125. for (int busIdx = 0; chIdx < totalInChannels;)
  1126. {
  1127. const int channelOffset = processor.getOffsetInBusBufferForAbsoluteChannelIndex (true, chIdx, busIdx);
  1128. BusBuffer& busBuffer = *inBusBuffers[busIdx];
  1129. AudioBufferList* buffer = busBuffer.get();
  1130. const int* inLayoutMap = mapper.get (true, busIdx);
  1131. audioBuffer.setBuffer (chIdx++, busBuffer.interleaved() ? nullptr : static_cast<float*> (buffer->mBuffers[inLayoutMap[channelOffset]].mData));
  1132. }
  1133. }
  1134. // copy input
  1135. {
  1136. for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
  1137. audioBuffer.push (*inBusBuffers[busIdx]->get(), mapper.get (true, busIdx));
  1138. // clear remaining channels
  1139. for (int i = totalInChannels; i < totalOutChannels; ++i)
  1140. zeromem (audioBuffer.push(), sizeof (float) * frameCount);
  1141. }
  1142. // process audio
  1143. processBlock (audioBuffer.getBuffer (frameCount), midiMessages);
  1144. // send MIDI
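// (if the host supplied a MIDI output block, every MIDI event produced by the processor during
// this render call is forwarded to it before the buffer is cleared)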
1145. #if JucePlugin_ProducesMidiOutput && JUCE_AUV3_MIDI_OUTPUT_SUPPORTED
1146. MidiMessage msg;
1147. int samplePosition;
1148. if (auto midiOut = [au MIDIOutputEventBlock]) // only forward MIDI if the host provided an output block
1149. for (MidiBuffer::Iterator it (midiMessages); it.getNextEvent (msg, samplePosition);)
1150. midiOut (samplePosition, 0, msg.getRawDataSize(), msg.getRawData());
1151. #endif
  1152. midiMessages.clear();
  1153. }
  1154. // copy back
  1155. audioBuffer.pop (*outBusBuffers[(int) outputBusNumber]->get(),
  1156. mapper.get (false, (int) outputBusNumber));
  1157. return noErr;
  1158. }
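// Runs the wrapped processor for one block while holding its callback lock: a suspended
// processor just clears the buffer, and a host-side bypass request goes through
// processBlockBypassed().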
  1159. void processBlock (AudioBuffer<float>& buffer, MidiBuffer& midiBuffer) noexcept
  1160. {
  1161. auto& processor = getAudioProcessor();
  1162. const ScopedLock sl (processor.getCallbackLock());
  1163. if (processor.isSuspended())
  1164. buffer.clear();
  1165. else if (bypassParam != nullptr && [au shouldBypassEffect])
  1166. processor.processBlockBypassed (buffer, midiBuffer);
  1167. else
  1168. processor.processBlock (buffer, midiBuffer);
  1169. }
  1170. //==============================================================================
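// AU-facing parameter values are rescaled with getMaximumParameterValue(): JUCE parameters are
// normalised to 0..1, while the value published to the host may cover a larger range (for
// discrete parameters, typically their step count).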
  1171. void valueChangedFromHost (AUParameter* param, AUValue value)
  1172. {
  1173. if (param != nullptr)
  1174. {
  1175. if (auto* p = getJuceParameterForAUAddress ([param address]))
  1176. {
  1177. auto normalisedValue = value / getMaximumParameterValue (p);
  1178. setAudioProcessorParameter (p, normalisedValue);
  1179. }
  1180. }
  1181. }
  1182. AUValue getValue (AUParameter* param)
  1183. {
  1184. if (param != nullptr)
  1185. {
  1186. if (auto* p = getJuceParameterForAUAddress ([param address]))
  1187. return p->getValue() * getMaximumParameterValue (p);
  1188. }
  1189. return 0;
  1190. }
  1191. void valueChangedForObserver (AUParameterAddress, AUValue)
  1192. {
  1193. // this will have already been handled by valueChangedFromHost
  1194. }
  1195. NSString* stringFromValue (AUParameter* param, const AUValue* value)
  1196. {
  1197. String text;
  1198. if (param != nullptr && value != nullptr)
  1199. {
  1200. if (auto* p = getJuceParameterForAUAddress ([param address]))
  1201. {
  1202. if (LegacyAudioParameter::isLegacy (p))
  1203. text = String (*value);
  1204. else
  1205. text = p->getText (*value / getMaximumParameterValue (p), 0);
  1206. }
  1207. }
  1208. return juceStringToNS (text);
  1209. }
  1210. AUValue valueFromString (AUParameter* param, NSString* str)
  1211. {
  1212. if (param != nullptr && str != nullptr)
  1213. {
  1214. if (auto* p = getJuceParameterForAUAddress ([param address]))
  1215. {
  1216. const String text (nsStringToJuce (str));
  1217. if (LegacyAudioParameter::isLegacy (p))
  1218. return text.getFloatValue();
  1219. else
  1220. return p->getValueForText (text) * getMaximumParameterValue (p);
  1221. }
  1222. }
  1223. return 0;
  1224. }
  1225. //==============================================================================
  1226. // this is only ever called for the bypass parameter
  1227. void parameterValueChanged (int, float newValue) override
  1228. {
  1229. JuceAudioUnitv3Base::setShouldBypassEffect (newValue != 0.0f);
  1230. }
  1231. void parameterGestureChanged (int, bool) override {}
  1232. //==============================================================================
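// Mapping between AUParameterAddress and JUCE parameters uses one of two schemes: with
// JUCE_FORCE_USE_LEGACY_PARAM_IDS the parameter index itself is used as the address, otherwise
// the address is a 64-bit hash of the JUCE parameter ID (juceParamID.hashCode64()) looked up
// through paramAddresses / paramMap.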
  1233. inline AUParameterAddress getAUParameterAddressForIndex (int paramIndex) const noexcept
  1234. {
  1235. #if JUCE_FORCE_USE_LEGACY_PARAM_IDS
  1236. return static_cast<AUParameterAddress> (paramIndex);
  1237. #else
  1238. return paramAddresses.getReference (paramIndex);
  1239. #endif
  1240. }
  1241. inline int getJuceParameterIndexForAUAddress (AUParameterAddress address) const noexcept
  1242. {
  1243. #if JUCE_FORCE_USE_LEGACY_PARAM_IDS
  1244. return static_cast<int> (address);
  1245. #else
  1246. return paramMap[static_cast<int64> (address)];
  1247. #endif
  1248. }
  1249. AUParameterAddress generateAUParameterAddress (AudioProcessorParameter* param) const
  1250. {
  1251. const String& juceParamID = LegacyAudioParameter::getParamID (param, forceLegacyParamIDs);
  1252. #if JUCE_FORCE_USE_LEGACY_PARAM_IDS
  1253. auto result = juceParamID.getIntValue();
  1254. #else
  1255. auto result = juceParamID.hashCode64();
  1256. #endif
  1257. return static_cast<AUParameterAddress> (result);
  1258. }
  1259. AudioProcessorParameter* getJuceParameterForAUAddress (AUParameterAddress address) const noexcept
  1260. {
  1261. return juceParameters.getParamForIndex (getJuceParameterIndexForAUAddress (address));
  1262. }
  1263. //==============================================================================
  1264. static const double kDefaultSampleRate;
  1265. AudioProcessorHolder::Ptr processorHolder;
  1266. int totalInChannels, totalOutChannels;
  1267. std::unique_ptr<AUAudioUnitBusArray, NSObjectDeleter> inputBusses, outputBusses;
  1268. ObjCBlock<AUImplementorValueObserver> paramObserver;
  1269. ObjCBlock<AUImplementorValueProvider> paramProvider;
  1270. ObjCBlock<AUImplementorStringFromValueCallback> stringFromValueProvider;
  1271. ObjCBlock<AUImplementorValueFromStringCallback> valueFromStringProvider;
  1272. #if ! JUCE_FORCE_USE_LEGACY_PARAM_IDS
  1273. Array<AUParameterAddress> paramAddresses;
  1274. HashMap<int64, int> paramMap;
  1275. #endif
  1276. LegacyAudioParametersWrapper juceParameters;
1277. // to avoid recursion on parameter changes, parameter updates coming from the plug-in side
1278. // are applied to the parameter tree using this editor observer token as the originator
  1279. ObjCBlock<AUParameterObserver> editorParamObserver;
  1280. AUParameterObserverToken editorObserverToken;
  1281. std::unique_ptr<AUParameterTree, NSObjectDeleter> paramTree;
  1282. std::unique_ptr<NSMutableArray<NSNumber*>, NSObjectDeleter> overviewParams, channelCapabilities;
  1283. std::unique_ptr<NSMutableArray<AUAudioUnitPreset*>, NSObjectDeleter> factoryPresets;
  1284. ObjCBlock<AUInternalRenderBlock> internalRenderBlock;
  1285. AudioUnitHelpers::CoreAudioBufferList audioBuffer;
  1286. AudioUnitHelpers::ChannelRemapper mapper;
  1287. OwnedArray<BusBuffer> inBusBuffers, outBusBuffers;
  1288. MidiBuffer midiMessages;
  1289. ObjCBlock<AUHostMusicalContextBlock> hostMusicalContextCallback;
  1290. ObjCBlock<AUHostTransportStateBlock> hostTransportStateCallback;
  1291. AudioTimeStamp lastTimeStamp;
  1292. CurrentPositionInfo lastAudioHead;
  1293. String contextName;
  1294. ThreadLocalValue<bool> inParameterChangedCallback;
  1295. #if JUCE_FORCE_USE_LEGACY_PARAM_IDS
  1296. static constexpr bool forceLegacyParamIDs = true;
  1297. #else
  1298. static constexpr bool forceLegacyParamIDs = false;
  1299. #endif
  1300. AudioProcessorParameter* bypassParam = nullptr;
  1301. };
  1302. const double JuceAudioUnitv3::kDefaultSampleRate = 44100.0;
  1303. JuceAudioUnitv3Base* JuceAudioUnitv3Base::create (AUAudioUnit* audioUnit, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error)
  1304. {
  1305. PluginHostType::jucePlugInClientCurrentWrapperType = AudioProcessor::wrapperType_AudioUnitv3;
  1306. return new JuceAudioUnitv3 (audioUnit, descr, options, error);
  1307. }
  1308. //==============================================================================
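// The view controller is the AUv3 factory object for the app extension: it creates and owns the
// AudioProcessor (via an AudioProcessorHolder), hosts the plug-in editor inside a native
// UIView/NSView, and hands out the exported AUAudioUnit on request.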
  1309. class JuceAUViewController
  1310. {
  1311. public:
  1312. JuceAUViewController (AUViewController<AUAudioUnitFactory>* p)
  1313. : myself (p)
  1314. {
  1315. jassert (MessageManager::getInstance()->isThisTheMessageThread());
  1316. PluginHostType::jucePlugInClientCurrentWrapperType = AudioProcessor::wrapperType_AudioUnitv3;
  1317. initialiseJuce_GUI();
  1318. }
  1319. ~JuceAUViewController()
  1320. {
  1321. jassert (MessageManager::getInstance()->isThisTheMessageThread());
  1322. if (processorHolder != nullptr)
  1323. JuceAudioUnitv3::removeEditor (getAudioProcessor());
  1324. }
  1325. //==============================================================================
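// Called when the OS first requests the view: this creates the plug-in's AudioProcessor, wraps
// its editor in a native view and adds it to the desktop; on iOS the editor stays hidden until
// viewDidAppear().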
  1326. void loadView()
  1327. {
  1328. jassert (MessageManager::getInstance()->isThisTheMessageThread());
  1329. if (AudioProcessor* p = createPluginFilterOfType (AudioProcessor::wrapperType_AudioUnitv3))
  1330. {
  1331. processorHolder = new AudioProcessorHolder (p);
  1332. auto& processor = getAudioProcessor();
  1333. if (processor.hasEditor())
  1334. {
  1335. if (AudioProcessorEditor* editor = processor.createEditorIfNeeded())
  1336. {
  1337. preferredSize = editor->getBounds();
  1338. JUCE_IOS_MAC_VIEW* view = [[[JUCE_IOS_MAC_VIEW alloc] initWithFrame: convertToCGRect (editor->getBounds())] autorelease];
  1339. [myself setView: view];
  1340. #if JUCE_IOS
  1341. editor->setVisible (false);
  1342. #else
  1343. editor->setVisible (true);
  1344. #endif
  1345. editor->addToDesktop (0, view);
  1346. #if JUCE_IOS
  1347. if (JUCE_IOS_MAC_VIEW* peerView = [[[myself view] subviews] objectAtIndex: 0])
  1348. [peerView setContentMode: UIViewContentModeTop];
  1349. #endif
  1350. }
  1351. }
  1352. }
  1353. }
  1354. void viewDidLayoutSubviews()
  1355. {
  1356. if (processorHolder != nullptr && [myself view] != nullptr)
  1357. {
  1358. if (AudioProcessorEditor* editor = getAudioProcessor().getActiveEditor())
  1359. {
  1360. if (processorHolder->viewConfiguration != nullptr)
  1361. editor->hostMIDIControllerIsAvailable (processorHolder->viewConfiguration->hostHasMIDIController);
  1362. editor->setBounds (convertToRectInt ([[myself view] bounds]));
  1363. if (JUCE_IOS_MAC_VIEW* peerView = [[[myself view] subviews] objectAtIndex: 0])
  1364. {
  1365. #if JUCE_IOS
  1366. [peerView setNeedsDisplay];
  1367. #else
  1368. [peerView setNeedsDisplay: YES];
  1369. #endif
  1370. }
  1371. }
  1372. }
  1373. }
  1374. void didReceiveMemoryWarning()
  1375. {
  1376. if (processorHolder != nullptr)
  1377. if (auto* processor = processorHolder->get())
  1378. processor->memoryWarningReceived();
  1379. }
  1380. void viewDidAppear (bool)
  1381. {
  1382. if (processorHolder != nullptr)
  1383. if (AudioProcessorEditor* editor = getAudioProcessor().getActiveEditor())
  1384. editor->setVisible (true);
  1385. }
  1386. void viewDidDisappear (bool)
  1387. {
  1388. if (processorHolder != nullptr)
  1389. if (AudioProcessorEditor* editor = getAudioProcessor().getActiveEditor())
  1390. editor->setVisible (false);
  1391. }
  1392. CGSize getPreferredContentSize() const
  1393. {
  1394. return CGSizeMake (static_cast<float> (preferredSize.getWidth()),
  1395. static_cast<float> (preferredSize.getHeight()));
  1396. }
  1397. //==============================================================================
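// Factory entry point for the extension: the host may call this off the message thread, in which
// case construction is bounced to the JUCE message thread via a CallbackMessage and this thread
// waits on a WaitableEvent; the returned AUAudioUnit is autoreleased.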
  1398. AUAudioUnit* createAudioUnit (const AudioComponentDescription& descr, NSError** error)
  1399. {
  1400. AUAudioUnit* retval = nil;
  1401. if (! MessageManager::getInstance()->isThisTheMessageThread())
  1402. {
  1403. WaitableEvent creationEvent;
1404. // the AUv3 headers state that this thread may safely be blocked here, and that the
1405. // message thread is guaranteed not to be blocked in the meantime
  1406. struct AUCreator : public CallbackMessage
  1407. {
  1408. JuceAUViewController& owner;
  1409. AudioComponentDescription pDescr;
  1410. NSError** pError;
  1411. AUAudioUnit*& outAU;
  1412. WaitableEvent& e;
  1413. AUCreator (JuceAUViewController& parent, const AudioComponentDescription& paramDescr, NSError** paramError,
  1414. AUAudioUnit*& outputAU, WaitableEvent& event)
  1415. : owner (parent), pDescr (paramDescr), pError (paramError), outAU (outputAU), e (event)
  1416. {}
  1417. void messageCallback() override
  1418. {
  1419. outAU = owner.createAudioUnitOnMessageThread (pDescr, pError);
  1420. e.signal();
  1421. }
  1422. };
  1423. (new AUCreator (*this, descr, error, retval, creationEvent))->post();
  1424. creationEvent.wait (-1);
  1425. }
  1426. else
  1427. {
  1428. retval = createAudioUnitOnMessageThread (descr, error);
  1429. }
  1430. return [retval autorelease];
  1431. }
  1432. private:
  1433. //==============================================================================
  1434. AUViewController<AUAudioUnitFactory>* myself;
  1435. AudioProcessorHolder::Ptr processorHolder = nullptr;
  1436. Rectangle<int> preferredSize { 1, 1 };
  1437. //==============================================================================
  1438. AUAudioUnit* createAudioUnitOnMessageThread (const AudioComponentDescription& descr, NSError** error)
  1439. {
  1440. jassert (MessageManager::getInstance()->isThisTheMessageThread());
1441. [myself view]; // accessing the view property triggers -loadView and ensures that the AudioProcessor has been instantiated
  1442. if (processorHolder == nullptr)
  1443. return nullptr;
  1444. return (new JuceAudioUnitv3 (processorHolder, descr, 0, error))->getAudioUnit();
  1445. }
  1446. AudioProcessor& getAudioProcessor() const noexcept { return **processorHolder; }
  1447. };
  1448. //==============================================================================
  1449. // necessary glue code
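// (the Objective-C class name is generated from JucePlugin_AUExportPrefix by the
// JUCE_VIEWCONTROLLER_OBJC_NAME macro so that each plug-in exports a uniquely named factory
// class; every method below simply forwards to the C++ JuceAUViewController instance)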
  1450. @interface JUCE_VIEWCONTROLLER_OBJC_NAME (JucePlugin_AUExportPrefix) : AUViewController<AUAudioUnitFactory>
  1451. @end
  1452. @implementation JUCE_VIEWCONTROLLER_OBJC_NAME (JucePlugin_AUExportPrefix)
  1453. {
  1454. std::unique_ptr<JuceAUViewController> cpp;
  1455. }
  1456. - (instancetype) initWithNibName: (nullable NSString*) nib bundle: (nullable NSBundle*) bndl { self = [super initWithNibName: nib bundle: bndl]; cpp.reset (new JuceAUViewController (self)); return self; }
  1457. - (void) loadView { cpp->loadView(); }
  1458. - (AUAudioUnit *) createAudioUnitWithComponentDescription: (AudioComponentDescription) desc error: (NSError **) error { return cpp->createAudioUnit (desc, error); }
  1459. - (CGSize) preferredContentSize { return cpp->getPreferredContentSize(); }
  1460. - (void) viewDidLayoutSubviews { cpp->viewDidLayoutSubviews(); }
  1461. - (void) didReceiveMemoryWarning { cpp->didReceiveMemoryWarning(); }
  1462. #if JUCE_IOS
  1463. - (void) viewDidAppear: (BOOL) animated { cpp->viewDidAppear (animated); [super viewDidAppear:animated]; }
  1464. - (void) viewDidDisappear: (BOOL) animated { cpp->viewDidDisappear (animated); [super viewDidDisappear:animated]; }
  1465. #endif
  1466. @end
  1467. //==============================================================================
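// Inter-App Audio does not apply inside an AUv3 app extension, so the IAA helpers that other
// parts of JUCE expect are provided here as no-op stubs.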
  1468. #if JUCE_IOS
  1469. bool JUCE_CALLTYPE juce_isInterAppAudioConnected() { return false; }
  1470. void JUCE_CALLTYPE juce_switchToHostApplication() {}
  1471. #if JUCE_MODULE_AVAILABLE_juce_gui_basics
  1472. Image JUCE_CALLTYPE juce_getIAAHostIcon (int) { return {}; }
  1473. #endif
  1474. #endif
  1475. #pragma clang diagnostic pop
  1476. #endif