The JUCE cross-platform C++ framework, with DISTRHO/KXStudio-specific changes


  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2017 - ROLI Ltd.
  5. JUCE is an open source library subject to commercial or open-source
  6. licensing.
  7. By using JUCE, you agree to the terms of both the JUCE 5 End-User License
  8. Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
  9. 27th April 2017).
  10. End User License Agreement: www.juce.com/juce-5-licence
  11. Privacy Policy: www.juce.com/juce-5-privacy-policy
  12. Or: You may also use this code under the terms of the GPL v3 (see
  13. www.gnu.org/licenses).
  14. JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
  15. EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
  16. DISCLAIMED.
  17. ==============================================================================
  18. */
  19. #include "../../juce_core/system/juce_TargetPlatform.h"
  20. #include "../utility/juce_CheckSettingMacros.h"
  21. #if JucePlugin_Build_AUv3
  22. #import <CoreAudioKit/CoreAudioKit.h>
  23. #import <AudioToolbox/AudioToolbox.h>
  24. #import <AVFoundation/AVFoundation.h>
  25. #if JUCE_MAC
  26. #if (! defined MAC_OS_X_VERSION_MIN_REQUIRED) || (! defined MAC_OS_X_VERSION_10_11) || (MAC_OS_X_VERSION_MIN_REQUIRED < MAC_OS_X_VERSION_10_11)
  27. #error AUv3 needs Deployment Target OS X 10.11 or higher to compile
  28. #endif
  29. #if (defined MAC_OS_X_VERSION_10_13) && (MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_13)
  30. #define JUCE_AUV3_MIDI_OUTPUT_SUPPORTED 1
  31. #define JUCE_AUV3_VIEW_CONFIG_SUPPORTED 1
  32. #endif
  33. #endif
  34. #if JUCE_IOS
  35. #if (! defined __IPHONE_OS_VERSION_MIN_REQUIRED) || (! defined __IPHONE_9_0) || (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_9_0)
  36. #error AUv3 needs Deployment Target iOS 9.0 or higher to compile
  37. #endif
  38. #if (defined __IPHONE_11_0) && (__IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_11_0)
  39. #define JUCE_AUV3_MIDI_OUTPUT_SUPPORTED 1
  40. #define JUCE_AUV3_VIEW_CONFIG_SUPPORTED 1
  41. #endif
  42. #endif
  43. #ifndef __OBJC2__
  44. #error AUv3 needs Objective-C 2 support (compile with 64-bit)
  45. #endif
  46. #define JUCE_CORE_INCLUDE_OBJC_HELPERS 1
  47. #include "../utility/juce_IncludeSystemHeaders.h"
  48. #include "../utility/juce_IncludeModuleHeaders.h"
  49. #include "../../juce_graphics/native/juce_mac_CoreGraphicsHelpers.h"
  50. #include "../../juce_audio_basics/native/juce_mac_CoreAudioLayouts.h"
  51. #include "../../juce_audio_processors/format_types/juce_AU_Shared.h"
  52. #define JUCE_VIEWCONTROLLER_OBJC_NAME(x) JUCE_JOIN_MACRO (x, FactoryAUv3)
  53. #if JUCE_IOS
  54. #define JUCE_IOS_MAC_VIEW UIView
  55. #else
  56. #define JUCE_IOS_MAC_VIEW NSView
  57. #endif
  58. #define JUCE_AUDIOUNIT_OBJC_NAME(x) JUCE_JOIN_MACRO (x, AUv3)
  59. #pragma clang diagnostic push
  60. #pragma clang diagnostic ignored "-Wnullability-completeness"
  61. using namespace juce;
  62. // TODO: use SFINAE to automatically generate this for all NSObjects
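// (Each specialization below simply lets ScopedPointer/ContainerDeletePolicy dispose of the
// corresponding Obj-C object with -release instead of C++ delete.)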
  63. template <> struct ContainerDeletePolicy<AUAudioUnitBusArray> { static void destroy (NSObject* o) { [o release]; } };
  64. template <> struct ContainerDeletePolicy<AUParameterTree> { static void destroy (NSObject* o) { [o release]; } };
  65. template <> struct ContainerDeletePolicy<NSMutableArray<AUParameterNode*>> { static void destroy (NSObject* o) { [o release]; } };
  66. template <> struct ContainerDeletePolicy<AUParameter> { static void destroy (NSObject* o) { [o release]; } };
  67. template <> struct ContainerDeletePolicy<NSMutableArray<AUAudioUnitBus*>> { static void destroy (NSObject* o) { [o release]; } };
  68. template <> struct ContainerDeletePolicy<AUAudioUnitBus> { static void destroy (NSObject* o) { [o release]; } };
  69. template <> struct ContainerDeletePolicy<AVAudioFormat> { static void destroy (NSObject* o) { [o release]; } };
  70. template <> struct ContainerDeletePolicy<AVAudioPCMBuffer> { static void destroy (NSObject* o) { [o release]; } };
  71. template <> struct ContainerDeletePolicy<NSMutableArray<NSNumber*>> { static void destroy (NSObject* o) { [o release]; } };
  72. template <> struct ContainerDeletePolicy<NSNumber> { static void destroy (NSObject* o) { [o release]; } };
  73. template <> struct ContainerDeletePolicy<NSMutableArray<AUAudioUnitPreset*>> { static void destroy (NSObject* o) { [o release]; } };
  74. template <> struct ContainerDeletePolicy<AUAudioUnitPreset> { static void destroy (NSObject* o) { [o release]; } };
  75. //==============================================================================
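// Reference-counted owner of the wrapped juce::AudioProcessor, so the processor can be shared
// safely via AudioProcessorHolder::Ptr; it also stores the view configuration most recently
// selected by the host (see selectViewConfiguration below).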
  76. struct AudioProcessorHolder : public ReferenceCountedObject
  77. {
  78. AudioProcessorHolder() {}
  79. AudioProcessorHolder (AudioProcessor* p) : processor (p) {}
  80. AudioProcessor& operator*() noexcept { return *processor; }
  81. AudioProcessor* operator->() noexcept { return processor; }
  82. AudioProcessor* get() noexcept { return processor; }
  83. struct ViewConfig
  84. {
  85. double width;
  86. double height;
  87. bool hostHasMIDIController;
  88. };
  89. ScopedPointer<ViewConfig> viewConfiguration;
  90. typedef ReferenceCountedObjectPtr<AudioProcessorHolder> Ptr;
  91. private:
  92. ScopedPointer<AudioProcessor> processor;
  93. AudioProcessorHolder& operator= (AudioProcessor*) = delete;
  94. AudioProcessorHolder (AudioProcessorHolder&) = delete;
  95. AudioProcessorHolder& operator= (AudioProcessorHolder&) = delete;
  96. };
  97. //==============================================================================
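// Bridges the Objective-C AUAudioUnit API onto C++ virtual methods: the Obj-C class is created
// at runtime (see the nested Class struct below) and keeps a pointer to this C++ object in an
// ivar, so each AUAudioUnit override just forwards to the matching virtual.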
  98. class JuceAudioUnitv3Base
  99. {
  100. public:
  101. JuceAudioUnitv3Base (const AudioComponentDescription& descr,
  102. AudioComponentInstantiationOptions options,
  103. NSError** error)
  104. #pragma clang diagnostic push
  105. #pragma clang diagnostic ignored "-Wobjc-method-access"
  106. : au ([audioUnitObjCClass.createInstance() initWithComponentDescription: descr
  107. options: options
  108. error: error
  109. juceClass: this])
  110. #pragma clang diagnostic pop
  111. {}
  112. JuceAudioUnitv3Base (AUAudioUnit* audioUnit) : au (audioUnit)
  113. {
  114. jassert (MessageManager::getInstance()->isThisTheMessageThread());
  115. initialiseJuce_GUI();
  116. }
  117. virtual ~JuceAudioUnitv3Base() {}
  118. //==============================================================================
  119. AUAudioUnit* getAudioUnit() noexcept { return au; }
  120. //==============================================================================
  121. virtual void reset() = 0;
  122. //==============================================================================
  123. virtual AUAudioUnitPreset* getCurrentPreset() = 0;
  124. virtual void setCurrentPreset(AUAudioUnitPreset*) = 0;
  125. virtual NSArray<AUAudioUnitPreset*>* getFactoryPresets() = 0;
  126. virtual NSDictionary<NSString*, id>* getFullState()
  127. {
  128. objc_super s = { getAudioUnit(), [AUAudioUnit class] };
  129. return ObjCMsgSendSuper<NSDictionary<NSString*, id>*> (&s, @selector (fullState));
  130. }
  131. virtual void setFullState (NSDictionary<NSString*, id>* state)
  132. {
  133. objc_super s = { getAudioUnit(), [AUAudioUnit class] };
  134. ObjCMsgSendSuper<void, NSDictionary<NSString*, id>*> (&s, @selector (setFullState:), state);
  135. }
  136. virtual AUParameterTree* getParameterTree() = 0;
  137. virtual NSArray<NSNumber*>* parametersForOverviewWithCount (int) = 0;
  138. //==============================================================================
  139. virtual NSTimeInterval getLatency() = 0;
  140. virtual NSTimeInterval getTailTime() = 0;
  141. //==============================================================================
  142. virtual AUAudioUnitBusArray* getInputBusses() = 0;
  143. virtual AUAudioUnitBusArray* getOutputBusses() = 0;
  144. virtual NSArray<NSNumber*>* getChannelCapabilities() = 0;
  145. virtual bool shouldChangeToFormat (AVAudioFormat*, AUAudioUnitBus*) = 0;
  146. //==============================================================================
  147. virtual int getVirtualMIDICableCount() = 0;
  148. virtual bool getSupportsMPE() = 0;
  149. virtual NSArray<NSString*>* getMIDIOutputNames() = 0;
  150. //==============================================================================
  151. virtual AUInternalRenderBlock getInternalRenderBlock() = 0;
  152. virtual bool getCanProcessInPlace() { return false; }
  153. virtual bool getRenderingOffline() = 0;
  154. virtual void setRenderingOffline (bool offline) = 0;
  155. //==============================================================================
  156. virtual NSString* getContextName() const = 0;
  157. virtual void setContextName (NSString*) = 0;
  158. virtual bool allocateRenderResourcesAndReturnError (NSError **outError)
  159. {
  160. objc_super s = { getAudioUnit(), [AUAudioUnit class] };
  161. return (ObjCMsgSendSuper<BOOL, NSError**> (&s, @selector (allocateRenderResourcesAndReturnError:), outError) == YES);
  162. }
  163. virtual void deallocateRenderResources()
  164. {
  165. objc_super s = { getAudioUnit(), [AUAudioUnit class] };
  166. ObjCMsgSendSuper<void> (&s, @selector (deallocateRenderResources));
  167. }
  168. //==============================================================================
  169. #if JUCE_AUV3_VIEW_CONFIG_SUPPORTED
  170. virtual NSIndexSet* getSupportedViewConfigurations (NSArray<AUAudioUnitViewConfiguration*>*) = 0;
  171. virtual void selectViewConfiguration (AUAudioUnitViewConfiguration*) = 0;
  172. #endif
  173. private:
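// Runtime-registered AUAudioUnit subclass. Each addMethod() call installs one of the static
// trampolines below together with an Obj-C type-encoding string for its signature; the
// trampolines recover the C++ object from the "cppObject" ivar and forward the call.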
  174. struct Class : public ObjCClass<AUAudioUnit>
  175. {
  176. Class() : ObjCClass<AUAudioUnit> ("AUAudioUnit_")
  177. {
  178. addIvar<JuceAudioUnitv3Base*> ("cppObject");
  179. #pragma clang diagnostic push
  180. #pragma clang diagnostic ignored "-Wundeclared-selector"
  181. addMethod (@selector (initWithComponentDescription:options:error:juceClass:),
  182. initWithComponentDescriptionAndJuceClass, "@@:",
  183. @encode (AudioComponentDescription),
  184. @encode (AudioComponentInstantiationOptions), "^@@");
  185. #pragma clang diagnostic pop
  186. addMethod (@selector (initWithComponentDescription:options:error:),
  187. initWithComponentDescription, "@@:",
  188. @encode (AudioComponentDescription),
  189. @encode (AudioComponentInstantiationOptions), "^@");
  190. addMethod (@selector (dealloc), dealloc, "v@:");
  191. //==============================================================================
  192. addMethod (@selector (reset), reset, "v@:");
  193. //==============================================================================
  194. addMethod (@selector (currentPreset), getCurrentPreset, "@@:");
  195. addMethod (@selector (setCurrentPreset:), setCurrentPreset, "v@:@");
  196. addMethod (@selector (factoryPresets), getFactoryPresets, "@@:");
  197. addMethod (@selector (fullState), getFullState, "@@:");
  198. addMethod (@selector (setFullState:), setFullState, "v@:@");
  199. addMethod (@selector (parameterTree), getParameterTree, "@@:");
  200. addMethod (@selector (parametersForOverviewWithCount:), parametersForOverviewWithCount, "@@:", @encode (NSInteger));
  201. //==============================================================================
  202. addMethod (@selector (latency), getLatency, @encode (NSTimeInterval), "@:");
  203. addMethod (@selector (tailTime), getTailTime, @encode (NSTimeInterval), "@:");
  204. //==============================================================================
  205. addMethod (@selector (inputBusses), getInputBusses, "@@:");
  206. addMethod (@selector (outputBusses), getOutputBusses, "@@:");
  207. addMethod (@selector (channelCapabilities), getChannelCapabilities, "@@:");
  208. addMethod (@selector (shouldChangeToFormat:forBus:), shouldChangeToFormat, "B@:@@");
  209. //==============================================================================
  210. addMethod (@selector (virtualMIDICableCount), getVirtualMIDICableCount, @encode (NSInteger), "@:");
  211. addMethod (@selector (supportsMPE), getSupportsMPE, @encode (BOOL), "@:");
  212. #if JUCE_AUV3_MIDI_OUTPUT_SUPPORTED
  213. addMethod (@selector (MIDIOutputNames), getMIDIOutputNames, "@@:");
  214. #endif
  215. //==============================================================================
  216. addMethod (@selector (internalRenderBlock), getInternalRenderBlock, @encode (AUInternalRenderBlock), "@:");
  217. addMethod (@selector (canProcessInPlace), getCanProcessInPlace, @encode (BOOL), "@:");
  218. addMethod (@selector (isRenderingOffline), getRenderingOffline, @encode (BOOL), "@:");
  219. addMethod (@selector (setRenderingOffline:), setRenderingOffline, "v@:", @encode (BOOL));
  220. addMethod (@selector (allocateRenderResourcesAndReturnError:), allocateRenderResourcesAndReturnError, "B@:^@");
  221. addMethod (@selector (deallocateRenderResources), deallocateRenderResources, "v@:");
  222. //==============================================================================
  223. addMethod (@selector (contextName), getContextName, "@@:");
  224. addMethod (@selector (setContextName:), setContextName, "v@:@");
  225. //==============================================================================
  226. #if JUCE_AUV3_VIEW_CONFIG_SUPPORTED
  227. addMethod (@selector (supportedViewConfigurations:), getSupportedViewConfigurations, "@@:@");
  228. addMethod (@selector (selectViewConfiguration:), selectViewConfiguration, "v@:@");
  229. #endif
  230. registerClass();
  231. }
  232. //==============================================================================
  233. static JuceAudioUnitv3Base* _this (id self) { return getIvar<JuceAudioUnitv3Base*> (self, "cppObject"); }
  234. static void setThis (id self, JuceAudioUnitv3Base* cpp) { object_setInstanceVariable (self, "cppObject", cpp); }
  235. //==============================================================================
  236. static id initWithComponentDescription (id _self, SEL, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error)
  237. {
  238. AUAudioUnit* self = _self;
  239. objc_super s = { self, [AUAudioUnit class] };
  240. self = ObjCMsgSendSuper<AUAudioUnit*, AudioComponentDescription,
  241. AudioComponentInstantiationOptions, NSError**> (&s, @selector(initWithComponentDescription:options:error:), descr, options, error);
  242. JuceAudioUnitv3Base* juceAU = JuceAudioUnitv3Base::create (self, descr, options, error);
  243. setThis (self, juceAU);
  244. return self;
  245. }
  246. static id initWithComponentDescriptionAndJuceClass (id _self, SEL, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error, JuceAudioUnitv3Base* juceAU)
  247. {
  248. AUAudioUnit* self = _self;
  249. objc_super s = { self, [AUAudioUnit class] };
  250. self = ObjCMsgSendSuper<AUAudioUnit*, AudioComponentDescription,
  251. AudioComponentInstantiationOptions, NSError**> (&s, @selector(initWithComponentDescription:options:error:), descr, options, error);
  252. setThis (self, juceAU);
  253. return self;
  254. }
  255. static void dealloc (id self, SEL)
  256. {
  257. if (! MessageManager::getInstance()->isThisTheMessageThread())
  258. {
  259. WaitableEvent deletionEvent;
  260. struct AUDeleter : public CallbackMessage
  261. {
  262. AUDeleter (id selfToDelete, WaitableEvent& event)
  263. : parentSelf (selfToDelete), parentDeletionEvent (event)
  264. {
  265. }
  266. void messageCallback() override
  267. {
  268. delete _this (parentSelf);
  269. parentDeletionEvent.signal();
  270. }
  271. id parentSelf;
  272. WaitableEvent& parentDeletionEvent;
  273. };
  274. (new AUDeleter (self, deletionEvent))->post();
  275. deletionEvent.wait (-1);
  276. }
  277. else
  278. {
  279. delete _this (self);
  280. }
  281. }
  282. //==============================================================================
  283. static void reset (id self, SEL) { _this (self)->reset(); }
  284. //==============================================================================
  285. static AUAudioUnitPreset* getCurrentPreset (id self, SEL) { return _this (self)->getCurrentPreset(); }
  286. static void setCurrentPreset (id self, SEL, AUAudioUnitPreset* preset) { return _this (self)->setCurrentPreset (preset); }
  287. static NSArray<AUAudioUnitPreset*>* getFactoryPresets (id self, SEL) { return _this (self)->getFactoryPresets(); }
  288. static NSDictionary<NSString*, id>* getFullState (id self, SEL) { return _this (self)->getFullState(); }
  289. static void setFullState (id self, SEL, NSDictionary<NSString *, id>* state) { return _this (self)->setFullState (state); }
  290. static AUParameterTree* getParameterTree (id self, SEL) { return _this (self)->getParameterTree(); }
  291. static NSArray<NSNumber*>* parametersForOverviewWithCount (id self, SEL, NSInteger count) { return _this (self)->parametersForOverviewWithCount (static_cast<int> (count)); }
  292. //==============================================================================
  293. static NSTimeInterval getLatency (id self, SEL) { return _this (self)->getLatency(); }
  294. static NSTimeInterval getTailTime (id self, SEL) { return _this (self)->getTailTime(); }
  295. //==============================================================================
  296. static AUAudioUnitBusArray* getInputBusses (id self, SEL) { return _this (self)->getInputBusses(); }
  297. static AUAudioUnitBusArray* getOutputBusses (id self, SEL) { return _this (self)->getOutputBusses(); }
  298. static NSArray<NSNumber*>* getChannelCapabilities (id self, SEL) { return _this (self)->getChannelCapabilities(); }
  299. static BOOL shouldChangeToFormat (id self, SEL, AVAudioFormat* format, AUAudioUnitBus* bus) { return _this (self)->shouldChangeToFormat (format, bus) ? YES : NO; }
  300. //==============================================================================
  301. static NSInteger getVirtualMIDICableCount (id self, SEL) { return _this (self)->getVirtualMIDICableCount(); }
  302. static BOOL getSupportsMPE (id self, SEL) { return _this (self)->getSupportsMPE() ? YES : NO; }
  303. static NSArray<NSString*>* getMIDIOutputNames (id self, SEL) { return _this (self)->getMIDIOutputNames(); }
  304. //==============================================================================
  305. static AUInternalRenderBlock getInternalRenderBlock (id self, SEL) { return _this (self)->getInternalRenderBlock(); }
  306. static BOOL getCanProcessInPlace (id self, SEL) { return _this (self)->getCanProcessInPlace() ? YES : NO; }
  307. static BOOL getRenderingOffline (id self, SEL) { return _this (self)->getRenderingOffline() ? YES : NO; }
  308. static void setRenderingOffline (id self, SEL, BOOL renderingOffline) { _this (self)->setRenderingOffline (renderingOffline); }
  309. static BOOL allocateRenderResourcesAndReturnError (id self, SEL, NSError** error) { return _this (self)->allocateRenderResourcesAndReturnError (error) ? YES : NO; }
  310. static void deallocateRenderResources (id self, SEL) { _this (self)->deallocateRenderResources(); }
  311. //==============================================================================
  312. static NSString* getContextName (id self, SEL) { return _this (self)->getContextName(); }
  313. static void setContextName (id self, SEL, NSString* str) { return _this (self)->setContextName (str); }
  314. //==============================================================================
  315. #if JUCE_AUV3_VIEW_CONFIG_SUPPORTED
  316. static NSIndexSet* getSupportedViewConfigurations (id self, SEL, NSArray<AUAudioUnitViewConfiguration*>* configs) { return _this (self)->getSupportedViewConfigurations (configs); }
  317. static void selectViewConfiguration (id self, SEL, AUAudioUnitViewConfiguration* config) { _this (self)->selectViewConfiguration (config); }
  318. #endif
  319. };
  320. static JuceAudioUnitv3Base* create (AUAudioUnit*, AudioComponentDescription, AudioComponentInstantiationOptions, NSError**);
  321. //==============================================================================
  322. static Class audioUnitObjCClass;
  323. protected:
  324. AUAudioUnit* au;
  325. };
  326. //==============================================================================
  327. JuceAudioUnitv3Base::Class JuceAudioUnitv3Base::audioUnitObjCClass;
  328. //==============================================================================
  329. //=========================== The actual AudioUnit =============================
  330. //==============================================================================
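// Concrete AU implementation: owns the JUCE AudioProcessor via AudioProcessorHolder, implements
// the AUAudioUnit behaviour in terms of it, listens for parameter/program changes coming from
// the processor, and acts as its AudioPlayHead using the host-supplied context blocks.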
  331. class JuceAudioUnitv3 : public JuceAudioUnitv3Base,
  332. public AudioProcessorListener,
  333. public AudioPlayHead
  334. {
  335. public:
  336. JuceAudioUnitv3 (const AudioProcessorHolder::Ptr& processor,
  337. const AudioComponentDescription& descr,
  338. AudioComponentInstantiationOptions options,
  339. NSError** error)
  340. : JuceAudioUnitv3Base (descr, options, error),
  341. processorHolder (processor),
  342. mapper (*processorHolder->get())
  343. {
  344. init();
  345. }
  346. JuceAudioUnitv3 (AUAudioUnit* audioUnit, AudioComponentDescription, AudioComponentInstantiationOptions, NSError**)
  347. : JuceAudioUnitv3Base (audioUnit),
  348. processorHolder (new AudioProcessorHolder (createPluginFilterOfType (AudioProcessor::wrapperType_AudioUnitv3))),
  349. mapper (*processorHolder->get())
  350. {
  351. init();
  352. }
  353. ~JuceAudioUnitv3()
  354. {
  355. auto& processor = getAudioProcessor();
  356. processor.removeListener (this);
  357. removeEditor (processor);
  358. if (editorObserverToken != nullptr)
  359. {
  360. [paramTree removeParameterObserver: editorObserverToken];
  361. editorObserverToken = nullptr;
  362. }
  363. }
  364. //==============================================================================
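// Common setup for both constructors: applies any preferred channel configuration, fills the
// channelCapabilities array, creates the internal render block, prepares the processor at the
// default sample rate, and registers parameters, presets and busses.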
  365. void init()
  366. {
  367. AudioProcessor& processor = getAudioProcessor();
  368. const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];
  369. #ifdef JucePlugin_PreferredChannelConfigurations
  370. short configs[][2] = {JucePlugin_PreferredChannelConfigurations};
  371. const int numConfigs = sizeof (configs) / sizeof (short[2]);
  372. jassert (numConfigs > 0 && (configs[0][0] > 0 || configs[0][1] > 0));
  373. processor.setPlayConfigDetails (configs[0][0], configs[0][1], kDefaultSampleRate, static_cast<int> (maxFrames));
  374. Array<AUChannelInfo> channelInfos;
  375. for (int i = 0; i < numConfigs; ++i)
  376. {
  377. AUChannelInfo channelInfo;
  378. channelInfo.inChannels = configs[i][0];
  379. channelInfo.outChannels = configs[i][1];
  380. channelInfos.add (channelInfo);
  381. }
  382. #else
  383. Array<AUChannelInfo> channelInfos = AudioUnitHelpers::getAUChannelInfo (processor);
  384. #endif
  385. processor.setPlayHead (this);
  386. totalInChannels = processor.getTotalNumInputChannels();
  387. totalOutChannels = processor.getTotalNumOutputChannels();
  388. {
  389. channelCapabilities = [[NSMutableArray<NSNumber*> alloc] init];
  390. for (int i = 0; i < channelInfos.size(); ++i)
  391. {
  392. AUChannelInfo& info = channelInfos.getReference (i);
  393. [channelCapabilities addObject: [NSNumber numberWithInteger: info.inChannels]];
  394. [channelCapabilities addObject: [NSNumber numberWithInteger: info.outChannels]];
  395. }
  396. }
  397. editorObserverToken = nullptr;
  398. internalRenderBlock = CreateObjCBlock (this, &JuceAudioUnitv3::renderCallback);
  399. processor.setRateAndBufferSizeDetails (kDefaultSampleRate, static_cast<int> (maxFrames));
  400. processor.prepareToPlay (kDefaultSampleRate, static_cast<int> (maxFrames));
  401. processor.addListener (this);
  402. addParameters();
  403. addPresets();
  404. addAudioUnitBusses (true);
  405. addAudioUnitBusses (false);
  406. }
  407. AudioProcessor& getAudioProcessor() const noexcept { return **processorHolder; }
  408. //==============================================================================
  409. void reset() override
  410. {
  411. midiMessages.clear();
  412. lastTimeStamp.mSampleTime = std::numeric_limits<Float64>::max();
  413. }
  414. //==============================================================================
  415. AUAudioUnitPreset* getCurrentPreset() override
  416. {
  417. const int n = static_cast<int> ([factoryPresets count]);
  418. const int idx = static_cast<int> (getAudioProcessor().getCurrentProgram());
  419. if (idx < n)
  420. return [factoryPresets objectAtIndex:static_cast<unsigned int> (idx)];
  421. return nullptr;
  422. }
  423. void setCurrentPreset(AUAudioUnitPreset* preset) override
  424. {
  425. const int n = static_cast<int> ([factoryPresets count]);
  426. const int idx = static_cast<int> ([preset number]);
  427. if (isPositiveAndBelow (idx, n))
  428. getAudioProcessor().setCurrentProgram (idx);
  429. }
  430. NSArray<AUAudioUnitPreset*>* getFactoryPresets() override
  431. {
  432. return factoryPresets;
  433. }
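// Full state is whatever the AUAudioUnit superclass stores plus the processor's current program
// state, serialised under JUCE_STATE_DICTIONARY_KEY; setFullState strips the kAUPresetDataKey
// entry before passing the remainder back to the superclass.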
  434. NSDictionary<NSString*, id>* getFullState() override
  435. {
  436. NSMutableDictionary<NSString*, id>* retval = [[NSMutableDictionary<NSString*, id> alloc] init];
  437. {
  438. NSDictionary<NSString*, id>* superRetval = JuceAudioUnitv3Base::getFullState();
  439. if (superRetval != nullptr)
  440. [retval addEntriesFromDictionary:superRetval];
  441. }
  442. juce::MemoryBlock state;
  443. getAudioProcessor().getCurrentProgramStateInformation (state);
  444. if (state.getSize() > 0)
  445. {
  446. NSData* ourState = [[NSData alloc] initWithBytes: state.getData()
  447. length: state.getSize()];
  448. NSString* nsKey = [[NSString alloc] initWithUTF8String: JUCE_STATE_DICTIONARY_KEY];
  449. [retval setObject: ourState
  450. forKey: nsKey];
  451. [nsKey release];
  452. [ourState release];
  453. }
  454. return [retval autorelease];
  455. }
  456. void setFullState (NSDictionary<NSString*, id>* state) override
  457. {
  458. if (state == nullptr)
  459. return;
  460. NSMutableDictionary<NSString*, id>* modifiedState = [[NSMutableDictionary<NSString*, id> alloc] init];
  461. [modifiedState addEntriesFromDictionary: state];
  462. NSString* nsPresetKey = [[NSString alloc] initWithUTF8String: kAUPresetDataKey];
  463. [modifiedState removeObjectForKey: nsPresetKey];
  464. [nsPresetKey release];
  465. JuceAudioUnitv3Base::setFullState (modifiedState);
  466. NSString* nsKey = [[NSString alloc] initWithUTF8String: JUCE_STATE_DICTIONARY_KEY];
  467. NSObject* obj = [modifiedState objectForKey: nsKey];
  468. [nsKey release];
  469. if (obj != nullptr)
  470. {
  471. if ([obj isKindOfClass:[NSData class]])
  472. {
  473. NSData* data = reinterpret_cast<NSData*> (obj);
  474. const int numBytes = static_cast<int> ([data length]);
  475. const juce::uint8* const rawBytes = reinterpret_cast< const juce::uint8* const> ([data bytes]);
  476. if (numBytes > 0)
  477. getAudioProcessor().setCurrentProgramStateInformation (rawBytes, numBytes);
  478. }
  479. }
  480. [modifiedState release];
  481. }
  482. AUParameterTree* getParameterTree() override
  483. {
  484. return paramTree;
  485. }
  486. NSArray<NSNumber*>* parametersForOverviewWithCount (int count) override
  487. {
  488. const int n = static_cast<int> ([overviewParams count]);
  489. if (count >= n)
  490. return overviewParams;
  491. NSMutableArray<NSNumber*>* retval = [[NSMutableArray<NSNumber*>alloc] initWithArray: overviewParams];
  492. [retval removeObjectsInRange: NSMakeRange (static_cast<unsigned int> (count), static_cast<unsigned int> (n - count))];
  493. return [retval autorelease];
  494. }
  495. //==============================================================================
  496. NSTimeInterval getLatency() override
  497. {
  498. auto& p = getAudioProcessor();
  499. return p.getLatencySamples() / p.getSampleRate();
  500. }
  501. NSTimeInterval getTailTime() override
  502. {
  503. return getAudioProcessor().getTailLengthSeconds();
  504. }
  505. //==============================================================================
  506. AUAudioUnitBusArray* getInputBusses() override { return inputBusses; }
  507. AUAudioUnitBusArray* getOutputBusses() override { return outputBusses; }
  508. NSArray<NSNumber*>* getChannelCapabilities() override { return channelCapabilities; }
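// Accept a host-requested bus format only if the requested channel layout (or plain channel
// count, when no layout tag is provided) is supported by the corresponding AudioProcessor::Bus,
// or matches the preferred channel configurations when those are compiled in.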
  509. bool shouldChangeToFormat (AVAudioFormat* format, AUAudioUnitBus* auBus) override
  510. {
  511. const bool isInput = ([auBus busType] == AUAudioUnitBusTypeInput);
  512. const int busIdx = static_cast<int> ([auBus index]);
  513. const int newNumChannels = static_cast<int> ([format channelCount]);
  514. AudioProcessor& processor = getAudioProcessor();
  515. if (AudioProcessor::Bus* bus = processor.getBus (isInput, busIdx))
  516. {
  517. #ifdef JucePlugin_PreferredChannelConfigurations
  518. ignoreUnused (bus);
  519. short configs[][2] = {JucePlugin_PreferredChannelConfigurations};
  520. if (! AudioUnitHelpers::isLayoutSupported (processor, isInput, busIdx, newNumChannels, configs))
  521. return false;
  522. #else
  523. const AVAudioChannelLayout* layout = [format channelLayout];
  524. const AudioChannelLayoutTag layoutTag = (layout != nullptr ? [layout layoutTag] : 0);
  525. if (layoutTag != 0)
  526. {
  527. AudioChannelSet newLayout = CoreAudioLayouts::fromCoreAudio (layoutTag);
  528. if (newLayout.size() != newNumChannels)
  529. return false;
  530. if (! bus->isLayoutSupported (newLayout))
  531. return false;
  532. }
  533. else
  534. {
  535. if (! bus->isNumberOfChannelsSupported (newNumChannels))
  536. return false;
  537. }
  538. #endif
  539. return true;
  540. }
  541. return false;
  542. }
  543. //==============================================================================
  544. int getVirtualMIDICableCount() override
  545. {
  546. #if JucePlugin_WantsMidiInput
  547. return 1;
  548. #else
  549. return 0;
  550. #endif
  551. }
  552. bool getSupportsMPE() override
  553. {
  554. return getAudioProcessor().supportsMPE();
  555. }
  556. NSArray<NSString*>* getMIDIOutputNames() override
  557. {
  558. #if JucePlugin_ProducesMidiOutput
  559. return @[@"MIDI Out"];
  560. #else
  561. return @[];
  562. #endif
  563. }
  564. //==============================================================================
  565. AUInternalRenderBlock getInternalRenderBlock() override { return internalRenderBlock; }
  566. bool getRenderingOffline() override { return getAudioProcessor().isNonRealtime(); }
  567. void setRenderingOffline (bool offline) override
  568. {
  569. auto& processor = getAudioProcessor();
  570. auto isCurrentlyNonRealtime = processor.isNonRealtime();
  571. if (isCurrentlyNonRealtime != offline)
  572. {
  573. ScopedLock callbackLock (processor.getCallbackLock());
  574. processor.setNonRealtime (offline);
  575. processor.prepareToPlay (processor.getSampleRate(), processor.getBlockSize());
  576. }
  577. }
  578. //==============================================================================
  579. NSString* getContextName() const override { return juceStringToNS (contextName); }
  580. void setContextName (NSString* str) override
  581. {
  582. if (str != nullptr)
  583. {
  584. AudioProcessor::TrackProperties props;
  585. props.name = nsStringToJuce (str);
  586. getAudioProcessor().updateTrackProperties (props);
  587. }
  588. }
  589. //==============================================================================
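// Reads the format the host negotiated on every AU bus, converts it to a BusesLayout and applies
// it to the processor, allocates the per-bus buffer lists and the channel-mapping scratch buffer,
// caches the host's musical-context/transport blocks, then prepares the processor with the
// negotiated sample rate and maximumFramesToRender.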
  590. bool allocateRenderResourcesAndReturnError (NSError **outError) override
  591. {
  592. AudioProcessor& processor = getAudioProcessor();
  593. const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];
  594. if (! JuceAudioUnitv3Base::allocateRenderResourcesAndReturnError (outError))
  595. return false;
  596. if (outError != nullptr)
  597. *outError = nullptr;
  598. AudioProcessor::BusesLayout layouts;
  599. for (int dir = 0; dir < 2; ++dir)
  600. {
  601. const bool isInput = (dir == 0);
  602. const int n = AudioUnitHelpers::getBusCount (&processor, isInput);
  603. Array<AudioChannelSet>& channelSets = (isInput ? layouts.inputBuses : layouts.outputBuses);
  604. AUAudioUnitBusArray* auBuses = (isInput ? [getAudioUnit() inputBusses] : [getAudioUnit() outputBusses]);
  605. jassert ([auBuses count] == static_cast<NSUInteger> (n));
  606. for (int busIdx = 0; busIdx < n; ++busIdx)
  607. {
  608. AudioProcessor::Bus* bus = processor.getBus (isInput, busIdx);
  609. AVAudioFormat* format = [[auBuses objectAtIndexedSubscript:static_cast<NSUInteger> (busIdx)] format];
  610. AudioChannelSet newLayout;
  611. const AVAudioChannelLayout* layout = [format channelLayout];
  612. const AudioChannelLayoutTag layoutTag = (layout != nullptr ? [layout layoutTag] : 0);
  613. if (layoutTag != 0)
  614. newLayout = CoreAudioLayouts::fromCoreAudio (layoutTag);
  615. else
  616. newLayout = bus->supportedLayoutWithChannels (static_cast<int> ([format channelCount]));
  617. if (newLayout.isDisabled())
  618. return false;
  619. channelSets.add (newLayout);
  620. }
  621. }
  622. #ifdef JucePlugin_PreferredChannelConfigurations
  623. short configs[][2] = {JucePlugin_PreferredChannelConfigurations};
  624. if (! AudioProcessor::containsLayout (layouts, configs))
  625. {
  626. if (outError != nullptr)
  627. *outError = [NSError errorWithDomain:NSOSStatusErrorDomain code:kAudioUnitErr_FormatNotSupported userInfo:nullptr];
  628. return false;
  629. }
  630. #endif
  631. if (! AudioUnitHelpers::setBusesLayout (&getAudioProcessor(), layouts))
  632. {
  633. if (outError != nullptr)
  634. *outError = [NSError errorWithDomain:NSOSStatusErrorDomain code:kAudioUnitErr_FormatNotSupported userInfo:nullptr];
  635. return false;
  636. }
  637. totalInChannels = processor.getTotalNumInputChannels();
  638. totalOutChannels = processor.getTotalNumOutputChannels();
  639. allocateBusBuffer (true);
  640. allocateBusBuffer (false);
  641. mapper.alloc();
  642. audioBuffer.prepare (totalInChannels, totalOutChannels, static_cast<int> (maxFrames));
  643. double sampleRate = (jmax (AudioUnitHelpers::getBusCount (&processor, true), AudioUnitHelpers::getBusCount (&processor, false)) > 0 ?
  644. [[[([inputBusses count] > 0 ? inputBusses : outputBusses) objectAtIndexedSubscript: 0] format] sampleRate] : 44100.0);
  645. processor.setRateAndBufferSizeDetails (sampleRate, static_cast<int> (maxFrames));
  646. processor.prepareToPlay (sampleRate, static_cast<int> (maxFrames));
  647. zeromem (&lastAudioHead, sizeof (lastAudioHead));
  648. hostMusicalContextCallback = [getAudioUnit() musicalContextBlock];
  649. hostTransportStateCallback = [getAudioUnit() transportStateBlock];
  650. reset();
  651. return true;
  652. }
  653. void deallocateRenderResources() override
  654. {
  655. hostMusicalContextCallback = nullptr;
  656. hostTransportStateCallback = nullptr;
  657. getAudioProcessor().releaseResources();
  658. audioBuffer.release();
  659. inBusBuffers. clear();
  660. outBusBuffers.clear();
  661. mapper.release();
  662. JuceAudioUnitv3Base::deallocateRenderResources();
  663. }
  664. //==============================================================================
  665. #if JUCE_AUV3_VIEW_CONFIG_SUPPORTED
  666. NSIndexSet* getSupportedViewConfigurations (NSArray<AUAudioUnitViewConfiguration*>* configs) override
  667. {
  668. auto supportedViewIndices = [[NSMutableIndexSet alloc] init];
  669. auto n = [configs count];
  670. if (auto* editor = getAudioProcessor().createEditorIfNeeded())
  671. {
  672. // If you hit this assertion then your plug-in's editor is reporting that it doesn't support
  673. // any host MIDI controller configurations!
  674. jassert (editor->supportsHostMIDIControllerPresence (true) || editor->supportsHostMIDIControllerPresence (false));
  675. for (auto i = 0u; i < n; ++i)
  676. {
  677. if (auto* viewConfiguration = [configs objectAtIndex:i])
  678. {
  679. if (editor->supportsHostMIDIControllerPresence ([viewConfiguration hostHasController] == YES))
  680. {
  681. auto* constrainer = editor->getConstrainer();
  682. auto height = (int) [viewConfiguration height];
  683. auto width = (int) [viewConfiguration width];
  684. if (height <= constrainer->getMaximumHeight() && height >= constrainer->getMinimumHeight()
  685. && width <= constrainer->getMaximumWidth() && width >= constrainer->getMinimumWidth())
  686. [supportedViewIndices addIndex: i];
  687. }
  688. }
  689. }
  690. }
  691. return [supportedViewIndices autorelease];
  692. }
  693. void selectViewConfiguration (AUAudioUnitViewConfiguration* config) override
  694. {
  695. processorHolder->viewConfiguration = new AudioProcessorHolder::ViewConfig { [config width], [config height], [config hostHasController] == YES };
  696. }
  697. #endif
  698. //==============================================================================
  699. void audioProcessorChanged (AudioProcessor* processor) override
  700. {
  701. ignoreUnused (processor);
  702. [au willChangeValueForKey: @"allParameterValues"];
  703. addPresets();
  704. [au didChangeValueForKey: @"allParameterValues"];
  705. }
  706. void audioProcessorParameterChanged (AudioProcessor*, int idx, float newValue) override
  707. {
  708. if (isPositiveAndBelow (idx, getAudioProcessor().getNumParameters()))
  709. {
  710. if (AUParameter* param = [paramTree parameterWithAddress: getAUParameterAddressForIndex (idx)])
  711. {
  712. if (editorObserverToken != nullptr)
  713. [param setValue: newValue originator: editorObserverToken];
  714. else
  715. [param setValue: newValue];
  716. }
  717. }
  718. }
  719. //==============================================================================
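// AudioPlayHead: the frame rate and fallback sample position come from the last render
// timestamp; tempo, time signature and the transport state are queried through the cached
// musical-context and transport-state blocks. lastAudioHead keeps the last complete result so
// earlier values are reused when either host callback is unavailable.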
  720. bool getCurrentPosition (CurrentPositionInfo& info) override
  721. {
  722. bool musicContextCallSucceeded = false;
  723. bool transportStateCallSucceeded = false;
  724. info = lastAudioHead;
  725. info.timeInSamples = (int64) (lastTimeStamp.mSampleTime + 0.5);
  726. info.timeInSeconds = info.timeInSamples / getAudioProcessor().getSampleRate();
  727. switch (lastTimeStamp.mSMPTETime.mType)
  728. {
  729. case kSMPTETimeType2398: info.frameRate = AudioPlayHead::fps23976; break;
  730. case kSMPTETimeType24: info.frameRate = AudioPlayHead::fps24; break;
  731. case kSMPTETimeType25: info.frameRate = AudioPlayHead::fps25; break;
  732. case kSMPTETimeType2997: info.frameRate = AudioPlayHead::fps2997; break;
  733. case kSMPTETimeType2997Drop: info.frameRate = AudioPlayHead::fps2997drop; break;
  734. case kSMPTETimeType30Drop: info.frameRate = AudioPlayHead::fps30drop; break;
  735. case kSMPTETimeType30: info.frameRate = AudioPlayHead::fps30; break;
  736. case kSMPTETimeType60Drop: info.frameRate = AudioPlayHead::fps60drop; break;
  737. case kSMPTETimeType60: info.frameRate = AudioPlayHead::fps60; break;
  738. default: info.frameRate = AudioPlayHead::fpsUnknown; break;
  739. }
  740. double num;
  741. NSInteger den;
  742. NSInteger outDeltaSampleOffsetToNextBeat;
  743. double outCurrentMeasureDownBeat, bpm;
  744. double ppqPosition;
  745. if (hostMusicalContextCallback != nullptr)
  746. {
  747. AUHostMusicalContextBlock musicalContextCallback = hostMusicalContextCallback;
  748. if (musicalContextCallback (&bpm, &num, &den, &ppqPosition, &outDeltaSampleOffsetToNextBeat, &outCurrentMeasureDownBeat))
  749. {
  750. musicContextCallSucceeded = true;
  751. info.timeSigNumerator = (int) num;
  752. info.timeSigDenominator = (int) den;
  753. info.ppqPositionOfLastBarStart = outCurrentMeasureDownBeat;
  754. info.bpm = bpm;
  755. info.ppqPosition = ppqPosition;
  756. info.ppqPositionOfLastBarStart = outCurrentMeasureDownBeat;
  757. }
  758. }
  759. double outCurrentSampleInTimeLine, outCycleStartBeat = 0, outCycleEndBeat = 0;
  760. AUHostTransportStateFlags flags;
  761. if (hostTransportStateCallback != nullptr)
  762. {
  763. AUHostTransportStateBlock transportStateCallback = hostTransportStateCallback;
  764. if (transportStateCallback (&flags, &outCurrentSampleInTimeLine, &outCycleStartBeat, &outCycleEndBeat))
  765. {
  766. transportStateCallSucceeded = true;
  767. info.timeInSamples = (int64) (outCurrentSampleInTimeLine + 0.5);
  768. info.timeInSeconds = info.timeInSamples / getAudioProcessor().getSampleRate();
  769. info.isPlaying = ((flags & AUHostTransportStateMoving) != 0);
  770. info.isLooping = ((flags & AUHostTransportStateCycling) != 0);
  771. info.isRecording = ((flags & AUHostTransportStateRecording) != 0);
  772. info.ppqLoopStart = outCycleStartBeat;
  773. info.ppqLoopEnd = outCycleEndBeat;
  774. }
  775. }
  776. if (musicContextCallSucceeded && transportStateCallSucceeded)
  777. lastAudioHead = info;
  778. return true;
  779. }
  780. //==============================================================================
  781. static void removeEditor (AudioProcessor& processor)
  782. {
  783. ScopedLock editorLock (processor.getCallbackLock());
  784. if (AudioProcessorEditor* editor = processor.getActiveEditor())
  785. {
  786. processor.editorBeingDeleted (editor);
  787. delete editor;
  788. }
  789. }
  790. private:
  791. //==============================================================================
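// Per-bus AudioBufferList wrapper: prepare() points the buffer list at the host-provided buffers
// when they are compatible, otherwise at an internally allocated scratch buffer, so the render
// callback can avoid copying whenever the host supplies usable memory.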
  792. struct BusBuffer
  793. {
  794. BusBuffer (AUAudioUnitBus* bus, int maxFramesPerBuffer)
  795. : auBus (bus),
  796. maxFrames (maxFramesPerBuffer),
  797. numberOfChannels (static_cast<int> ([[auBus format] channelCount])),
  798. isInterleaved ([[auBus format] isInterleaved])
  799. {
  800. alloc();
  801. }
  802. //==============================================================================
  803. void alloc()
  804. {
  805. const int numBuffers = isInterleaved ? 1 : numberOfChannels;
  806. int bytes = static_cast<int> (sizeof (AudioBufferList))
  807. + ((numBuffers - 1) * static_cast<int> (sizeof (::AudioBuffer)));
  808. jassert (bytes > 0);
  809. bufferListStorage.calloc (static_cast<size_t> (bytes));
  810. bufferList = reinterpret_cast<AudioBufferList*> (bufferListStorage.getData());
  811. const int bufferChannels = isInterleaved ? numberOfChannels : 1;
  812. scratchBuffer.setSize (numBuffers, bufferChannels * maxFrames);
  813. }
  814. void dealloc()
  815. {
  816. bufferList = nullptr;
  817. bufferListStorage.free();
  818. scratchBuffer.setSize (0, 0);
  819. }
  820. //==============================================================================
  821. int numChannels() const noexcept { return numberOfChannels; }
  822. bool interleaved() const noexcept { return isInterleaved; }
  823. AudioBufferList* get() const noexcept { return bufferList; }
  824. //==============================================================================
  825. void prepare (UInt32 nFrames, const AudioBufferList* other = nullptr) noexcept
  826. {
  827. const int numBuffers = isInterleaved ? 1 : numberOfChannels;
  828. const bool isCompatible = isCompatibleWith (other);
  829. bufferList->mNumberBuffers = static_cast<UInt32> (numBuffers);
  830. for (int i = 0; i < numBuffers; ++i)
  831. {
  832. const UInt32 bufferChannels = static_cast<UInt32> (isInterleaved ? numberOfChannels : 1);
  833. bufferList->mBuffers[i].mNumberChannels = bufferChannels;
  834. bufferList->mBuffers[i].mData = (isCompatible ? other->mBuffers[i].mData
  835. : scratchBuffer.getWritePointer (i));
  836. bufferList->mBuffers[i].mDataByteSize = nFrames * bufferChannels * sizeof (float);
  837. }
  838. }
  839. //==============================================================================
  840. bool isCompatibleWith (const AudioBufferList* other) const noexcept
  841. {
  842. if (other == nullptr)
  843. return false;
  844. if (other->mNumberBuffers > 0)
  845. {
  846. const bool otherInterleaved = AudioUnitHelpers::isAudioBufferInterleaved (*other);
  847. const int otherChannels = static_cast<int> (otherInterleaved ? other->mBuffers[0].mNumberChannels
  848. : other->mNumberBuffers);
  849. return otherInterleaved == isInterleaved
  850. && numberOfChannels == otherChannels;
  851. }
  852. return numberOfChannels == 0;
  853. }
  854. private:
  855. AUAudioUnitBus* auBus;
  856. HeapBlock<char> bufferListStorage;
  857. AudioBufferList* bufferList = nullptr;
  858. int maxFrames, numberOfChannels;
  859. bool isInterleaved;
  860. AudioBuffer<float> scratchBuffer;
  861. };
  862. //==============================================================================
  863. void addAudioUnitBusses (bool isInput)
  864. {
  865. ScopedPointer<NSMutableArray<AUAudioUnitBus*>> array = [[NSMutableArray<AUAudioUnitBus*> alloc] init];
  866. AudioProcessor& processor = getAudioProcessor();
  867. const int n = AudioUnitHelpers::getBusCount (&processor, isInput);
  868. for (int i = 0; i < n; ++i)
  869. {
  870. ScopedPointer<AUAudioUnitBus> audioUnitBus;
  871. {
  872. ScopedPointer<AVAudioFormat> defaultFormat = [[AVAudioFormat alloc] initStandardFormatWithSampleRate: kDefaultSampleRate
  873. channels: static_cast<AVAudioChannelCount> (processor.getChannelCountOfBus (isInput, i))];
  874. audioUnitBus = [[AUAudioUnitBus alloc] initWithFormat: defaultFormat
  875. error: nullptr];
  876. }
  877. [array addObject: audioUnitBus];
  878. }
  879. (isInput ? inputBusses : outputBusses) = [[AUAudioUnitBusArray alloc] initWithAudioUnit: au
  880. busType: (isInput ? AUAudioUnitBusTypeInput : AUAudioUnitBusTypeOutput)
  881. busses: array];
  882. }
  883. // When parameters are discrete we need to use integer values.
  884. float getMaximumParameterValue (int parameterIndex)
  885. {
  886. #if JUCE_FORCE_LEGACY_PARAMETER_AUTOMATION_TYPE
  887. ignoreUnused (parameterIndex);
  888. return 1.0f;
  889. #else
  890. auto& processor = getAudioProcessor();
  891. return processor.isParameterDiscrete (parameterIndex) ? (float) (processor.getParameterNumSteps (parameterIndex) - 1) : 1.0f;
  892. #endif
  893. }
  894. void addParameters()
  895. {
  896. ScopedPointer<NSMutableArray<AUParameterNode*>> params = [[NSMutableArray<AUParameterNode*> alloc] init];
  897. overviewParams = [[NSMutableArray<NSNumber*> alloc] init];
  898. auto& processor = getAudioProcessor();
  899. const int n = processor.getNumParameters();
  900. #if ! JUCE_FORCE_USE_LEGACY_PARAM_IDS
  901. // check whether all parameters are managed
  902. usingManagedParameter = (processor.getParameters().size() == processor.getNumParameters());
  903. #endif
  904. for (int idx = 0; idx < n; ++idx)
  905. {
  906. const String identifier (idx);
  907. const String name = processor.getParameterName (idx);
  908. AudioUnitParameterUnit unit = kAudioUnitParameterUnit_Generic;
  909. AudioUnitParameterOptions flags = (UInt32) (kAudioUnitParameterFlag_IsWritable
  910. | kAudioUnitParameterFlag_IsReadable
  911. | kAudioUnitParameterFlag_HasCFNameString
  912. | kAudioUnitParameterFlag_ValuesHaveStrings);
  913. #if ! JUCE_FORCE_LEGACY_PARAMETER_AUTOMATION_TYPE
  914. flags |= (UInt32) kAudioUnitParameterFlag_IsHighResolution;
  915. #endif
  916. // set whether the param is automatable (unnamed parameters aren't allowed to be automated)
  917. if (name.isEmpty() || ! processor.isParameterAutomatable (idx))
  918. flags |= kAudioUnitParameterFlag_NonRealTime;
  919. const bool isParameterDiscrete = processor.isParameterDiscrete (idx);
  920. if (! isParameterDiscrete)
  921. flags |= kAudioUnitParameterFlag_CanRamp;
  922. if (processor.isMetaParameter (idx))
  923. flags |= kAudioUnitParameterFlag_IsGlobalMeta;
  924. auto deleter = [](NSMutableArray* arr) { [arr release]; };
  925. std::unique_ptr<NSMutableArray, decltype (deleter)> valueStrings (nullptr, deleter);
  926. // is this a meter?
  927. if (((processor.getParameterCategory (idx) & 0xffff0000) >> 16) == 2)
  928. {
  929. flags &= ~kAudioUnitParameterFlag_IsWritable;
  930. flags |= kAudioUnitParameterFlag_MeterReadOnly | kAudioUnitParameterFlag_DisplayLogarithmic;
  931. unit = kAudioUnitParameterUnit_LinearGain;
  932. }
  933. else
  934. {
  935. #if ! JUCE_FORCE_LEGACY_PARAMETER_AUTOMATION_TYPE
  936. if (auto* param = processor.getParameters()[idx])
  937. {
  938. if (param->isDiscrete())
  939. {
  940. unit = param->isBoolean() ? kAudioUnitParameterUnit_Boolean : kAudioUnitParameterUnit_Indexed;
  941. auto maxValue = getMaximumParameterValue (idx);
  942. auto numSteps = param->getNumSteps();
  943. // Some hosts can't handle the huge numbers of discrete parameter values created when
  944. // using the default number of steps.
  945. jassert (numSteps != AudioProcessor::getDefaultNumParameterSteps());
  946. valueStrings.reset ([NSMutableArray new]);
  947. for (int i = 0; i < numSteps; ++i)
  948. [valueStrings.get() addObject: juceStringToNS (param->getText ((float) i / maxValue, 0))];
  949. }
  950. }
  951. #endif
  952. }
  953. #if JUCE_FORCE_USE_LEGACY_PARAM_IDS
  954. AUParameterAddress address = static_cast<AUParameterAddress> (idx);
  955. #else
  956. AUParameterAddress address = generateAUParameterAddressForIndex (idx);
  957. // Consider yourself very unlucky if you hit this assertion. The hash codes of your
  958. // parameter ids are not unique.
  959. jassert (! paramMap.contains (static_cast<int64> (address)));
  960. paramAddresses.add (address);
  961. paramMap.set (static_cast<int64> (address), idx);
  962. #endif
  963. // create methods in AUParameterTree return unretained objects (!) -> see Apple header AUAudioUnitImplementation.h
  964. ScopedPointer<AUParameter> param = [[AUParameterTree createParameterWithIdentifier: juceStringToNS (identifier)
  965. name: juceStringToNS (name)
  966. address: address
  967. min: 0.0f
  968. max: getMaximumParameterValue (idx)
  969. unit: unit
  970. unitName: nullptr
  971. flags: flags
  972. valueStrings: valueStrings.get()
  973. dependentParameters: nullptr] retain];
  974. [param.get() setValue: processor.getParameterDefaultValue (idx)];
  975. [params addObject: param];
  976. [overviewParams addObject: [NSNumber numberWithUnsignedLongLong:address]];
  977. }
  978. // create methods in AUParameterTree return unretained objects (!) -> see Apple header AUAudioUnitImplementation.h
  979. paramTree = [[AUParameterTree createTreeWithChildren: params] retain];
  980. paramObserver = CreateObjCBlock (this, &JuceAudioUnitv3::valueChangedFromHost);
  981. paramProvider = CreateObjCBlock (this, &JuceAudioUnitv3::getValue);
  982. stringFromValueProvider = CreateObjCBlock (this, &JuceAudioUnitv3::stringFromValue);
  983. valueFromStringProvider = CreateObjCBlock (this, &JuceAudioUnitv3::valueFromString);
  984. [paramTree setImplementorValueObserver: paramObserver];
  985. [paramTree setImplementorValueProvider: paramProvider];
  986. [paramTree setImplementorStringFromValueCallback: stringFromValueProvider];
  987. [paramTree setImplementorValueFromStringCallback: valueFromStringProvider];
  988. if (processor.hasEditor())
  989. {
  990. editorParamObserver = CreateObjCBlock (this, &JuceAudioUnitv3::valueChangedForObserver);
  991. editorObserverToken = [paramTree tokenByAddingParameterObserver: editorParamObserver];
  992. }
  993. }
  994. void setAudioProcessorParameter (int index, float value)
  995. {
  996. if (auto* param = getAudioProcessor().getParameters()[index])
  997. {
  998. param->setValue (value);
  999. param->sendValueChangedMessageToListeners (value);
  1000. }
  1001. else if (isPositiveAndBelow (index, getAudioProcessor().getNumParameters()))
  1002. {
  1003. getAudioProcessor().setParameter (index, value);
  1004. }
  1005. }
  1006. void addPresets()
  1007. {
  1008. factoryPresets = [[NSMutableArray<AUAudioUnitPreset*> alloc] init];
  1009. const int n = getAudioProcessor().getNumPrograms();
  1010. for (int idx = 0; idx < n; ++idx)
  1011. {
  1012. String name = getAudioProcessor().getProgramName (idx);
  1013. ScopedPointer<AUAudioUnitPreset> preset = [[AUAudioUnitPreset alloc] init];
  1014. [preset setName: juceStringToNS (name)];
  1015. [preset setNumber: static_cast<NSInteger> (idx)];
  1016. [factoryPresets addObject: preset];
  1017. }
  1018. }
  1019. //==============================================================================
  1020. void allocateBusBuffer (bool isInput)
  1021. {
  1022. OwnedArray<BusBuffer>& busBuffers = isInput ? inBusBuffers : outBusBuffers;
  1023. busBuffers.clear();
  1024. const int n = AudioUnitHelpers::getBusCount (&getAudioProcessor(), isInput);
  1025. const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];
  1026. for (int busIdx = 0; busIdx < n; ++busIdx)
  1027. busBuffers.add (new BusBuffer ([(isInput ? inputBusses : outputBusses) objectAtIndexedSubscript: static_cast<unsigned int> (busIdx)],
  1028. static_cast<int> (maxFrames)));
  1029. }
  1030. //==============================================================================
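// Walks the linked list of AURenderEvents for this render cycle: MIDI events are collected into
// midiMessages with sample-accurate offsets, parameter and parameter-ramp events are applied to
// the processor straight away via setAudioProcessorParameter.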
  1031. void processEvents (const AURenderEvent *__nullable realtimeEventListHead, int numParams, AUEventSampleTime startTime)
  1032. {
  1033. for (const AURenderEvent* event = realtimeEventListHead; event != nullptr; event = event->head.next)
  1034. {
  1035. switch (event->head.eventType)
  1036. {
  1037. case AURenderEventMIDI:
  1038. {
  1039. const AUMIDIEvent& midiEvent = event->MIDI;
  1040. midiMessages.addEvent (midiEvent.data, midiEvent.length, static_cast<int> (midiEvent.eventSampleTime - startTime));
  1041. }
  1042. break;
  1043. case AURenderEventParameter:
  1044. case AURenderEventParameterRamp:
  1045. {
  1046. const AUParameterEvent& paramEvent = event->parameter;
  1047. const int idx = getJuceParameterIndexForAUAddress (paramEvent.parameterAddress);
  1048. setAudioProcessorParameter (idx, paramEvent.value);
  1049. }
  1050. break;
  1051. default:
  1052. break;
  1053. }
  1054. }
  1055. }
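// The per-block render function exposed through internalRenderBlock. For each new timestamp it
// pulls the inputs, wires the bus buffers into one channel-mapped AudioBuffer, runs processBlock
// (bypassed or cleared when appropriate), forwards any produced MIDI to the host, and finally
// copies the channels of the requested output bus into outputData.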
  1056. AUAudioUnitStatus renderCallback (AudioUnitRenderActionFlags* actionFlags, const AudioTimeStamp* timestamp, AUAudioFrameCount frameCount,
  1057. NSInteger outputBusNumber, AudioBufferList* outputData, const AURenderEvent *__nullable realtimeEventListHead,
  1058. AURenderPullInputBlock __nullable pullInputBlock)
  1059. {
  1060. auto& processor = getAudioProcessor();
  1061. jassert (static_cast<int> (frameCount) <= getAudioProcessor().getBlockSize());
  1062. // process params
  1063. const int numParams = processor.getNumParameters();
  1064. processEvents (realtimeEventListHead, numParams, static_cast<AUEventSampleTime> (timestamp->mSampleTime));
  1065. if (lastTimeStamp.mSampleTime != timestamp->mSampleTime)
  1066. {
  1067. lastTimeStamp = *timestamp;
  1068. const int numInputBuses = inBusBuffers. size();
  1069. const int numOutputBuses = outBusBuffers.size();
  1070. // prepare buffers
  1071. {
  1072. for (int busIdx = 0; busIdx < numOutputBuses; ++busIdx)
  1073. {
  1074. BusBuffer& busBuffer = *outBusBuffers[busIdx];
  1075. const bool canUseDirectOutput =
  1076. (busIdx == outputBusNumber && outputData != nullptr && outputData->mNumberBuffers > 0);
  1077. busBuffer.prepare (frameCount, canUseDirectOutput ? outputData : nullptr);
  1078. }
  1079. for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
  1080. {
  1081. BusBuffer& busBuffer = *inBusBuffers[busIdx];
  1082. busBuffer.prepare (frameCount, busIdx < numOutputBuses ? outBusBuffers[busIdx]->get() : nullptr);
  1083. }
  1084. audioBuffer.reset();
  1085. }
  1086. // pull inputs
  1087. {
  1088. for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
  1089. {
  1090. BusBuffer& busBuffer = *inBusBuffers[busIdx];
  1091. AudioBufferList* buffer = busBuffer.get();
  1092. if (pullInputBlock == nullptr || pullInputBlock (actionFlags, timestamp, frameCount, busIdx, buffer) != noErr)
  1093. AudioUnitHelpers::clearAudioBuffer (*buffer);
  1094. if (actionFlags != nullptr && (*actionFlags & kAudioUnitRenderAction_OutputIsSilence) != 0)
  1095. AudioUnitHelpers::clearAudioBuffer (*buffer);
  1096. }
  1097. }
            // set buffer pointer to minimize copying
            {
                int chIdx = 0;

                for (int busIdx = 0; busIdx < numOutputBuses; ++busIdx)
                {
                    BusBuffer& busBuffer = *outBusBuffers[busIdx];
                    AudioBufferList* buffer = busBuffer.get();

                    const bool interleaved = busBuffer.interleaved();
                    const int numChannels = busBuffer.numChannels();

                    const int* outLayoutMap = mapper.get (false, busIdx);

                    for (int ch = 0; ch < numChannels; ++ch)
                        audioBuffer.setBuffer (chIdx++, interleaved ? nullptr : static_cast<float*> (buffer->mBuffers[outLayoutMap[ch]].mData));
                }

                // use input pointers on remaining channels
                for (int busIdx = 0; chIdx < totalInChannels;)
                {
                    const int channelOffset = processor.getOffsetInBusBufferForAbsoluteChannelIndex (true, chIdx, busIdx);

                    BusBuffer& busBuffer = *inBusBuffers[busIdx];
                    AudioBufferList* buffer = busBuffer.get();

                    const int* inLayoutMap = mapper.get (true, busIdx);
                    audioBuffer.setBuffer (chIdx++, busBuffer.interleaved() ? nullptr : static_cast<float*> (buffer->mBuffers[inLayoutMap[channelOffset]].mData));
                }
            }
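
            // push() copies each input bus (remapped to JUCE channel order) into the processing
            // buffer; output-only channels with no corresponding input are zeroed so the processor
            // always starts from defined data.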
            // copy input
            {
                for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
                    audioBuffer.push (*inBusBuffers[busIdx]->get(), mapper.get (true, busIdx));

                // clear remaining channels
                for (int i = totalInChannels; i < totalOutChannels; ++i)
                    zeromem (audioBuffer.push(), sizeof (float) * frameCount);
            }

            // process audio
            processBlock (audioBuffer.getBuffer (frameCount), midiMessages);

            // send MIDI
           #if JucePlugin_ProducesMidiOutput && JUCE_AUV3_MIDI_OUTPUT_SUPPORTED
            // the host only installs a MIDI output block if it wants MIDI from us, so check for nil
            if (auto midiOut = [au MIDIOutputEventBlock])
            {
                MidiMessage msg;
                int samplePosition;

                for (MidiBuffer::Iterator it (midiMessages); it.getNextEvent (msg, samplePosition);)
                    midiOut (samplePosition, 0, msg.getRawDataSize(), msg.getRawData());
            }
           #endif

            midiMessages.clear();
        }
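
        // Only the bus the host asked for in this call is copied back; other buses rendered above
        // stay in their BusBuffers until the host requests them with the same timestamp.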
        // copy back
        audioBuffer.pop (*outBusBuffers[(int) outputBusNumber]->get(),
                         mapper.get (false, (int) outputBusNumber));

        return noErr;
    }
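
    // Runs the processor for one block while holding its callback lock, honouring both the
    // processor's suspended state and the host-controlled bypass flag on the AUAudioUnit.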
    void processBlock (AudioBuffer<float>& buffer, MidiBuffer& midiBuffer) noexcept
    {
        auto& processor = getAudioProcessor();
        const ScopedLock sl (processor.getCallbackLock());

        if (processor.isSuspended())
            buffer.clear();
        else if ([au shouldBypassEffect])
            processor.processBlockBypassed (buffer, midiBuffer);
        else
            processor.processBlock (buffer, midiBuffer);
    }

    //==============================================================================
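    // The methods below back the Objective-C blocks installed on the AUParameterTree
    // (paramObserver, paramProvider, stringFromValueProvider, valueFromStringProvider).
    // The AU-side value range can be wider than JUCE's normalised 0..1 (see
    // getMaximumParameterValue() earlier in this file), so values are rescaled in both directions.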
    void valueChangedFromHost (AUParameter* param, AUValue value)
    {
        if (param != nullptr)
        {
            const int idx = getJuceParameterIndexForAUAddress ([param address]);

            auto normalisedValue = value / getMaximumParameterValue (idx);
            setAudioProcessorParameter (idx, normalisedValue);
        }
    }

    AUValue getValue (AUParameter* param)
    {
        if (param != nullptr)
        {
            const int idx = getJuceParameterIndexForAUAddress ([param address]);
            auto& processor = getAudioProcessor();

            if (isPositiveAndBelow (idx, processor.getNumParameters()))
                return processor.getParameter (idx) * getMaximumParameterValue (idx);
        }

        return 0;
    }

    void valueChangedForObserver (AUParameterAddress, AUValue)
    {
        // this will have already been handled by valueChangedFromHost
    }

    NSString* stringFromValue (AUParameter* param, const AUValue* value)
    {
        String text;

        if (param != nullptr && value != nullptr)
        {
            const int idx = getJuceParameterIndexForAUAddress ([param address]);
            auto& processor = getAudioProcessor();

            if (auto* p = processor.getParameters()[idx])
                text = p->getText (*value / getMaximumParameterValue (idx), 0);
            else
                text = String (*value);
        }

        return juceStringToNS (text);
    }

    AUValue valueFromString (AUParameter* param, NSString* str)
    {
        if (param != nullptr && str != nullptr)
        {
            const int idx = getJuceParameterIndexForAUAddress ([param address]);
            auto& processor = getAudioProcessor();

            const String text (nsStringToJuce (str));

            if (auto* p = processor.getParameters()[idx])
                return p->getValueForText (text) * getMaximumParameterValue (idx);
            else
                return text.getFloatValue();
        }

        return 0;
    }

    //==============================================================================
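    // Parameter address mapping: with JUCE_FORCE_USE_LEGACY_PARAM_IDS the AU parameter address is
    // simply the JUCE parameter index. Otherwise, when the processor uses string parameter IDs
    // ("managed" parameters), the address is a 64-bit hash of the parameter ID, and
    // paramAddresses/paramMap translate between index and address. This relies on the parameter
    // IDs hashing to distinct values; two IDs with colliding hashes would map to the same address.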
   #if JUCE_FORCE_USE_LEGACY_PARAM_IDS
    inline AUParameterAddress getAUParameterAddressForIndex (int paramIndex) const noexcept    { return static_cast<AUParameterAddress> (paramIndex); }
    inline int getJuceParameterIndexForAUAddress (AUParameterAddress address) const noexcept   { return static_cast<int> (address); }
   #else
    AUParameterAddress generateAUParameterAddressForIndex (int paramIndex) const
    {
        auto& processor = getAudioProcessor();
        const int n = processor.getNumParameters();

        if (isPositiveAndBelow (paramIndex, n))
        {
            const String& juceParamID = processor.getParameterID (paramIndex);
            return usingManagedParameter ? static_cast<AUParameterAddress> (juceParamID.hashCode64())
                                         : static_cast<AUParameterAddress> (juceParamID.getIntValue());
        }

        return static_cast<AUParameterAddress> (-1);
    }

    inline AUParameterAddress getAUParameterAddressForIndex (int paramIndex) const noexcept
    {
        return usingManagedParameter ? paramAddresses.getReference (paramIndex)
                                     : static_cast<AUParameterAddress> (paramIndex);
    }

    inline int getJuceParameterIndexForAUAddress (AUParameterAddress address) const noexcept
    {
        return usingManagedParameter ? paramMap[static_cast<int64> (address)]
                                     : static_cast<int> (address);
    }
   #endif
    //==============================================================================
    static const double kDefaultSampleRate;

    AudioProcessorHolder::Ptr processorHolder;

    int totalInChannels, totalOutChannels;

    ScopedPointer<AUAudioUnitBusArray> inputBusses;
    ScopedPointer<AUAudioUnitBusArray> outputBusses;

    ObjCBlock<AUImplementorValueObserver> paramObserver;
    ObjCBlock<AUImplementorValueProvider> paramProvider;
    ObjCBlock<AUImplementorStringFromValueCallback> stringFromValueProvider;
    ObjCBlock<AUImplementorValueFromStringCallback> valueFromStringProvider;

   #if ! JUCE_FORCE_USE_LEGACY_PARAM_IDS
    bool usingManagedParameter;
    Array<AUParameterAddress> paramAddresses;
    HashMap<int64, int> paramMap;
   #endif

    // to avoid recursion on parameter changes, we need to add an
    // editor observer to do the parameter changes
    ObjCBlock<AUParameterObserver> editorParamObserver;
    AUParameterObserverToken editorObserverToken;

    ScopedPointer<AUParameterTree> paramTree;
    ScopedPointer<NSMutableArray<NSNumber*>> overviewParams;
    ScopedPointer<NSMutableArray<NSNumber*>> channelCapabilities;

    ScopedPointer<NSMutableArray<AUAudioUnitPreset*>> factoryPresets;

    ObjCBlock<AUInternalRenderBlock> internalRenderBlock;

    AudioUnitHelpers::CoreAudioBufferList audioBuffer;
    AudioUnitHelpers::ChannelRemapper mapper;

    OwnedArray<BusBuffer> inBusBuffers, outBusBuffers;

    MidiBuffer midiMessages;

    ObjCBlock<AUHostMusicalContextBlock> hostMusicalContextCallback;
    ObjCBlock<AUHostTransportStateBlock> hostTransportStateCallback;

    AudioTimeStamp lastTimeStamp;
    CurrentPositionInfo lastAudioHead;

    String contextName;
};

const double JuceAudioUnitv3::kDefaultSampleRate = 44100.0;
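
// Factory entry point used when the Objective-C AUAudioUnit wrapper needs its C++ counterpart:
// it records that the plug-in is running as an AUv3 and then builds the implementation around
// the given AUAudioUnit instance.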
JuceAudioUnitv3Base* JuceAudioUnitv3Base::create (AUAudioUnit* audioUnit, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error)
{
    PluginHostType::jucePlugInClientCurrentWrapperType = AudioProcessor::wrapperType_AudioUnitv3;
    return new JuceAudioUnitv3 (audioUnit, descr, options, error);
}

//==============================================================================
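// JuceAUViewController is the C++ side of the exported view controller declared further down: it
// creates the AudioProcessor and its editor on the JUCE message thread, embeds the editor in the
// view controller's native view, and can also create the audio unit itself when the host asks
// the factory for one.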
class JuceAUViewController
{
public:
    JuceAUViewController (AUViewController<AUAudioUnitFactory>* p)
        : myself (p)
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());

        PluginHostType::jucePlugInClientCurrentWrapperType = AudioProcessor::wrapperType_AudioUnitv3;
        initialiseJuce_GUI();
    }

    ~JuceAUViewController()
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());

        if (processorHolder != nullptr)
            JuceAudioUnitv3::removeEditor (getAudioProcessor());
    }

    //==============================================================================
    void loadView()
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());

        if (AudioProcessor* p = createPluginFilterOfType (AudioProcessor::wrapperType_AudioUnitv3))
        {
            processorHolder = new AudioProcessorHolder (p);
            auto& processor = getAudioProcessor();

            if (processor.hasEditor())
            {
                if (AudioProcessorEditor* editor = processor.createEditorIfNeeded())
                {
                    preferredSize = editor->getBounds();

                    JUCE_IOS_MAC_VIEW* view = [[[JUCE_IOS_MAC_VIEW alloc] initWithFrame: convertToCGRect (editor->getBounds())] autorelease];
                    [myself setView: view];

                    editor->setVisible (true);
                    editor->addToDesktop (0, view);
                }
            }
        }
    }

    void viewDidLayoutSubviews()
    {
        if (processorHolder != nullptr && [myself view] != nullptr)
        {
            if (AudioProcessorEditor* editor = getAudioProcessor().getActiveEditor())
            {
                if (processorHolder->viewConfiguration != nullptr)
                    editor->hostMIDIControllerIsAvailable (processorHolder->viewConfiguration->hostHasMIDIController);

                editor->setBounds (convertToRectInt ([[myself view] bounds]));

                if (JUCE_IOS_MAC_VIEW* peerView = [[[myself view] subviews] objectAtIndex: 0])
                {
                   #if JUCE_IOS
                    [peerView setNeedsDisplay];
                   #else
                    [peerView setNeedsDisplay: YES];
                   #endif
                }
            }
        }
    }

    void didReceiveMemoryWarning()
    {
        if (processorHolder != nullptr)
            if (auto* processor = processorHolder->get())
                processor->memoryWarningReceived();
    }

    CGSize getPreferredContentSize() const
    {
        return CGSizeMake (static_cast<float> (preferredSize.getWidth()),
                           static_cast<float> (preferredSize.getHeight()));
    }

    //==============================================================================
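    // createAudioUnit may be called from any thread. Creation itself has to happen on the JUCE
    // message thread (it needs the view and editor to exist), so off-thread requests are posted
    // there as a CallbackMessage and the calling thread waits on a WaitableEvent for the result.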
    AUAudioUnit* createAudioUnit (const AudioComponentDescription& descr, NSError** error)
    {
        AUAudioUnit* retval = nil;

        if (! MessageManager::getInstance()->isThisTheMessageThread())
        {
            WaitableEvent creationEvent;

            // AUv3 headers say that we may block this thread and that the message thread is guaranteed
            // to be unblocked
            struct AUCreator  : public CallbackMessage
            {
                JuceAUViewController& owner;
                AudioComponentDescription pDescr;
                NSError** pError;
                AUAudioUnit*& outAU;
                WaitableEvent& e;

                AUCreator (JuceAUViewController& parent, const AudioComponentDescription& paramDescr, NSError** paramError,
                           AUAudioUnit*& outputAU, WaitableEvent& event)
                    : owner (parent), pDescr (paramDescr), pError (paramError), outAU (outputAU), e (event)
                {}

                void messageCallback() override
                {
                    outAU = owner.createAudioUnitOnMessageThread (pDescr, pError);
                    e.signal();
                }
            };

            (new AUCreator (*this, descr, error, retval, creationEvent))->post();
            creationEvent.wait (-1);
        }
        else
        {
            retval = createAudioUnitOnMessageThread (descr, error);
        }

        return [retval autorelease];
    }

private:
    //==============================================================================
    AUViewController<AUAudioUnitFactory>* myself;
    AudioProcessorHolder::Ptr processorHolder = nullptr;
    Rectangle<int> preferredSize { 1, 1 };

    //==============================================================================
    AUAudioUnit* createAudioUnitOnMessageThread (const AudioComponentDescription& descr, NSError** error)
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());

        [myself view]; // this will call [view load] and ensure that the AudioProcessor has been instantiated

        if (processorHolder == nullptr)
            return nullptr;

        return (new JuceAudioUnitv3 (processorHolder, descr, 0, error))->getAudioUnit();
    }

    AudioProcessor& getAudioProcessor() const noexcept    { return **processorHolder; }
};

//==============================================================================
// necessary glue code
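// The Objective-C class name is generated from JucePlugin_AUExportPrefix (via
// JUCE_VIEWCONTROLLER_OBJC_NAME) so that several JUCE AUv3 plug-ins loaded into the same host
// process each get a uniquely named factory view controller.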
@interface JUCE_VIEWCONTROLLER_OBJC_NAME (JucePlugin_AUExportPrefix) : AUViewController<AUAudioUnitFactory>
@end

@implementation JUCE_VIEWCONTROLLER_OBJC_NAME (JucePlugin_AUExportPrefix)
{
    ScopedPointer<JuceAUViewController> cpp;
}

- (instancetype) initWithNibName: (nullable NSString*) nib bundle: (nullable NSBundle*) bndl
{
    self = [super initWithNibName: nib bundle: bndl];
    cpp = new JuceAUViewController (self);
    return self;
}

- (void) loadView                 { cpp->loadView(); }
- (AUAudioUnit*) createAudioUnitWithComponentDescription: (AudioComponentDescription) desc error: (NSError**) error  { return cpp->createAudioUnit (desc, error); }
- (CGSize) preferredContentSize   { return cpp->getPreferredContentSize(); }
- (void) viewDidLayoutSubviews    { cpp->viewDidLayoutSubviews(); }
- (void) didReceiveMemoryWarning  { cpp->didReceiveMemoryWarning(); }
@end

//==============================================================================
#if JUCE_IOS
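// Inter-App Audio is not available inside an AUv3 app extension, so the IAA helpers that the
// shared iOS plug-in code references are provided here as no-op stubs.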
bool  JUCE_CALLTYPE juce_isInterAppAudioConnected()   { return false; }
void  JUCE_CALLTYPE juce_switchToHostApplication()    {}

#if JUCE_MODULE_AVAILABLE_juce_gui_basics
Image JUCE_CALLTYPE juce_getIAAHostIcon (int)         { return {}; }
#endif
#endif

#pragma clang diagnostic pop

#endif