/*
  ==============================================================================

   This file is part of the JUCE library.
   Copyright (c) 2015 - ROLI Ltd.

   Permission is granted to use this software under the terms of either:
   a) the GPL v2 (or any later version)
   b) the Affero GPL v3

   Details of these licenses can be found at: www.gnu.org/licenses

   JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
   A PARTICULAR PURPOSE. See the GNU General Public License for more details.

   ------------------------------------------------------------------------------

   To release a closed-source product which uses JUCE, commercial licenses are
   available: visit www.juce.com for more information.

  ==============================================================================
*/
#include "../../juce_core/system/juce_TargetPlatform.h"
#include "../utility/juce_CheckSettingMacros.h"

#if JucePlugin_Build_AUv3

#import <CoreAudioKit/CoreAudioKit.h>
#import <AudioToolbox/AudioToolbox.h>
#import <AVFoundation/AVFoundation.h>

#if JUCE_MAC
 #if (! defined MAC_OS_X_VERSION_MIN_REQUIRED) || (! defined MAC_OS_X_VERSION_10_11) || (MAC_OS_X_VERSION_MIN_REQUIRED < MAC_OS_X_VERSION_10_11)
  #error AUv3 needs Deployment Target OS X 10.11 or higher to compile
 #endif
#endif

#if JUCE_IOS
 #if (! defined __IPHONE_OS_VERSION_MIN_REQUIRED) || (! defined __IPHONE_9_0) || (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_9_0)
  #error AUv3 needs Deployment Target iOS 9.0 or higher to compile
 #endif
#endif

#ifndef __OBJC2__
 #error AUv3 needs Objective-C 2 support (compile with 64-bit)
#endif

#include "../utility/juce_IncludeSystemHeaders.h"
#include "../utility/juce_IncludeModuleHeaders.h"
#include "../../juce_core/native/juce_osx_ObjCHelpers.h"
#include "../utility/juce_PluginBusUtilities.h"
#include "../../juce_graphics/native/juce_mac_CoreGraphicsHelpers.h"

#include "juce_AU_Shared.h"

#define JUCE_VIEWCONTROLLER_OBJC_NAME(x) JUCE_JOIN_MACRO (x, FactoryAUv3)

#if ! JUCE_COMPILER_SUPPORTS_VARIADIC_TEMPLATES
 #error AUv3 wrapper requires variadic template support
#endif

#if JUCE_IOS
 #define JUCE_IOS_MAC_VIEW UIView
#else
 #define JUCE_IOS_MAC_VIEW NSView
#endif

#define JUCE_AUDIOUNIT_OBJC_NAME(x) JUCE_JOIN_MACRO (x, AUv3)

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wnullability-completeness"

JUCE_DEFINE_WRAPPER_TYPE (wrapperType_AudioUnitv3);

// TODO: ask Timur: use SFINAE to automatically generate this for all NSObjects
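// These ContainerDeletePolicy specialisations let ScopedPointer hold the Obj-C objects used
// below and clean them up with an Obj-C release message instead of operator delete.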
template <> struct ContainerDeletePolicy<AUAudioUnitBusArray>                  { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<AUParameterTree>                      { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<NSMutableArray<AUParameterNode*> >    { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<AUParameter>                          { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<NSMutableArray<AUAudioUnitBus*> >     { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<AUAudioUnitBus>                       { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<AVAudioFormat>                        { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<AVAudioPCMBuffer>                     { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<NSMutableArray<NSNumber*> >           { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<NSNumber>                             { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<NSMutableArray<AUAudioUnitPreset*> >  { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<AUAudioUnitPreset>                    { static void destroy (NSObject* o) { [o release]; } };

//==============================================================================
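// Reference-counted holder that owns the wrapped AudioProcessor, so the same processor
// instance can be shared between the AUv3 audio unit and the view controller.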
struct AudioProcessorHolder : public ReferenceCountedObject
{
    AudioProcessorHolder() {}
    AudioProcessorHolder (AudioProcessor* p) : processor (p) {}

    AudioProcessor& operator*() noexcept    { return *processor; }
    AudioProcessor* operator->() noexcept   { return processor; }
    AudioProcessor* get() noexcept          { return processor; }

    typedef ReferenceCountedObjectPtr<AudioProcessorHolder> Ptr;

private:
    ScopedPointer<AudioProcessor> processor;

    AudioProcessorHolder& operator= (AudioProcessor*) JUCE_DELETED_FUNCTION;
    AudioProcessorHolder (AudioProcessorHolder&) JUCE_DELETED_FUNCTION;
    AudioProcessorHolder& operator= (AudioProcessorHolder&) JUCE_DELETED_FUNCTION;
};

//==============================================================================
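// Base class that bridges the dynamically registered AUAudioUnit Obj-C subclass to C++:
// each AUAudioUnit selector is forwarded to the matching virtual method on this class.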
class JuceAudioUnitv3Base
{
public:
    JuceAudioUnitv3Base (const AudioComponentDescription& descr,
                         AudioComponentInstantiationOptions options,
                         NSError** error)
       #pragma clang diagnostic push
       #pragma clang diagnostic ignored "-Wobjc-method-access"
        : au ([audioUnitObjCClass.createInstance() initWithComponentDescription: descr
                                                                         options: options
                                                                           error: error
                                                                       juceClass: this])
       #pragma clang diagnostic pop
    {
    }

    JuceAudioUnitv3Base (AUAudioUnit* audioUnit) : au (audioUnit)
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());
        initialiseJuce_GUI();
    }

    virtual ~JuceAudioUnitv3Base() {}

    //==============================================================================
    AUAudioUnit* getAudioUnit() noexcept                                  { return au; }

    virtual int getVirtualMIDICableCount()                                { return 0; }
    virtual void reset()                                                  {}
    virtual bool shouldChangeToFormat (AVAudioFormat*, AUAudioUnitBus*)   { return true; }
    virtual AUAudioUnitPreset* getCurrentPreset()                         { return nullptr; }
    virtual void setCurrentPreset (AUAudioUnitPreset*)                    {}
    virtual NSTimeInterval getLatency()                                   { return 0.0; }
    virtual NSTimeInterval getTailTime()                                  { return 0.0; }
    virtual bool getCanProcessInPlace()                                   { return false; }
    virtual bool getRenderingOffline()                                    { return false; }

    //==============================================================================
    virtual AUAudioUnitBusArray* getInputBusses()          = 0;
    virtual AUAudioUnitBusArray* getOutputBusses()         = 0;
    virtual AUParameterTree* getParameterTree()            = 0;
    virtual AUInternalRenderBlock getInternalRenderBlock() = 0;
    virtual void setRenderingOffline (bool offline)        = 0;
    virtual NSArray<NSNumber*>* getChannelCapabilities()   = 0;

    //==============================================================================
    virtual NSArray<NSNumber*>* parametersForOverviewWithCount (int)
    {
        return [NSArray<NSNumber*> array];
    }

    virtual NSArray<AUAudioUnitPreset*>* getFactoryPresets()
    {
        return [NSArray<AUAudioUnitPreset*> array];
    }

    virtual NSDictionary<NSString*, id>* getFullState()
    {
        objc_super s = { getAudioUnit(), [AUAudioUnit class] };
        return ObjCMsgSendSuper<NSDictionary<NSString*, id>*> (&s, @selector (fullState));
    }

    virtual void setFullState (NSDictionary<NSString*, id>* state)
    {
        objc_super s = { getAudioUnit(), [AUAudioUnit class] };
        ObjCMsgSendSuper<void, NSDictionary<NSString*, id>*> (&s, @selector (setFullState:), state);
    }

    virtual bool allocateRenderResourcesAndReturnError (NSError** outError)
    {
        objc_super s = { getAudioUnit(), [AUAudioUnit class] };

        if (! ObjCMsgSendSuper<BOOL, NSError**> (&s, @selector (allocateRenderResourcesAndReturnError:), outError))
            return false;

        return true;
    }

    virtual void deallocateRenderResources()
    {
        objc_super s = { getAudioUnit(), [AUAudioUnit class] };
        ObjCMsgSendSuper<void> (&s, @selector (deallocateRenderResources));
    }

private:
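    // ObjCClass helper that registers the AUAudioUnit subclass at runtime and routes its
    // Obj-C selectors to the C++ virtuals above via the "cppObject" instance variable.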
    struct Class : public ObjCClass<AUAudioUnit>
    {
        Class() : ObjCClass<AUAudioUnit> ("AUAudioUnit_")
        {
            addIvar<JuceAudioUnitv3Base*> ("cppObject");

            addMethod (@selector (initWithComponentDescription:options:error:juceClass:),
                       initWithComponentDescriptionAndJuceClass, "@@:",
                       @encode (AudioComponentDescription),
                       @encode (AudioComponentInstantiationOptions), "^@@");

            addMethod (@selector (initWithComponentDescription:options:error:),
                       initWithComponentDescription, "@@:",
                       @encode (AudioComponentDescription),
                       @encode (AudioComponentInstantiationOptions), "^@");

            addMethod (@selector (dealloc), dealloc, "v@:");
            addMethod (@selector (inputBusses), getInputBusses, "@@:");
            addMethod (@selector (outputBusses), getOutputBusses, "@@:");
            addMethod (@selector (parameterTree), getParameterTree, "@@:");
            addMethod (@selector (deallocateRenderResources), deallocateRenderResources, "v@:");
            addMethod (@selector (reset), reset, "v@:");
            addMethod (@selector (shouldChangeToFormat:forBus:), shouldChangeToFormat, "B@:@@");
            addMethod (@selector (factoryPresets), getFactoryPresets, "@@:");
            addMethod (@selector (currentPreset), getCurrentPreset, "@@:");
            addMethod (@selector (setCurrentPreset:), setCurrentPreset, "v@:@");
            addMethod (@selector (fullState), getFullState, "@@:");
            addMethod (@selector (setFullState:), setFullState, "v@:@");
            addMethod (@selector (channelCapabilities), getChannelCapabilities, "@@:");
            addMethod (@selector (allocateRenderResourcesAndReturnError:), allocateRenderResourcesAndReturnError, "B@:^@");
            addMethod (@selector (parametersForOverviewWithCount:), parametersForOverviewWithCount, "@@:", @encode (NSInteger));
            addMethod (@selector (setRenderingOffline:), setRenderingOffline, "v@:", @encode (BOOL));
            addMethod (@selector (internalRenderBlock), getInternalRenderBlock, @encode (AUInternalRenderBlock), "@:");
            addMethod (@selector (virtualMIDICableCount), getVirtualMIDICableCount, @encode (NSInteger), "@:");
            addMethod (@selector (latency), getLatency, @encode (NSTimeInterval), "@:");
            addMethod (@selector (tailTime), getTailTime, @encode (NSTimeInterval), "@:");
            addMethod (@selector (canProcessInPlace), getCanProcessInPlace, @encode (BOOL), "@:");
            addMethod (@selector (isRenderingOffline), getRenderingOffline, @encode (BOOL), "@:");

            registerClass();
        }

        //==============================================================================
        static JuceAudioUnitv3Base* _this (id self)              { return getIvar<JuceAudioUnitv3Base*> (self, "cppObject"); }
        static void setThis (id self, JuceAudioUnitv3Base* cpp)  { object_setInstanceVariable (self, "cppObject", cpp); }

        //==============================================================================
        static id initWithComponentDescription (id _self, SEL, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error)
        {
            AUAudioUnit* self = _self;

            objc_super s = { self, [AUAudioUnit class] };
            self = ObjCMsgSendSuper<AUAudioUnit*, AudioComponentDescription,
                                    AudioComponentInstantiationOptions, NSError**> (&s, @selector (initWithComponentDescription:options:error:), descr, options, error);

            JuceAudioUnitv3Base* juceAU = JuceAudioUnitv3Base::create (self, descr, options, error);
            setThis (self, juceAU);

            return self;
        }

        static id initWithComponentDescriptionAndJuceClass (id _self, SEL, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error, JuceAudioUnitv3Base* juceAU)
        {
            AUAudioUnit* self = _self;

            objc_super s = { self, [AUAudioUnit class] };
            self = ObjCMsgSendSuper<AUAudioUnit*, AudioComponentDescription,
                                    AudioComponentInstantiationOptions, NSError**> (&s, @selector (initWithComponentDescription:options:error:), descr, options, error);

            setThis (self, juceAU);
            return self;
        }

        static void dealloc (id self, SEL)                                                          { delete _this (self); }

        static AUAudioUnitBusArray* getInputBusses (id self, SEL)                                   { return _this (self)->getInputBusses(); }
        static AUAudioUnitBusArray* getOutputBusses (id self, SEL)                                  { return _this (self)->getOutputBusses(); }
        static AUParameterTree* getParameterTree (id self, SEL)                                     { return _this (self)->getParameterTree(); }
        static AUInternalRenderBlock getInternalRenderBlock (id self, SEL)                          { return _this (self)->getInternalRenderBlock(); }
        static BOOL allocateRenderResourcesAndReturnError (id self, SEL, NSError** error)           { return _this (self)->allocateRenderResourcesAndReturnError (error); }
        static void deallocateRenderResources (id self, SEL)                                        { _this (self)->deallocateRenderResources(); }
        static void reset (id self, SEL)                                                            { _this (self)->reset(); }
        static NSInteger getVirtualMIDICableCount (id self, SEL)                                    { return _this (self)->getVirtualMIDICableCount(); }
        static BOOL shouldChangeToFormat (id self, SEL, AVAudioFormat* format, AUAudioUnitBus* bus) { return _this (self)->shouldChangeToFormat (format, bus); }
        static NSArray<NSNumber*>* parametersForOverviewWithCount (id self, SEL, NSInteger count)   { return _this (self)->parametersForOverviewWithCount (static_cast<int> (count)); }
        static NSArray<AUAudioUnitPreset*>* getFactoryPresets (id self, SEL)                        { return _this (self)->getFactoryPresets(); }
        static AUAudioUnitPreset* getCurrentPreset (id self, SEL)                                   { return _this (self)->getCurrentPreset(); }
        static void setCurrentPreset (id self, SEL, AUAudioUnitPreset* preset)                      { return _this (self)->setCurrentPreset (preset); }
        static NSDictionary<NSString*, id>* getFullState (id self, SEL)                             { return _this (self)->getFullState(); }
        static void setFullState (id self, SEL, NSDictionary<NSString*, id>* state)                 { return _this (self)->setFullState (state); }
        static NSTimeInterval getLatency (id self, SEL)                                             { return _this (self)->getLatency(); }
        static NSTimeInterval getTailTime (id self, SEL)                                            { return _this (self)->getTailTime(); }
        static BOOL getCanProcessInPlace (id self, SEL)                                             { return _this (self)->getCanProcessInPlace(); }
        static BOOL getRenderingOffline (id self, SEL)                                              { return _this (self)->getRenderingOffline(); }
        static void setRenderingOffline (id self, SEL, BOOL renderingOffline)                       { _this (self)->setRenderingOffline (renderingOffline); }
        static NSArray<NSNumber*>* getChannelCapabilities (id self, SEL)                            { return _this (self)->getChannelCapabilities(); }
    };

    static JuceAudioUnitv3Base* create (AUAudioUnit*, AudioComponentDescription, AudioComponentInstantiationOptions, NSError**);

    //==============================================================================
    static Class audioUnitObjCClass;

protected:
    AUAudioUnit* au;
};

//==============================================================================
JuceAudioUnitv3Base::Class JuceAudioUnitv3Base::audioUnitObjCClass;

//==============================================================================
//=========================== The actual AudioUnit =============================
//==============================================================================
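// JuceAudioUnitv3 owns the wrapped AudioProcessor (via an AudioProcessorHolder), exposes its
// buses, parameters and presets to the host, and implements AudioPlayHead so the processor
// can query the host transport.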
class JuceAudioUnitv3 : public JuceAudioUnitv3Base,
                        public AudioProcessorListener,
                        public AudioPlayHead
{
public:
    JuceAudioUnitv3 (const AudioProcessorHolder::Ptr& processor,
                     const AudioComponentDescription& descr,
                     AudioComponentInstantiationOptions options,
                     NSError** error)
        : JuceAudioUnitv3Base (descr, options, error),
          processorHolder (processor),
          busUtils (**processorHolder, true, 8),
          mapper (busUtils)
    {
        init();
    }

    JuceAudioUnitv3 (AUAudioUnit* audioUnit, AudioComponentDescription, AudioComponentInstantiationOptions, NSError**)
        : JuceAudioUnitv3Base (audioUnit),
          processorHolder (new AudioProcessorHolder (createPluginFilterOfType (AudioProcessor::wrapperType_AudioUnitv3))),
          busUtils (**processorHolder, true, 8),
          mapper (busUtils)
    {
        init();
    }

    ~JuceAudioUnitv3()
    {
        auto& processor = getAudioProcessor();
        processor.removeListener (this);

        if (AudioProcessorEditor* editor = processor.getActiveEditor())
            processor.editorBeingDeleted (editor);

        if (editorObserverToken != nullptr)
        {
            [paramTree removeParameterObserver: editorObserverToken];
            editorObserverToken = nullptr;
        }
    }

    //==============================================================================
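    // Shared by both constructors: prepares the processor with a default rate/block size and
    // builds the channel-capability list, render block, parameters, presets and buses.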
    void init()
    {
        busUtils.init();

        getAudioProcessor().setPlayHead (this);

        totalInChannels  = busUtils.findTotalNumChannels (true);
        totalOutChannels = busUtils.findTotalNumChannels (false);

        {
            channelCapabilities = [[NSMutableArray<NSNumber*> alloc] init];

            Array<AUChannelInfo> channelInfo = AudioUnitHelpers::getAUChannelInfo (busUtils);

            for (int i = 0; i < channelInfo.size(); ++i)
            {
                AUChannelInfo& info = channelInfo.getReference (i);

                [channelCapabilities addObject: [NSNumber numberWithInteger: info.inChannels]];
                [channelCapabilities addObject: [NSNumber numberWithInteger: info.outChannels]];
            }
        }

        editorObserverToken = nullptr;
        internalRenderBlock = CreateObjCBlock (this, &JuceAudioUnitv3::renderCallback);

        const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];

        auto& processor = getAudioProcessor();
        processor.setRateAndBufferSizeDetails (kDefaultSampleRate, static_cast<int> (maxFrames));
        processor.prepareToPlay (kDefaultSampleRate, static_cast<int> (maxFrames));
        processor.addListener (this);

        addParameters();
        addPresets();

        addAudioUnitBusses (true);
        addAudioUnitBusses (false);
    }

    //==============================================================================
    AudioProcessor& getAudioProcessor() const noexcept         { return **processorHolder; }

    AUAudioUnitBusArray* getInputBusses() override              { return inputBusses; }
    AUAudioUnitBusArray* getOutputBusses() override             { return outputBusses; }
    AUParameterTree* getParameterTree() override                { return paramTree; }
    AUInternalRenderBlock getInternalRenderBlock() override     { return internalRenderBlock; }
    NSArray<AUAudioUnitPreset*>* getFactoryPresets() override   { return factoryPresets; }
    bool getRenderingOffline() override                         { return getAudioProcessor().isNonRealtime(); }
    void setRenderingOffline (bool offline) override            { getAudioProcessor().setNonRealtime (offline); }
    NSArray<NSNumber*>* getChannelCapabilities() override       { return channelCapabilities; }

    //==============================================================================
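    // Factory presets map one-to-one onto the processor's programs.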
    AUAudioUnitPreset* getCurrentPreset() override
    {
        const int n = static_cast<int> ([factoryPresets count]);
        const int idx = static_cast<int> (getAudioProcessor().getCurrentProgram());

        if (idx < n)
            return [factoryPresets objectAtIndex: static_cast<unsigned int> (idx)];

        return nullptr;
    }

    void setCurrentPreset (AUAudioUnitPreset* preset) override
    {
        const int n = static_cast<int> ([factoryPresets count]);
        const int idx = static_cast<int> ([preset number]);

        if (isPositiveAndBelow (idx, n))
            getAudioProcessor().setCurrentProgram (idx);
    }

    //==============================================================================
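    // Plug-in state is saved into the AU's fullState dictionary under a JUCE-specific key,
    // on top of whatever the AUAudioUnit superclass stores there.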
    NSDictionary<NSString*, id>* getFullState() override
    {
        NSMutableDictionary<NSString*, id>* retval = [[NSMutableDictionary<NSString*, id> alloc] init];

        {
            NSDictionary<NSString*, id>* superRetval = JuceAudioUnitv3Base::getFullState();

            if (superRetval != nullptr)
                [retval addEntriesFromDictionary: superRetval];
        }

        juce::MemoryBlock state;
        getAudioProcessor().getCurrentProgramStateInformation (state);

        if (state.getSize() > 0)
        {
            NSData* ourState = [[NSData alloc] initWithBytes: state.getData()
                                                      length: state.getSize()];

            NSString* nsKey = [[NSString alloc] initWithUTF8String: JUCE_STATE_DICTIONARY_KEY];

            [retval setObject: ourState
                       forKey: nsKey];

            [nsKey release];
            [ourState release];
        }

        return [retval autorelease];
    }

    void setFullState (NSDictionary<NSString*, id>* state) override
    {
        if (state == nullptr)
            return;

        NSMutableDictionary<NSString*, id>* modifiedState = [[NSMutableDictionary<NSString*, id> alloc] init];
        [modifiedState addEntriesFromDictionary: state];

        NSString* nsPresetKey = [[NSString alloc] initWithUTF8String: kAUPresetDataKey];
        [modifiedState removeObjectForKey: nsPresetKey];
        [nsPresetKey release];

        JuceAudioUnitv3Base::setFullState (modifiedState);

        NSString* nsKey = [[NSString alloc] initWithUTF8String: JUCE_STATE_DICTIONARY_KEY];
        NSObject* obj = [modifiedState objectForKey: nsKey];
        [nsKey release];

        if (obj != nullptr)
        {
            if ([obj isKindOfClass: [NSData class]])
            {
                NSData* data = reinterpret_cast<NSData*> (obj);
                const int numBytes = static_cast<int> ([data length]);
                const juce::uint8* const rawBytes = reinterpret_cast<const juce::uint8* const> ([data bytes]);

                if (numBytes > 0)
                    getAudioProcessor().setCurrentProgramStateInformation (rawBytes, numBytes);
            }
        }

        [modifiedState release];
    }

    //==============================================================================
    NSArray<NSNumber*>* parametersForOverviewWithCount (int count) override
    {
        const int n = static_cast<int> ([overviewParams count]);

        if (count >= n)
            return overviewParams;

        NSMutableArray<NSNumber*>* retval = [[NSMutableArray<NSNumber*> alloc] initWithArray: overviewParams];
        [retval removeObjectsInRange: NSMakeRange (static_cast<unsigned int> (count), static_cast<unsigned int> (n - count))];

        return [retval autorelease];
    }

    int getVirtualMIDICableCount() override
    {
       #if JucePlugin_WantsMidiInput
        return 1;
       #else
        return 0;
       #endif
    }

    //==============================================================================
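    // Render-resource allocation: (re)creates the per-bus buffer lists, the channel mapper and
    // the scratch audio buffer, prepares the processor at the bus sample rate, and caches the
    // host's musical-context and transport-state blocks for use in getCurrentPosition().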
    bool allocateRenderResourcesAndReturnError (NSError** outError) override
    {
        const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];

        if (! JuceAudioUnitv3Base::allocateRenderResourcesAndReturnError (outError))
            return false;

        totalInChannels  = busUtils.findTotalNumChannels (true);
        totalOutChannels = busUtils.findTotalNumChannels (false);

        allocateBusBuffer (true);
        allocateBusBuffer (false);

        mapper.alloc();

        audioBuffer.prepare (totalInChannels, totalOutChannels, static_cast<int> (maxFrames));

        double sampleRate = (jmax (busUtils.getBusCount (true), busUtils.getBusCount (false)) > 0 ?
                             [[[([inputBusses count] > 0 ? inputBusses : outputBusses) objectAtIndexedSubscript: 0] format] sampleRate] : 44100.0);

        auto& processor = getAudioProcessor();
        processor.setRateAndBufferSizeDetails (sampleRate, static_cast<int> (maxFrames));
        processor.prepareToPlay (sampleRate, static_cast<int> (maxFrames));

        zeromem (&lastAudioHead, sizeof (lastAudioHead));
        hostMusicalContextCallback = [getAudioUnit() musicalContextBlock];
        hostTransportStateCallback = [getAudioUnit() transportStateBlock];

        reset();

        return true;
    }

    void deallocateRenderResources() override
    {
        hostMusicalContextCallback = nullptr;
        hostTransportStateCallback = nullptr;

        getAudioProcessor().releaseResources();
        audioBuffer.release();

        inBusBuffers .clear();
        outBusBuffers.clear();

        mapper.release();

        JuceAudioUnitv3Base::deallocateRenderResources();
    }

    void reset() override
    {
        midiMessages.clear();
        lastTimeStamp.mSampleTime = std::numeric_limits<Float64>::max();
    }

    //==============================================================================
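    // Called by the host before it changes a bus format: accept the change only if the
    // processor can handle the requested channel layout on that bus.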
    bool shouldChangeToFormat (AVAudioFormat* format, AUAudioUnitBus* bus) override
    {
        const bool isInput = ([bus busType] == AUAudioUnitBusTypeInput);
        const int busIdx = static_cast<int> ([bus index]);
        const int newNumChannels = static_cast<int> ([format streamDescription]->mChannelsPerFrame);

        AudioChannelSet newLayout;

        if (const AVAudioChannelLayout* layout = [format channelLayout])
            newLayout = AudioUnitHelpers::CALayoutTagToChannelSet ([layout layoutTag]);
        else
            newLayout = busUtils.getDefaultLayoutForChannelNumAndBus (isInput, busIdx, newNumChannels);

        if (newLayout.size() != newNumChannels)
            return false;

        bool success = getAudioProcessor().setPreferredBusArrangement (isInput, busIdx, newLayout);

        totalInChannels  = busUtils.findTotalNumChannels (true);
        totalOutChannels = busUtils.findTotalNumChannels (false);

        return success;
    }

    //==============================================================================
    void audioProcessorChanged (AudioProcessor* processor) override
    {
        ignoreUnused (processor);

        [au willChangeValueForKey: @"allParameterValues"];
        [au didChangeValueForKey:  @"allParameterValues"];
    }

    void audioProcessorParameterChanged (AudioProcessor*, int idx, float newValue) override
    {
        if (isPositiveAndBelow (idx, getAudioProcessor().getNumParameters()))
        {
            if (AUParameter* param = [paramTree parameterWithAddress: getAUParameterAddressForIndex (idx)])
            {
                if (editorObserverToken != nullptr)
                    [param setValue: newValue originator: editorObserverToken];
                else
                    [param setValue: newValue];
            }
        }
    }

    //==============================================================================
    NSTimeInterval getLatency() override
    {
        auto& p = getAudioProcessor();
        return p.getLatencySamples() / p.getSampleRate();
    }

    NSTimeInterval getTailTime() override       { return getAudioProcessor().getTailLengthSeconds(); }

    //==============================================================================
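    // AudioPlayHead implementation: fills in position info from the last render timestamp and,
    // when available, the host's musicalContextBlock and transportStateBlock.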
    bool getCurrentPosition (CurrentPositionInfo& info) override
    {
        bool musicContextCallSucceeded = false;
        bool transportStateCallSucceeded = false;

        info = lastAudioHead;
        info.timeInSamples = (int64) (lastTimeStamp.mSampleTime + 0.5);
        info.timeInSeconds = info.timeInSamples / getAudioProcessor().getSampleRate();

        switch (lastTimeStamp.mSMPTETime.mType)
        {
            case kSMPTETimeType24:          info.frameRate = AudioPlayHead::fps24; break;
            case kSMPTETimeType25:          info.frameRate = AudioPlayHead::fps25; break;
            case kSMPTETimeType30Drop:      info.frameRate = AudioPlayHead::fps30drop; break;
            case kSMPTETimeType30:          info.frameRate = AudioPlayHead::fps30; break;
            case kSMPTETimeType2997:        info.frameRate = AudioPlayHead::fps2997; break;
            case kSMPTETimeType2997Drop:    info.frameRate = AudioPlayHead::fps2997drop; break;
            default:                        info.frameRate = AudioPlayHead::fpsUnknown; break;
        }

        double num;
        NSInteger den;
        NSInteger outDeltaSampleOffsetToNextBeat;
        double outCurrentMeasureDownBeat, bpm;
        double ppqPosition;

        if (hostMusicalContextCallback != nullptr)
        {
            AUHostMusicalContextBlock musicalContextCallback = hostMusicalContextCallback;

            if (musicalContextCallback (&bpm, &num, &den, &ppqPosition, &outDeltaSampleOffsetToNextBeat, &outCurrentMeasureDownBeat))
            {
                musicContextCallSucceeded = true;

                info.timeSigNumerator   = (int) num;
                info.timeSigDenominator = (int) den;
                info.ppqPositionOfLastBarStart = outCurrentMeasureDownBeat;
                info.bpm = bpm;
                info.ppqPosition = ppqPosition;
                info.ppqPositionOfLastBarStart = outCurrentMeasureDownBeat;
            }
        }

        double outCurrentSampleInTimeLine, outCycleStartBeat = 0, outCycleEndBeat = 0;
        AUHostTransportStateFlags flags;

        if (hostTransportStateCallback != nullptr)
        {
            AUHostTransportStateBlock transportStateCallback = hostTransportStateCallback;

            if (transportStateCallback (&flags, &outCurrentSampleInTimeLine, &outCycleStartBeat, &outCycleEndBeat))
            {
                transportStateCallSucceeded = true;

                info.timeInSamples = (int64) (outCurrentSampleInTimeLine + 0.5);
                info.timeInSeconds = info.timeInSamples / getAudioProcessor().getSampleRate();
                info.isPlaying   = ((flags & AUHostTransportStateMoving) != 0);
                info.isLooping   = ((flags & AUHostTransportStateCycling) != 0);
                info.isRecording = ((flags & AUHostTransportStateRecording) != 0);
                info.ppqLoopStart = outCycleStartBeat;
                info.ppqLoopEnd   = outCycleEndBeat;
            }
        }

        if (musicContextCallSucceeded && transportStateCallSucceeded)
            lastAudioHead = info;

        return true;
    }

private:
    //==============================================================================
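    // BusBuffer wraps one AUAudioUnitBus: it owns an AudioBufferList backed by a scratch buffer,
    // and prepare() points the list at the host's buffers whenever their layout is compatible,
    // so no copy is needed in that case.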
    struct BusBuffer
    {
        BusBuffer (AUAudioUnitBus* bus, int maxFramesPerBuffer)
            : auBus (bus), bufferList (nullptr),
              maxFrames (maxFramesPerBuffer),
              numberOfChannels (static_cast<int> ([[auBus format] channelCount])),
              isInterleaved ([[auBus format] isInterleaved])
        {
            alloc();
        }

        //==============================================================================
        void alloc()
        {
            const int numBuffers = isInterleaved ? 1 : numberOfChannels;
            int bytes = static_cast<int> (sizeof (AudioBufferList))
                          + ((numBuffers - 1) * static_cast<int> (sizeof (::AudioBuffer)));
            jassert (bytes > 0);

            bufferListStorage.calloc (static_cast<size_t> (bytes));
            bufferList = reinterpret_cast<AudioBufferList*> (bufferListStorage.getData());

            const int bufferChannels = isInterleaved ? numberOfChannels : 1;
            scratchBuffer.setSize (numBuffers, bufferChannels * maxFrames);
        }

        void dealloc()
        {
            bufferList = nullptr;
            bufferListStorage.free();
            scratchBuffer.setSize (0, 0);
        }

        //==============================================================================
        int numChannels() const noexcept           { return numberOfChannels; }
        bool interleaved() const noexcept          { return isInterleaved; }
        AudioBufferList* get() const noexcept      { return bufferList; }

        //==============================================================================
        void prepare (UInt32 nFrames, const AudioBufferList* other = nullptr) noexcept
        {
            const int numBuffers = isInterleaved ? 1 : numberOfChannels;
            const bool isCompatible = isCompatibleWith (other);

            bufferList->mNumberBuffers = static_cast<UInt32> (numBuffers);

            for (int i = 0; i < numBuffers; ++i)
            {
                const UInt32 bufferChannels = static_cast<UInt32> (isInterleaved ? numberOfChannels : 1);
                bufferList->mBuffers[i].mNumberChannels = bufferChannels;
                bufferList->mBuffers[i].mData = (isCompatible ? other->mBuffers[i].mData
                                                              : scratchBuffer.getWritePointer (i));
                bufferList->mBuffers[i].mDataByteSize = nFrames * bufferChannels * sizeof (float);
            }
        }

        //==============================================================================
        bool isCompatibleWith (const AudioBufferList* other) const noexcept
        {
            if (other == nullptr)
                return false;

            if (other->mNumberBuffers > 0)
            {
                const bool otherInterleaved = AudioUnitHelpers::isAudioBufferInterleaved (*other);
                const int otherChannels = static_cast<int> (otherInterleaved ? other->mBuffers[0].mNumberChannels
                                                                             : other->mNumberBuffers);

                return otherInterleaved == isInterleaved
                    && numberOfChannels == otherChannels;
            }

            return numberOfChannels == 0;
        }

    private:
        AUAudioUnitBus* auBus;
        HeapBlock<char> bufferListStorage;
        AudioBufferList* bufferList;
        int maxFrames, numberOfChannels;
        bool isInterleaved;
        AudioSampleBuffer scratchBuffer;
    };

    //==============================================================================
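    // Creates one AUAudioUnitBus per JUCE bus, using the bus's default channel count at
    // kDefaultSampleRate.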
    void addAudioUnitBusses (bool isInput)
    {
        ScopedPointer<NSMutableArray<AUAudioUnitBus*> > array = [[NSMutableArray<AUAudioUnitBus*> alloc] init];

        for (int i = 0; i < busUtils.getBusCount (isInput); ++i)
        {
            ScopedPointer<AUAudioUnitBus> audioUnitBus;

            {
                ScopedPointer<AVAudioFormat> defaultFormat = [[AVAudioFormat alloc] initStandardFormatWithSampleRate: kDefaultSampleRate
                                                                                                            channels: static_cast<AVAudioChannelCount> (busUtils.getNumChannels (isInput, i))];

                audioUnitBus = [[AUAudioUnitBus alloc] initWithFormat: defaultFormat
                                                                error: nullptr];
            }

            [array addObject: audioUnitBus];
        }

        (isInput ? inputBusses : outputBusses) = [[AUAudioUnitBusArray alloc] initWithAudioUnit: au
                                                                                        busType: (isInput ? AUAudioUnitBusTypeInput : AUAudioUnitBusTypeOutput)
                                                                                         busses: array];
    }

    void addParameters()
    {
        ScopedPointer<NSMutableArray<AUParameterNode*> > params = [[NSMutableArray<AUParameterNode*> alloc] init];

        paramObserver = CreateObjCBlock (this, &JuceAudioUnitv3::valueChangedFromHost);
        paramProvider = CreateObjCBlock (this, &JuceAudioUnitv3::getValue);

        overviewParams = [[NSMutableArray<NSNumber*> alloc] init];

        auto& processor = getAudioProcessor();
        const int n = processor.getNumParameters();

       #if ! JUCE_FORCE_USE_LEGACY_PARAM_IDS
        // check if all parameters are managed?
        usingManagedParameter = (processor.getParameters().size() == processor.getNumParameters());
       #endif

        for (int idx = 0; idx < n; ++idx)
        {
            const String identifier (idx);
            const String name = processor.getParameterName (idx);

            AudioUnitParameterOptions flags = (UInt32) (kAudioUnitParameterFlag_IsWritable
                                                      | kAudioUnitParameterFlag_IsReadable
                                                      | kAudioUnitParameterFlag_HasCFNameString
                                                      | kAudioUnitParameterFlag_ValuesHaveStrings);

           #if JucePlugin_AUHighResolutionParameters
            flags |= (UInt32) kAudioUnitParameterFlag_IsHighResolution;
           #endif

            // set whether the param is automatable (unnamed parameters aren't allowed to be automated)
            if (name.isEmpty() || ! processor.isParameterAutomatable (idx))
                flags |= kAudioUnitParameterFlag_NonRealTime;

            if (processor.isMetaParameter (idx))
                flags |= kAudioUnitParameterFlag_IsGlobalMeta;

           #if JUCE_FORCE_USE_LEGACY_PARAM_IDS
            AUParameterAddress address = static_cast<AUParameterAddress> (idx);
           #else
            AUParameterAddress address = generateAUParameterAddressForIndex (idx);

            paramAddresses.add (address);
            paramMap.set (static_cast<int64> (address), idx);
           #endif

            // create methods in AUParameterTree return unretained objects (!) -> see Apple header AUAudioUnitImplementation.h
            ScopedPointer<AUParameter> param = [[AUParameterTree createParameterWithIdentifier: juceStringToNS (identifier)
                                                                                          name: juceStringToNS (name)
                                                                                       address: address
                                                                                           min: 0.0f
                                                                                           max: 1.0f
                                                                                          unit: kAudioUnitParameterUnit_Generic
                                                                                      unitName: nullptr
                                                                                         flags: flags
                                                                                  valueStrings: nullptr
                                                                          dependentParameters: nullptr] retain];

            [params addObject: param];
            [overviewParams addObject: [NSNumber numberWithUnsignedLongLong: address]];
        }

        // create methods in AUParameterTree return unretained objects (!) -> see Apple header AUAudioUnitImplementation.h
        paramTree = [[AUParameterTree createTreeWithChildren: params] retain];

        [paramTree setImplementorValueObserver: paramObserver];
        [paramTree setImplementorValueProvider: paramProvider];

        if (processor.hasEditor())
        {
            editorParamObserver = CreateObjCBlock (this, &JuceAudioUnitv3::valueChangedForObserver);
            editorObserverToken = [paramTree tokenByAddingParameterObserver: editorParamObserver];
        }
    }

    void addPresets()
    {
        factoryPresets = [[NSMutableArray<AUAudioUnitPreset*> alloc] init];

        const int n = getAudioProcessor().getNumPrograms();

        for (int idx = 0; idx < n; ++idx)
        {
            String name = getAudioProcessor().getProgramName (idx);

            ScopedPointer<AUAudioUnitPreset> preset = [[AUAudioUnitPreset alloc] init];
            [preset setName: juceStringToNS (name)];
            [preset setNumber: static_cast<NSInteger> (idx)];

            [factoryPresets addObject: preset];
        }
    }

    //==============================================================================
    void allocateBusBuffer (bool isInput)
    {
        OwnedArray<BusBuffer>& busBuffers = isInput ? inBusBuffers : outBusBuffers;
        busBuffers.clear();

        const int n = busUtils.getBusCount (isInput);
        const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];

        for (int busIdx = 0; busIdx < n; ++busIdx)
            busBuffers.add (new BusBuffer ([(isInput ? inputBusses : outputBusses) objectAtIndexedSubscript: static_cast<unsigned int> (busIdx)],
                                           static_cast<int> (maxFrames)));
    }

    void processEvents (const AURenderEvent *__nullable realtimeEventListHead, int numParams, AUEventSampleTime startTime)
    {
        for (const AURenderEvent* event = realtimeEventListHead; event != nullptr; event = event->head.next)
        {
            switch (event->head.eventType)
            {
                case AURenderEventMIDI:
                {
                    const AUMIDIEvent& midiEvent = event->MIDI;
                    midiMessages.addEvent (midiEvent.data, midiEvent.length, static_cast<int> (midiEvent.eventSampleTime - startTime));
                }
                break;

                case AURenderEventParameter:
                case AURenderEventParameterRamp:
                {
                    const AUParameterEvent& paramEvent = event->parameter;
                    const int idx = getJuceParameterIndexForAUAddress (paramEvent.parameterAddress);

                    if (isPositiveAndBelow (idx, numParams))
                        getAudioProcessor().setParameter (idx, paramEvent.value);
                }
                break;

                default:
                    break;
            }
        }
    }
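
    // The AUInternalRenderBlock body: applies incoming MIDI/parameter events, prepares the
    // per-bus buffer lists (reusing the host's output buffers where possible), pulls inputs,
    // maps channels into one JUCE AudioSampleBuffer, runs processBlock() and copies the result
    // back to the requested output bus.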
    AUAudioUnitStatus renderCallback (AudioUnitRenderActionFlags* actionFlags, const AudioTimeStamp* timestamp, AUAudioFrameCount frameCount,
                                      NSInteger outputBusNumber, AudioBufferList* outputData, const AURenderEvent *__nullable realtimeEventListHead,
                                      AURenderPullInputBlock __nullable pullInputBlock)
    {
        jassert (static_cast<int> (frameCount) <= getAudioProcessor().getBlockSize());

        // process params
        const int numParams = getAudioProcessor().getNumParameters();
        processEvents (realtimeEventListHead, numParams, static_cast<AUEventSampleTime> (timestamp->mSampleTime));

        if (lastTimeStamp.mSampleTime != timestamp->mSampleTime)
        {
            lastTimeStamp = *timestamp;

            const int numInputBuses  = inBusBuffers. size();
            const int numOutputBuses = outBusBuffers.size();

            // prepare buffers
            {
                for (int busIdx = 0; busIdx < numOutputBuses; ++busIdx)
                {
                    BusBuffer& busBuffer = *outBusBuffers[busIdx];
                    const bool canUseDirectOutput =
                        (busIdx == outputBusNumber && outputData != nullptr && outputData->mNumberBuffers > 0);

                    busBuffer.prepare (frameCount, canUseDirectOutput ? outputData : nullptr);
                }

                for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
                {
                    BusBuffer& busBuffer = *inBusBuffers[busIdx];
                    busBuffer.prepare (frameCount, busIdx < numOutputBuses ? outBusBuffers[busIdx]->get() : nullptr);
                }

                audioBuffer.reset();
            }

            // pull inputs
            {
                for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
                {
                    BusBuffer& busBuffer = *inBusBuffers[busIdx];
                    AudioBufferList* buffer = busBuffer.get();

                    if (pullInputBlock == nullptr || pullInputBlock (actionFlags, timestamp, frameCount, busIdx, buffer) != noErr)
                        AudioUnitHelpers::clearAudioBuffer (*buffer);

                    if (actionFlags != nullptr && (*actionFlags & kAudioUnitRenderAction_OutputIsSilence) != 0)
                        AudioUnitHelpers::clearAudioBuffer (*buffer);
                }
            }

            // set buffer pointer to minimize copying
            {
                int chIdx = 0;

                for (int busIdx = 0; busIdx < numOutputBuses; ++busIdx)
                {
                    BusBuffer& busBuffer = *outBusBuffers[busIdx];
                    AudioBufferList* buffer = busBuffer.get();

                    const bool interleaved = busBuffer.interleaved();
                    const int numChannels = busBuffer.numChannels();

                    const int* outLayoutMap = mapper.get (false, busIdx);

                    for (int ch = 0; ch < numChannels; ++ch)
                        audioBuffer.setBuffer (chIdx++, interleaved ? nullptr : static_cast<float*> (buffer->mBuffers[outLayoutMap[ch]].mData));
                }

                // use input pointers on remaining channels
                int channelCount = 0;
                for (int busIdx = 0; chIdx < totalInChannels;)
                {
                    busIdx = busUtils.getBusIdxForChannelIdx (true, chIdx, channelCount, busIdx);

                    BusBuffer& busBuffer = *inBusBuffers[busIdx];
                    AudioBufferList* buffer = busBuffer.get();

                    const bool interleaved = busBuffer.interleaved();
                    const int numChannels = busBuffer.numChannels();

                    const int* inLayoutMap = mapper.get (true, busIdx);

                    for (int ch = chIdx - channelCount; ch < numChannels; ++ch)
                        audioBuffer.setBuffer (chIdx++, interleaved ? nullptr : static_cast<float*> (buffer->mBuffers[inLayoutMap[ch]].mData));
                }
            }

            // copy input
            {
                for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
                    audioBuffer.push (*inBusBuffers[busIdx]->get(), mapper.get (true, busIdx));

                // clear remaining channels
                for (int i = totalInChannels; i < totalOutChannels; ++i)
                    zeromem (audioBuffer.push(), sizeof (float) * frameCount);
            }

            // process audio
            processBlock (audioBuffer.getBuffer (frameCount), midiMessages);
            midiMessages.clear();
        }

        // copy back
        audioBuffer.pop (*outBusBuffers[(int) outputBusNumber]->get(),
                         mapper.get (false, (int) outputBusNumber));

        return noErr;
    }

    void processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiBuffer) noexcept
    {
        auto& processor = getAudioProcessor();
        const ScopedLock sl (processor.getCallbackLock());

        if (processor.isSuspended())
            buffer.clear();
        else if ([au shouldBypassEffect])
            processor.processBlockBypassed (buffer, midiBuffer);
        else
            processor.processBlock (buffer, midiBuffer);
    }

    //==============================================================================
    void valueChangedFromHost (AUParameter* param, AUValue value)
    {
        if (param != nullptr)
        {
            const int idx = getJuceParameterIndexForAUAddress ([param address]);

            auto& processor = getAudioProcessor();

            if (isPositiveAndBelow (idx, processor.getNumParameters()))
                processor.setParameter (idx, value);
        }
    }

    AUValue getValue (AUParameter* param)
    {
        if (param != nullptr)
        {
            const int idx = getJuceParameterIndexForAUAddress ([param address]);

            auto& processor = getAudioProcessor();

            if (isPositiveAndBelow (idx, processor.getNumParameters()))
                return processor.getParameter (idx);
        }

        return 0;
    }
    void valueChangedForObserver (AUParameterAddress, AUValue)
    {
        // this will have already been handled by valueChangedFromHost
    }

    //==============================================================================
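    // Mapping between JUCE parameter indices and AUParameterAddress values. With managed
    // parameters the address is derived from a hash of the parameter ID, so a reverse map is
    // kept; with legacy IDs the index is used directly as the address.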
   #if JUCE_FORCE_USE_LEGACY_PARAM_IDS
    inline AUParameterAddress getAUParameterAddressForIndex (int paramIndex) const noexcept    { return static_cast<AUParameterAddress> (paramIndex); }
    inline int getJuceParameterIndexForAUAddress (AUParameterAddress address) const noexcept   { return static_cast<int> (address); }
   #else
    AUParameterAddress generateAUParameterAddressForIndex (int paramIndex) const
    {
        auto& processor = getAudioProcessor();
        const int n = processor.getNumParameters();

        if (isPositiveAndBelow (paramIndex, n))
        {
            const String& juceParamID = processor.getParameterID (paramIndex);
            return usingManagedParameter ? static_cast<AUParameterAddress> (juceParamID.hashCode64())
                                         : static_cast<AUParameterAddress> (juceParamID.getIntValue());
        }

        return static_cast<AUParameterAddress> (-1);
    }

    inline AUParameterAddress getAUParameterAddressForIndex (int paramIndex) const noexcept
    {
        return usingManagedParameter ? paramAddresses.getReference (paramIndex)
                                     : static_cast<AUParameterAddress> (paramIndex);
    }

    inline int getJuceParameterIndexForAUAddress (AUParameterAddress address) const noexcept
    {
        return usingManagedParameter ? paramMap[static_cast<int64> (address)]
                                     : static_cast<int> (address);
    }
   #endif

    //==============================================================================
    static const double kDefaultSampleRate;

    AudioProcessorHolder::Ptr processorHolder;

    PluginBusUtilities busUtils;
    int totalInChannels, totalOutChannels;

    ScopedPointer<AUAudioUnitBusArray> inputBusses;
    ScopedPointer<AUAudioUnitBusArray> outputBusses;

    ObjCBlock<AUImplementorValueObserver> paramObserver;
    ObjCBlock<AUImplementorValueProvider> paramProvider;

   #if ! JUCE_FORCE_USE_LEGACY_PARAM_IDS
    bool usingManagedParameter;
    Array<AUParameterAddress> paramAddresses;
    HashMap<int64, int> paramMap;
   #endif

    // to avoid recursion on parameter changes, we need to add an
    // editor observer to do the parameter changes
    ObjCBlock<AUParameterObserver> editorParamObserver;
    AUParameterObserverToken editorObserverToken;

    ScopedPointer<AUParameterTree> paramTree;
    ScopedPointer<NSMutableArray<NSNumber*> > overviewParams;
    ScopedPointer<NSMutableArray<NSNumber*> > channelCapabilities;

    ScopedPointer<NSMutableArray<AUAudioUnitPreset*> > factoryPresets;

    ObjCBlock<AUInternalRenderBlock> internalRenderBlock;

    AudioUnitHelpers::CoreAudioBufferList audioBuffer;
    AudioUnitHelpers::ChannelRemapper mapper;

    OwnedArray<BusBuffer> inBusBuffers, outBusBuffers;
    MidiBuffer midiMessages;

    ObjCBlock<AUHostMusicalContextBlock> hostMusicalContextCallback;
    ObjCBlock<AUHostTransportStateBlock> hostTransportStateCallback;

    AudioTimeStamp lastTimeStamp;
    CurrentPositionInfo lastAudioHead;
};

const double JuceAudioUnitv3::kDefaultSampleRate = 44100.0;

JuceAudioUnitv3Base* JuceAudioUnitv3Base::create (AUAudioUnit* audioUnit, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error)
{
    JUCE_DECLARE_WRAPPER_TYPE (wrapperType_AudioUnitv3);
    return new JuceAudioUnitv3 (audioUnit, descr, options, error);
}

//==============================================================================
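// JuceAUViewController backs the AUViewController<AUAudioUnitFactory> exported by the app
// extension: it creates the AudioProcessor and its editor for the host's view, and acts as
// the factory that instantiates the JuceAudioUnitv3 audio unit.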
class JuceAUViewController
{
public:
    JuceAUViewController (AUViewController<AUAudioUnitFactory>* p)
        : myself (p), processorHolder (nullptr), preferredSize (1.0f, 1.0f)
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());
        JUCE_DECLARE_WRAPPER_TYPE (wrapperType_AudioUnitv3);

        initialiseJuce_GUI();
    }

    ~JuceAUViewController()
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());
    }

    //==============================================================================
    void loadView()
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());

        if (AudioProcessor* p = createPluginFilterOfType (AudioProcessor::wrapperType_AudioUnitv3))
        {
            processorHolder = new AudioProcessorHolder (p);
            auto& processor = getAudioProcessor();

            if (processor.hasEditor())
            {
                if (AudioProcessorEditor* editor = processor.createEditorIfNeeded())
                {
                    preferredSize = editor->getBounds();

                    JUCE_IOS_MAC_VIEW* view = [[[JUCE_IOS_MAC_VIEW alloc] initWithFrame: convertToCGRect (editor->getBounds())] autorelease];
                    [myself setView: view];

                    editor->setVisible (true);
                    editor->addToDesktop (0, view);
                }
            }
        }
    }

    void viewDidLayoutSubviews()
    {
        if (processorHolder != nullptr && [myself view] != nullptr)
        {
            if (AudioProcessorEditor* editor = getAudioProcessor().getActiveEditor())
            {
                editor->setBounds (convertToRectInt ([[myself view] bounds]));

                if (JUCE_IOS_MAC_VIEW* peerView = [[[myself view] subviews] objectAtIndex: 0])
                {
                   #if JUCE_IOS
                    [peerView setNeedsDisplay];
                   #else
                    [peerView setNeedsDisplay: YES];
                   #endif
                }
            }
        }
    }

    CGSize getPreferredContentSize() const
    {
        return CGSizeMake (static_cast<float> (preferredSize.getWidth()),
                           static_cast<float> (preferredSize.getHeight()));
    }

    //==============================================================================
    AUAudioUnit* createAudioUnit (const AudioComponentDescription& descr, NSError** error)
    {
        AUAudioUnit* retval = nil;

        if (! MessageManager::getInstance()->isThisTheMessageThread())
        {
            WaitableEvent creationEvent;

            // AUv3 headers say that we may block this thread and that the message thread is guaranteed
            // to be unblocked
            struct AUCreator : public CallbackMessage
            {
                JuceAUViewController& owner;
                AudioComponentDescription pDescr;
                NSError** pError;
                AUAudioUnit*& outAU;
                WaitableEvent& e;

                AUCreator (JuceAUViewController& parent, const AudioComponentDescription& paramDescr, NSError** paramError,
                           AUAudioUnit*& outputAU, WaitableEvent& event)
                    : owner (parent), pDescr (paramDescr), pError (paramError), outAU (outputAU), e (event)
                {}

                void messageCallback() override
                {
                    outAU = owner.createAudioUnitOnMessageThread (pDescr, pError);
                    e.signal();
                }
            };

            (new AUCreator (*this, descr, error, retval, creationEvent))->post();
            creationEvent.wait (-1);
        }
        else
            retval = createAudioUnitOnMessageThread (descr, error);

        return [retval autorelease];
    }

private:
    //==============================================================================
    AUViewController<AUAudioUnitFactory>* myself;
    AudioProcessorHolder::Ptr processorHolder;
    Rectangle<int> preferredSize;

    //==============================================================================
    AUAudioUnit* createAudioUnitOnMessageThread (const AudioComponentDescription& descr, NSError** error)
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());

        [myself view]; // this will call [view load] and ensure that the AudioProcessor has been instantiated

        if (processorHolder == nullptr)
            return nullptr;

        return (new JuceAudioUnitv3 (processorHolder, descr, 0, error))->getAudioUnit();
    }

    AudioProcessor& getAudioProcessor() const noexcept    { return **processorHolder; }
};

//==============================================================================
// necessary glue code
@interface JUCE_VIEWCONTROLLER_OBJC_NAME (JucePlugin_AUExportPrefix) : AUViewController<AUAudioUnitFactory>
@end

@implementation JUCE_VIEWCONTROLLER_OBJC_NAME (JucePlugin_AUExportPrefix)
{
    ScopedPointer<JuceAUViewController> cpp;
}

- (instancetype) initWithNibName: (nullable NSString*) nib bundle: (nullable NSBundle*) bndl { self = [super initWithNibName: nib bundle: bndl]; cpp = new JuceAUViewController (self); return self; }
- (void) loadView                { cpp->loadView(); }
- (AUAudioUnit*) createAudioUnitWithComponentDescription: (AudioComponentDescription) desc error: (NSError**) error { return cpp->createAudioUnit (desc, error); }
- (CGSize) preferredContentSize  { return cpp->getPreferredContentSize(); }
- (void) viewDidLayoutSubviews   { return cpp->viewDidLayoutSubviews(); }
@end

#pragma clang diagnostic pop
#endif