The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

1427 lines
63KB

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2015 - ROLI Ltd.
  5. Permission is granted to use this software under the terms of either:
  6. a) the GPL v2 (or any later version)
  7. b) the Affero GPL v3
  8. Details of these licenses can be found at: www.gnu.org/licenses
  9. JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
  10. WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
  11. A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  12. ------------------------------------------------------------------------------
  13. To release a closed-source product which uses JUCE, commercial licenses are
  14. available: visit www.juce.com for more information.
  15. ==============================================================================
  16. */
  17. #include "../../juce_core/system/juce_TargetPlatform.h"
  18. #include "../utility/juce_CheckSettingMacros.h"
  19. #if JucePlugin_Build_AUv3
  20. #import <CoreAudioKit/CoreAudioKit.h>
  21. #import <AudioToolbox/AudioToolbox.h>
  22. #import <AVFoundation/AVFoundation.h>
  23. #if JUCE_MAC
  24. #if (! defined MAC_OS_X_VERSION_MIN_REQUIRED) || (! defined MAC_OS_X_VERSION_10_11) || (MAC_OS_X_VERSION_MIN_REQUIRED < MAC_OS_X_VERSION_10_11)
  25. #error AUv3 needs Deployment Target OS X 10.11 or higher to compile
  26. #endif
  27. #endif
  28. #if JUCE_IOS
  29. #if (! defined __IPHONE_OS_VERSION_MIN_REQUIRED) || (! defined __IPHONE_9_0) || (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_9_0)
  30. #error AUv3 needs Deployment Target iOS 9.0 or higher to compile
  31. #endif
  32. #endif
  33. #ifndef __OBJC2__
  34. #error AUv3 needs Objective-C 2 support (compile with 64-bit)
  35. #endif
  36. #include "../utility/juce_IncludeSystemHeaders.h"
  37. #include "../utility/juce_IncludeModuleHeaders.h"
  38. #include "../../juce_core/native/juce_osx_ObjCHelpers.h"
  39. #include "../../juce_graphics/native/juce_mac_CoreGraphicsHelpers.h"
  40. #include "juce_AU_Shared.h"
  41. #define JUCE_VIEWCONTROLLER_OBJC_NAME(x) JUCE_JOIN_MACRO (x, FactoryAUv3)
  42. #if ! JUCE_COMPILER_SUPPORTS_VARIADIC_TEMPLATES
  43. #error AUv3 wrapper requires variadic template support
  44. #endif
  45. #if JUCE_IOS
  46. #define JUCE_IOS_MAC_VIEW UIView
  47. #else
  48. #define JUCE_IOS_MAC_VIEW NSView
  49. #endif
  50. #define JUCE_AUDIOUNIT_OBJC_NAME(x) JUCE_JOIN_MACRO (x, AUv3)
  51. #pragma clang diagnostic push
  52. #pragma clang diagnostic ignored "-Wnullability-completeness"
// JUCE smart-pointer containers (e.g. ScopedPointer) dispose of their payload via
// ContainerDeletePolicy<T>::destroy(). These specialisations make them release
// Objective-C objects with an ObjC `release` message instead of C++ `delete`,
// which would be undefined behaviour for ObjC-allocated objects.
// TODO: ask Timur: use SFINAE to automatically generate this for all NSObjects
template <> struct ContainerDeletePolicy<AUAudioUnitBusArray>                 { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<AUParameterTree>                     { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<NSMutableArray<AUParameterNode *> >  { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<AUParameter>                         { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<NSMutableArray<AUAudioUnitBus*> >    { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<AUAudioUnitBus>                      { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<AVAudioFormat>                       { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<AVAudioPCMBuffer>                    { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<NSMutableArray<NSNumber*> >          { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<NSNumber>                            { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<NSMutableArray<AUAudioUnitPreset*> > { static void destroy (NSObject* o) { [o release]; } };
template <> struct ContainerDeletePolicy<AUAudioUnitPreset>                   { static void destroy (NSObject* o) { [o release]; } };
  66. //==============================================================================
  67. struct AudioProcessorHolder : public ReferenceCountedObject
  68. {
  69. AudioProcessorHolder() {}
  70. AudioProcessorHolder (AudioProcessor* p) : processor (p) {}
  71. AudioProcessor& operator*() noexcept { return *processor; }
  72. AudioProcessor* operator->() noexcept { return processor; }
  73. AudioProcessor* get() noexcept { return processor; }
  74. typedef ReferenceCountedObjectPtr<AudioProcessorHolder> Ptr;
  75. private:
  76. ScopedPointer<AudioProcessor> processor;
  77. AudioProcessorHolder& operator= (AudioProcessor*) JUCE_DELETED_FUNCTION;
  78. AudioProcessorHolder (AudioProcessorHolder&) JUCE_DELETED_FUNCTION;
  79. AudioProcessorHolder& operator= (AudioProcessorHolder&) JUCE_DELETED_FUNCTION;
  80. };
  81. //==============================================================================
/** Abstract C++ base class which bridges an Objective-C AUAudioUnit to C++.

    A dynamically-registered ObjC subclass of AUAudioUnit (see the nested Class
    struct) stores a pointer to this C++ object in its "cppObject" ivar, and
    every ObjC method forwards into the matching virtual below. Subclasses
    override the pure virtuals to provide the actual plug-in behaviour; the
    default implementations here either return neutral values or forward to
    the AUAudioUnit superclass via objc_msgSendSuper.
*/
class JuceAudioUnitv3Base
{
public:
    /** Creates a brand-new AUAudioUnit instance of our registered ObjC class and
        attaches this C++ object to it (via the juceClass: init variant). */
    JuceAudioUnitv3Base (const AudioComponentDescription& descr,
                         AudioComponentInstantiationOptions options,
                         NSError** error)
       // The custom init selector is only known at runtime, so silence the
       // "method not declared" warning for this message send.
       #pragma clang diagnostic push
       #pragma clang diagnostic ignored "-Wobjc-method-access"
        : au ([audioUnitObjCClass.createInstance() initWithComponentDescription: descr
                                                                        options: options
                                                                          error: error
                                                                      juceClass: this])
       #pragma clang diagnostic pop
    {
    }

    /** Attaches to an already-created AUAudioUnit. Must be called on the JUCE
        message thread, as it bootstraps the JUCE GUI subsystem. */
    JuceAudioUnitv3Base (AUAudioUnit* audioUnit) : au (audioUnit)
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());
        initialiseJuce_GUI();
    }

    virtual ~JuceAudioUnitv3Base() {}

    //==============================================================================
    /** The ObjC AUAudioUnit this wrapper is bound to. */
    AUAudioUnit* getAudioUnit() noexcept { return au; }

    // Overridable hooks with neutral defaults.
    virtual int getVirtualMIDICableCount() { return 0; }
    virtual void reset() {}

    /** Default: defer the format-change decision to the AUAudioUnit superclass. */
    virtual bool shouldChangeToFormat (AVAudioFormat* format, AUAudioUnitBus* bus)
    {
        objc_super s = { getAudioUnit(), [AUAudioUnit class] };
        return (ObjCMsgSendSuper<BOOL, AVAudioFormat*, AUAudioUnitBus*> (&s, @selector (shouldChangeToFormat:forBus:), format, bus) == YES);
    }

    virtual AUAudioUnitPreset* getCurrentPreset() { return nullptr; }
    virtual void setCurrentPreset(AUAudioUnitPreset*) {}
    virtual NSTimeInterval getLatency() { return 0.0; }
    virtual NSTimeInterval getTailTime() { return 0.0; }
    virtual bool getCanProcessInPlace() { return false; }
    virtual bool getRenderingOffline() { return false; }

    //==============================================================================
    // Pure virtuals: the concrete wrapper must supply these AU properties.
    virtual AUAudioUnitBusArray* getInputBusses() = 0;
    virtual AUAudioUnitBusArray* getOutputBusses() = 0;
    virtual AUParameterTree* getParameterTree() = 0;
    virtual AUInternalRenderBlock getInternalRenderBlock() = 0;
    virtual void setRenderingOffline (bool offline) = 0;
    virtual NSArray<NSNumber*> *getChannelCapabilities() = 0;

    //==============================================================================
    /** Default: no parameters to show in the host's overview. */
    virtual NSArray<NSNumber*>* parametersForOverviewWithCount (int)
    {
        return [NSArray<NSNumber*> array];
    }

    /** Default: no factory presets. */
    virtual NSArray<AUAudioUnitPreset*>* getFactoryPresets()
    {
        return [NSArray<AUAudioUnitPreset*> array];
    }

    /** Default: forward full-state get/set to the AUAudioUnit superclass. */
    virtual NSDictionary<NSString*, id>* getFullState()
    {
        objc_super s = { getAudioUnit(), [AUAudioUnit class] };
        return ObjCMsgSendSuper<NSDictionary<NSString*, id>*> (&s, @selector (fullState));
    }

    virtual void setFullState (NSDictionary<NSString*, id>* state)
    {
        objc_super s = { getAudioUnit(), [AUAudioUnit class] };
        ObjCMsgSendSuper<void, NSDictionary<NSString*, id>*> (&s, @selector (setFullState:), state);
    }

    /** Default: forward render-resource allocation/deallocation to the superclass. */
    virtual bool allocateRenderResourcesAndReturnError (NSError **outError)
    {
        objc_super s = { getAudioUnit(), [AUAudioUnit class] };
        return (ObjCMsgSendSuper<BOOL, NSError**> (&s, @selector (allocateRenderResourcesAndReturnError:), outError) == YES);
    }

    virtual void deallocateRenderResources()
    {
        objc_super s = { getAudioUnit(), [AUAudioUnit class] };
        ObjCMsgSendSuper<void> (&s, @selector (deallocateRenderResources));
    }

private:
    //==============================================================================
    /** Registers (once, via the static member below) an ObjC subclass of
        AUAudioUnit whose methods trampoline into the JuceAudioUnitv3Base stored
        in the "cppObject" ivar. The third argument of each addMethod call is
        the ObjC type-encoding string for the method's signature. */
    struct Class : public ObjCClass<AUAudioUnit>
    {
        Class() : ObjCClass<AUAudioUnit> ("AUAudioUnit_")
        {
            addIvar<JuceAudioUnitv3Base*> ("cppObject");

            // Custom initialiser used when the C++ side creates the AU and
            // already owns the JuceAudioUnitv3Base instance.
            addMethod (@selector (initWithComponentDescription:options:error:juceClass:),
                       initWithComponentDescriptionAndJuceClass, "@@:",
                       @encode (AudioComponentDescription),
                       @encode (AudioComponentInstantiationOptions), "^@@");

            // Standard AUAudioUnit initialiser: creates the C++ object itself.
            addMethod (@selector (initWithComponentDescription:options:error:),
                       initWithComponentDescription, "@@:",
                       @encode (AudioComponentDescription),
                       @encode (AudioComponentInstantiationOptions), "^@");

            addMethod (@selector (dealloc), dealloc, "v@:");

            // AU property getters/setters forwarded to the C++ virtuals.
            addMethod (@selector (inputBusses), getInputBusses, "@@:");
            addMethod (@selector (outputBusses), getOutputBusses, "@@:");
            addMethod (@selector (parameterTree), getParameterTree, "@@:");
            addMethod (@selector (deallocateRenderResources), deallocateRenderResources, "v@:");
            addMethod (@selector (reset), reset, "v@:");
            addMethod (@selector (shouldChangeToFormat:forBus:), shouldChangeToFormat, "B@:@@");
            addMethod (@selector (factoryPresets), getFactoryPresets, "@@:");
            addMethod (@selector (currentPreset), getCurrentPreset, "@@:");
            addMethod (@selector (setCurrentPreset:), setCurrentPreset, "v@:@");
            addMethod (@selector (fullState), getFullState, "@@:");
            addMethod (@selector (setFullState:), setFullState, "v@:@");
            addMethod (@selector (channelCapabilities), getChannelCapabilities, "@@:");
            addMethod (@selector (allocateRenderResourcesAndReturnError:), allocateRenderResourcesAndReturnError, "B@:^@");
            addMethod (@selector (parametersForOverviewWithCount:), parametersForOverviewWithCount, "@@:", @encode (NSInteger));
            addMethod (@selector (setRenderingOffline:), setRenderingOffline, "v@:", @encode (BOOL));
            addMethod (@selector (internalRenderBlock), getInternalRenderBlock, @encode (AUInternalRenderBlock), "@:");
            addMethod (@selector (virtualMIDICableCount), getVirtualMIDICableCount, @encode (NSInteger), "@:");
            addMethod (@selector (latency), getLatency, @encode (NSTimeInterval), "@:");
            addMethod (@selector (tailTime), getTailTime, @encode (NSTimeInterval), "@:");
            addMethod (@selector (canProcessInPlace), getCanProcessInPlace, @encode (BOOL), "@:");
            addMethod (@selector (isRenderingOffline), getRenderingOffline, @encode (BOOL), "@:");

            registerClass();
        }

        //==============================================================================
        // Access to the C++ object stashed in the ObjC instance's ivar.
        static JuceAudioUnitv3Base* _this (id self) { return getIvar<JuceAudioUnitv3Base*> (self, "cppObject"); }
        static void setThis (id self, JuceAudioUnitv3Base* cpp) { object_setInstanceVariable (self, "cppObject", cpp); }

        //==============================================================================
        /** Host-driven init path: calls super's designated initialiser, then
            creates the C++ wrapper (JuceAudioUnitv3Base::create) and binds it. */
        static id initWithComponentDescription (id _self, SEL, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error)
        {
            AUAudioUnit* self = _self;

            objc_super s = { self, [AUAudioUnit class] };
            self = ObjCMsgSendSuper<AUAudioUnit*, AudioComponentDescription,
                                    AudioComponentInstantiationOptions, NSError**> (&s, @selector(initWithComponentDescription:options:error:), descr, options, error);

            JuceAudioUnitv3Base* juceAU = JuceAudioUnitv3Base::create (self, descr, options, error);

            setThis (self, juceAU);
            return self;
        }

        /** C++-driven init path: the caller already owns the JuceAudioUnitv3Base. */
        static id initWithComponentDescriptionAndJuceClass (id _self, SEL, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error, JuceAudioUnitv3Base* juceAU)
        {
            AUAudioUnit* self = _self;

            objc_super s = { self, [AUAudioUnit class] };
            self = ObjCMsgSendSuper<AUAudioUnit*, AudioComponentDescription,
                                    AudioComponentInstantiationOptions, NSError**> (&s, @selector(initWithComponentDescription:options:error:), descr, options, error);

            setThis (self, juceAU);
            return self;
        }

        // ObjC dealloc destroys the owned C++ wrapper.
        static void dealloc (id self, SEL) { delete _this (self); }

        //==============================================================================
        // Trampolines: each forwards the ObjC message to the C++ virtual.
        static AUAudioUnitBusArray* getInputBusses (id self, SEL) { return _this (self)->getInputBusses(); }
        static AUAudioUnitBusArray* getOutputBusses (id self, SEL) { return _this (self)->getOutputBusses(); }
        static AUParameterTree* getParameterTree (id self, SEL) { return _this (self)->getParameterTree(); }
        static AUInternalRenderBlock getInternalRenderBlock (id self, SEL) { return _this (self)->getInternalRenderBlock(); }
        static BOOL allocateRenderResourcesAndReturnError (id self, SEL, NSError** error) { return _this (self)->allocateRenderResourcesAndReturnError (error) ? YES : NO; }
        static void deallocateRenderResources (id self, SEL) { _this (self)->deallocateRenderResources(); }
        static void reset (id self, SEL) { _this (self)->reset(); }
        static NSInteger getVirtualMIDICableCount (id self, SEL) { return _this (self)->getVirtualMIDICableCount(); }
        static BOOL shouldChangeToFormat (id self, SEL, AVAudioFormat* format, AUAudioUnitBus* bus) { return _this (self)->shouldChangeToFormat (format, bus) ? YES : NO; }
        static NSArray<NSNumber*>* parametersForOverviewWithCount (id self, SEL, NSInteger count) { return _this (self)->parametersForOverviewWithCount (static_cast<int> (count)); }
        static NSArray<AUAudioUnitPreset*>* getFactoryPresets (id self, SEL) { return _this (self)->getFactoryPresets(); }
        static AUAudioUnitPreset* getCurrentPreset (id self, SEL) { return _this (self)->getCurrentPreset(); }
        static void setCurrentPreset (id self, SEL, AUAudioUnitPreset* preset) { return _this (self)->setCurrentPreset (preset); }
        static NSDictionary<NSString*, id>* getFullState (id self, SEL) { return _this (self)->getFullState(); }
        static void setFullState (id self, SEL, NSDictionary<NSString *, id>* state) { return _this (self)->setFullState (state); }
        static NSTimeInterval getLatency (id self, SEL) { return _this (self)->getLatency(); }
        static NSTimeInterval getTailTime (id self, SEL) { return _this (self)->getTailTime(); }
        static BOOL getCanProcessInPlace (id self, SEL) { return _this (self)->getCanProcessInPlace() ? YES : NO; }
        static BOOL getRenderingOffline (id self, SEL) { return _this (self)->getRenderingOffline() ? YES : NO; }
        static void setRenderingOffline (id self, SEL, BOOL renderingOffline) { _this (self)->setRenderingOffline (renderingOffline); }
        static NSArray<NSNumber*>* getChannelCapabilities (id self, SEL) { return _this (self)->getChannelCapabilities(); }
    };

    // Factory for the host-driven init path; defined elsewhere in this file.
    static JuceAudioUnitv3Base* create (AUAudioUnit*, AudioComponentDescription, AudioComponentInstantiationOptions, NSError**);

    //==============================================================================
    static Class audioUnitObjCClass;

protected:
    AUAudioUnit* au;   // the ObjC counterpart this wrapper drives
};
  244. //==============================================================================
// The one shared ObjC class registration used by every wrapper instance.
JuceAudioUnitv3Base::Class JuceAudioUnitv3Base::audioUnitObjCClass;
  246. //==============================================================================
  247. //=========================== The actual AudioUnit =============================
  248. //==============================================================================
  249. class JuceAudioUnitv3 : public JuceAudioUnitv3Base,
  250. public AudioProcessorListener,
  251. public AudioPlayHead
  252. {
  253. public:
    /** C++-driven construction: wraps an existing (shared) processor and creates
        the underlying AUAudioUnit via the base class. */
    JuceAudioUnitv3 (const AudioProcessorHolder::Ptr& processor,
                     const AudioComponentDescription& descr,
                     AudioComponentInstantiationOptions options,
                     NSError** error)
        : JuceAudioUnitv3Base (descr, options, error),
          processorHolder (processor),
          mapper (*processorHolder->get())
    {
        init();
    }

    /** Host-driven construction: attaches to an already-created AUAudioUnit and
        instantiates a fresh plug-in filter for the AUv3 wrapper type. */
    JuceAudioUnitv3 (AUAudioUnit* audioUnit, AudioComponentDescription, AudioComponentInstantiationOptions, NSError**)
        : JuceAudioUnitv3Base (audioUnit),
          processorHolder (new AudioProcessorHolder (createPluginFilterOfType (AudioProcessor::wrapperType_AudioUnitv3))),
          mapper (*processorHolder->get())
    {
        init();
    }
    /** Detaches this wrapper from the processor before it is destroyed:
        removes the listener, notifies any open editor, and unregisters the
        parameter observer that fed editor updates. */
    ~JuceAudioUnitv3()
    {
        auto& processor = getAudioProcessor();
        processor.removeListener (this);

        // The editor must be told before the processor disappears underneath it.
        if (AudioProcessorEditor* editor = processor.getActiveEditor())
            processor.editorBeingDeleted (editor);

        if (editorObserverToken != nullptr)
        {
            [paramTree removeParameterObserver: editorObserverToken];
            editorObserverToken = nullptr;
        }
    }
  283. //==============================================================================
    /** Shared second-stage initialisation for both constructors: configures the
        processor's channel layout, builds the channel-capability array, sets up
        the render block, and creates parameters/presets/busses. */
    void init()
    {
        AudioProcessor& processor = getAudioProcessor();
        const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];

       #ifdef JucePlugin_PreferredChannelConfigurations
        // A fixed table of allowed {in, out} channel pairs was compiled in:
        // use the first entry as the initial configuration.
        short configs[][2] = {JucePlugin_PreferredChannelConfigurations};
        const int numConfigs = sizeof (configs) / sizeof (short[2]);

        jassert (numConfigs > 0 && (configs[0][0] > 0 || configs[0][1] > 0));
        processor.setPlayConfigDetails (configs[0][0], configs[0][1], kDefaultSampleRate, static_cast<int> (maxFrames));

        Array<AUChannelInfo> channelInfos;

        for (int i = 0; i < numConfigs; ++i)
        {
            AUChannelInfo channelInfo;
            channelInfo.inChannels = configs[i][0];
            channelInfo.outChannels = configs[i][1];
            channelInfos.add (channelInfo);
        }
       #else
        // Otherwise derive the supported configurations from the processor's buses.
        Array<AUChannelInfo> channelInfos = AudioUnitHelpers::getAUChannelInfo (processor);
       #endif

        processor.setPlayHead (this);
        totalInChannels  = processor.getTotalNumInputChannels();
        totalOutChannels = processor.getTotalNumOutputChannels();

        {
            // Flatten the channel-info pairs into the NSNumber array the AU
            // runtime expects from channelCapabilities.
            channelCapabilities = [[NSMutableArray<NSNumber*> alloc] init];

            for (int i = 0; i < channelInfos.size(); ++i)
            {
                AUChannelInfo& info = channelInfos.getReference (i);

                [channelCapabilities addObject: [NSNumber numberWithInteger: info.inChannels]];
                [channelCapabilities addObject: [NSNumber numberWithInteger: info.outChannels]];
            }
        }

        editorObserverToken = nullptr;
        internalRenderBlock = CreateObjCBlock (this, &JuceAudioUnitv3::renderCallback);

        processor.setRateAndBufferSizeDetails (kDefaultSampleRate, static_cast<int> (maxFrames));
        processor.prepareToPlay (kDefaultSampleRate, static_cast<int> (maxFrames));
        processor.addListener (this);

        addParameters();
        addPresets();
        addAudioUnitBusses (true);
        addAudioUnitBusses (false);
    }
  326. //==============================================================================
    /** Returns the wrapped JUCE AudioProcessor. */
    AudioProcessor& getAudioProcessor() const noexcept        { return **processorHolder; }

    // Hand back the objects that were built during init().
    AUAudioUnitBusArray* getInputBusses() override            { return inputBusses; }
    AUAudioUnitBusArray* getOutputBusses() override           { return outputBusses; }
    AUParameterTree* getParameterTree() override              { return paramTree; }
    AUInternalRenderBlock getInternalRenderBlock() override   { return internalRenderBlock; }
    NSArray<AUAudioUnitPreset*>* getFactoryPresets() override { return factoryPresets; }

    // The AU's "offline rendering" flag maps directly onto JUCE's non-realtime flag.
    bool getRenderingOffline() override                       { return getAudioProcessor().isNonRealtime(); }
    void setRenderingOffline (bool offline) override          { getAudioProcessor().setNonRealtime (offline); }

    NSArray<NSNumber*>* getChannelCapabilities() override     { return channelCapabilities; }
  336. //==============================================================================
  337. AUAudioUnitPreset* getCurrentPreset() override
  338. {
  339. const int n = static_cast<int> ([factoryPresets count]);
  340. const int idx = static_cast<int> (getAudioProcessor().getCurrentProgram());
  341. if (idx < n)
  342. return [factoryPresets objectAtIndex:static_cast<unsigned int> (idx)];
  343. return nullptr;
  344. }
  345. void setCurrentPreset(AUAudioUnitPreset* preset) override
  346. {
  347. const int n = static_cast<int> ([factoryPresets count]);
  348. const int idx = static_cast<int> ([preset number]);
  349. if (isPositiveAndBelow (idx, n))
  350. getAudioProcessor().setCurrentProgram (idx);
  351. }
  352. //==============================================================================
    /** Builds the AU's full-state dictionary: the superclass's state plus the
        processor's current program state stored as NSData under
        JUCE_STATE_DICTIONARY_KEY. Returns an autoreleased dictionary. */
    NSDictionary<NSString*, id>* getFullState() override
    {
        NSMutableDictionary<NSString*, id>* retval = [[NSMutableDictionary<NSString*, id> alloc] init];

        {
            // Start with whatever AUAudioUnit itself wants to persist.
            NSDictionary<NSString*, id>* superRetval = JuceAudioUnitv3Base::getFullState();
            if (superRetval != nullptr)
                [retval addEntriesFromDictionary:superRetval];
        }

        juce::MemoryBlock state;
        getAudioProcessor().getCurrentProgramStateInformation (state);

        if (state.getSize() > 0)
        {
            NSData* ourState = [[NSData alloc] initWithBytes: state.getData()
                                                      length: state.getSize()];

            NSString* nsKey = [[NSString alloc] initWithUTF8String: JUCE_STATE_DICTIONARY_KEY];

            [retval setObject: ourState
                       forKey: nsKey];

            // The dictionary retains these; drop our local ownership.
            [nsKey release];
            [ourState release];
        }

        return [retval autorelease];
    }
    /** Restores state from the host-supplied dictionary: strips the AU preset
        data entry, hands the rest to the superclass, then applies the JUCE
        program state stored under JUCE_STATE_DICTIONARY_KEY (if present). */
    void setFullState (NSDictionary<NSString*, id>* state) override
    {
        if (state == nullptr)
            return;

        NSMutableDictionary<NSString*, id>* modifiedState = [[NSMutableDictionary<NSString*, id> alloc] init];
        [modifiedState addEntriesFromDictionary: state];

        // The preset-data entry is handled by JUCE, not the superclass: remove it
        // before forwarding.
        NSString* nsPresetKey = [[NSString alloc] initWithUTF8String: kAUPresetDataKey];
        [modifiedState removeObjectForKey: nsPresetKey];
        [nsPresetKey release];

        JuceAudioUnitv3Base::setFullState (modifiedState);

        NSString* nsKey = [[NSString alloc] initWithUTF8String: JUCE_STATE_DICTIONARY_KEY];
        NSObject* obj = [modifiedState objectForKey: nsKey];
        [nsKey release];

        if (obj != nullptr)
        {
            // Only trust the entry if it really is NSData.
            if ([obj isKindOfClass:[NSData class]])
            {
                NSData* data = reinterpret_cast<NSData*> (obj);
                const int numBytes = static_cast<int> ([data length]);
                const juce::uint8* const rawBytes = reinterpret_cast< const juce::uint8* const> ([data bytes]);

                if (numBytes > 0)
                    getAudioProcessor().setCurrentProgramStateInformation (rawBytes, numBytes);
            }
        }

        [modifiedState release];
    }
  401. //==============================================================================
    /** Returns (up to) the first `count` parameter addresses for the host's
        compact parameter overview. If the host asks for at least as many as we
        have, the cached array is returned directly; otherwise an autoreleased
        truncated copy is made. */
    NSArray<NSNumber*>* parametersForOverviewWithCount (int count) override
    {
        const int n = static_cast<int> ([overviewParams count]);

        if (count >= n)
            return overviewParams;

        NSMutableArray<NSNumber*>* retval = [[NSMutableArray<NSNumber*>alloc] initWithArray: overviewParams];
        [retval removeObjectsInRange: NSMakeRange (static_cast<unsigned int> (count), static_cast<unsigned int> (n - count))];

        return [retval autorelease];
    }
    /** Publishes one virtual MIDI cable when the plug-in was built to accept
        MIDI input, otherwise none. */
    int getVirtualMIDICableCount() override
    {
       #if JucePlugin_WantsMidiInput
        return 1;
       #else
        return 0;
       #endif
    }
  419. //==============================================================================
    /** Prepares everything needed for rendering.

        Lets the superclass allocate first, then derives the requested bus
        layouts from the AU bus formats, validates/applies them to the
        processor, allocates the bus and scratch buffers, and finally caches the
        host's musical-context and transport-state blocks. On failure, *outError
        is set to kAudioUnitErr_FormatNotSupported where a layout was rejected.

        @returns false if the superclass allocation or the layout negotiation failed.
    */
    bool allocateRenderResourcesAndReturnError (NSError **outError) override
    {
        AudioProcessor& processor = getAudioProcessor();
        const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];

        if (! JuceAudioUnitv3Base::allocateRenderResourcesAndReturnError (outError))
            return false;

        if (outError != nullptr)
            *outError = nullptr;

        // Translate the AU bus formats into a JUCE BusesLayout (dir 0 = inputs).
        AudioProcessor::BusesLayout layouts;
        for (int dir = 0; dir < 2; ++dir)
        {
            const bool isInput = (dir == 0);
            const int n = processor.getBusCount (isInput);
            Array<AudioChannelSet>& channelSets = (isInput ? layouts.inputBuses : layouts.outputBuses);
            AUAudioUnitBusArray* auBuses = (isInput ? [getAudioUnit() inputBusses] : [getAudioUnit() outputBusses]);
            jassert ([auBuses count] == static_cast<NSUInteger> (n));

            for (int busIdx = 0; busIdx < n; ++busIdx)
            {
                AudioProcessor::Bus* bus = processor.getBus (isInput, busIdx);
                AVAudioFormat* format = [[auBuses objectAtIndexedSubscript:static_cast<NSUInteger> (busIdx)] format];

                AudioChannelSet newLayout;

                // Prefer the explicit channel layout; otherwise fall back to a
                // layout supported for that channel count.
                if (const AVAudioChannelLayout* layout = [format channelLayout])
                    newLayout = AudioUnitHelpers::CALayoutTagToChannelSet ([layout layoutTag]);
                else
                    newLayout = bus->supportedLayoutWithChannels (static_cast<int> ([format channelCount]));

                if (newLayout.isDisabled())
                    return false;

                channelSets.add (newLayout);
            }
        }

       #ifdef JucePlugin_PreferredChannelConfigurations
        short configs[][2] = {JucePlugin_PreferredChannelConfigurations};

        if (! AudioProcessor::containsLayout (layouts, configs))
        {
            if (outError != nullptr)
                *outError = [NSError errorWithDomain:NSOSStatusErrorDomain code:kAudioUnitErr_FormatNotSupported userInfo:nullptr];

            return false;
        }
       #endif

        if (! processor.setBusesLayout (layouts))
        {
            if (outError != nullptr)
                *outError = [NSError errorWithDomain:NSOSStatusErrorDomain code:kAudioUnitErr_FormatNotSupported userInfo:nullptr];

            return false;
        }

        totalInChannels  = processor.getTotalNumInputChannels();
        totalOutChannels = processor.getTotalNumOutputChannels();

        allocateBusBuffer (true);
        allocateBusBuffer (false);

        mapper.alloc();

        audioBuffer.prepare (totalInChannels, totalOutChannels, static_cast<int> (maxFrames));

        // Use the sample rate of the first bus if there is one, else 44.1kHz.
        double sampleRate = (jmax (processor.getBusCount (true), processor.getBusCount (false)) > 0 ?
                             [[[([inputBusses count] > 0 ? inputBusses : outputBusses) objectAtIndexedSubscript: 0] format] sampleRate] : 44100.0);

        processor.setRateAndBufferSizeDetails (sampleRate, static_cast<int> (maxFrames));
        processor.prepareToPlay (sampleRate, static_cast<int> (maxFrames));

        zeromem (&lastAudioHead, sizeof (lastAudioHead));
        hostMusicalContextCallback = [getAudioUnit() musicalContextBlock];
        hostTransportStateCallback = [getAudioUnit() transportStateBlock];

        reset();
        return true;
    }
    /** Releases everything set up by allocateRenderResourcesAndReturnError, in
        reverse dependency order, then lets the superclass clean up. */
    void deallocateRenderResources() override
    {
        // Drop the cached host callbacks first so no stale block is invoked.
        hostMusicalContextCallback = nullptr;
        hostTransportStateCallback = nullptr;

        getAudioProcessor().releaseResources();
        audioBuffer.release();

        inBusBuffers. clear();
        outBusBuffers.clear();

        mapper.release();

        JuceAudioUnitv3Base::deallocateRenderResources();
    }
  492. void reset() override
  493. {
  494. midiMessages.clear();
  495. lastTimeStamp.mSampleTime = std::numeric_limits<Float64>::max();
  496. }
  497. //==============================================================================
    /** Asked by the AU runtime whether a bus may switch to a new format.
        Accepts only formats whose channel layout (or channel count, if no
        explicit layout is given) the corresponding JUCE bus supports. */
    bool shouldChangeToFormat (AVAudioFormat* format, AUAudioUnitBus* auBus) override
    {
        const bool isInput = ([auBus busType] == AUAudioUnitBusTypeInput);
        const int busIdx = static_cast<int> ([auBus index]);
        const int newNumChannels = static_cast<int> ([format channelCount]);

        AudioProcessor& processor = getAudioProcessor();

        if (AudioProcessor::Bus* bus = processor.getBus (isInput, busIdx))
        {
           #ifdef JucePlugin_PreferredChannelConfigurations
            // With a fixed config table, only channel counts from that table are allowed.
            ignoreUnused (bus);
            short configs[][2] = {JucePlugin_PreferredChannelConfigurations};

            if (! AudioUnitHelpers::isLayoutSupported (processor, isInput, busIdx, newNumChannels, configs))
                return false;
           #else
            if (const AVAudioChannelLayout* layout = [format channelLayout])
            {
                AudioChannelSet newLayout = AudioUnitHelpers::CALayoutTagToChannelSet ([layout layoutTag]);

                // The layout tag must agree with the format's channel count.
                if (newLayout.size() != newNumChannels)
                    return false;

                if (! bus->isLayoutSupported (newLayout))
                    return false;
            }
            else
            {
                if (! bus->isNumberOfChannelsSupported (newNumChannels))
                    return false;
            }
           #endif

            return true;
        }

        return false;
    }
  530. //==============================================================================
    /** AudioProcessorListener callback: something other than a single parameter
        changed, so emit a KVO will/did pair telling the host to re-read all
        parameter values. */
    void audioProcessorChanged (AudioProcessor* processor) override
    {
        ignoreUnused (processor);

        [au willChangeValueForKey: @"allParameterValues"];
        [au didChangeValueForKey: @"allParameterValues"];
    }
    /** AudioProcessorListener callback: pushes a JUCE parameter change into the
        AUParameterTree so the host sees the new value. When the change came
        from our own editor, the observer token is passed as originator so the
        editor isn't notified about its own change. */
    void audioProcessorParameterChanged (AudioProcessor*, int idx, float newValue) override
    {
        if (isPositiveAndBelow (idx, getAudioProcessor().getNumParameters()))
        {
            if (AUParameter* param = [paramTree parameterWithAddress: getAUParameterAddressForIndex (idx)])
            {
                if (editorObserverToken != nullptr)
                    [param setValue: newValue originator: editorObserverToken];
                else
                    [param setValue: newValue];
            }
        }
    }
  550. //==============================================================================
    /** Reports the processor's latency in seconds.
        NOTE(review): divides by the processor's current sample rate — assumes a
        non-zero rate has been set (prepareToPlay runs in init()); confirm. */
    NSTimeInterval getLatency() override
    {
        auto& p = getAudioProcessor();
        return p.getLatencySamples() / p.getSampleRate();
    }

    /** Reports the processor's tail length in seconds. */
    NSTimeInterval getTailTime() override { return getAudioProcessor().getTailLengthSeconds(); }
  557. //==============================================================================
  558. bool getCurrentPosition (CurrentPositionInfo& info) override
  559. {
  560. bool musicContextCallSucceeded = false;
  561. bool transportStateCallSucceeded = false;
  562. info = lastAudioHead;
  563. info.timeInSamples = (int64) (lastTimeStamp.mSampleTime + 0.5);
  564. info.timeInSeconds = info.timeInSamples / getAudioProcessor().getSampleRate();
  565. switch (lastTimeStamp.mSMPTETime.mType)
  566. {
  567. case kSMPTETimeType24: info.frameRate = AudioPlayHead::fps24; break;
  568. case kSMPTETimeType25: info.frameRate = AudioPlayHead::fps25; break;
  569. case kSMPTETimeType30Drop: info.frameRate = AudioPlayHead::fps30drop; break;
  570. case kSMPTETimeType30: info.frameRate = AudioPlayHead::fps30; break;
  571. case kSMPTETimeType2997: info.frameRate = AudioPlayHead::fps2997; break;
  572. case kSMPTETimeType2997Drop: info.frameRate = AudioPlayHead::fps2997drop; break;
  573. default: info.frameRate = AudioPlayHead::fpsUnknown; break;
  574. }
  575. double num;
  576. NSInteger den;
  577. NSInteger outDeltaSampleOffsetToNextBeat;
  578. double outCurrentMeasureDownBeat, bpm;
  579. double ppqPosition;
  580. if (hostMusicalContextCallback != nullptr)
  581. {
  582. AUHostMusicalContextBlock musicalContextCallback = hostMusicalContextCallback;
  583. if (musicalContextCallback (&bpm, &num, &den, &ppqPosition, &outDeltaSampleOffsetToNextBeat, &outCurrentMeasureDownBeat))
  584. {
  585. musicContextCallSucceeded = true;
  586. info.timeSigNumerator = (int) num;
  587. info.timeSigDenominator = (int) den;
  588. info.ppqPositionOfLastBarStart = outCurrentMeasureDownBeat;
  589. info.bpm = bpm;
  590. info.ppqPosition = ppqPosition;
  591. info.ppqPositionOfLastBarStart = outCurrentMeasureDownBeat;
  592. }
  593. }
  594. double outCurrentSampleInTimeLine, outCycleStartBeat = 0, outCycleEndBeat = 0;
  595. AUHostTransportStateFlags flags;
  596. if (hostTransportStateCallback != nullptr)
  597. {
  598. AUHostTransportStateBlock transportStateCallback = hostTransportStateCallback;
  599. if (transportStateCallback (&flags, &outCurrentSampleInTimeLine, &outCycleStartBeat, &outCycleEndBeat))
  600. {
  601. transportStateCallSucceeded = true;
  602. info.timeInSamples = (int64) (outCurrentSampleInTimeLine + 0.5);
  603. info.timeInSeconds = info.timeInSamples / getAudioProcessor().getSampleRate();
  604. info.isPlaying = ((flags & AUHostTransportStateMoving) != 0);
  605. info.isLooping = ((flags & AUHostTransportStateCycling) != 0);
  606. info.isRecording = ((flags & AUHostTransportStateRecording) != 0);
  607. info.ppqLoopStart = outCycleStartBeat;
  608. info.ppqLoopEnd = outCycleEndBeat;
  609. }
  610. }
  611. if (musicContextCallSucceeded && transportStateCallSucceeded)
  612. lastAudioHead = info;
  613. return true;
  614. }
private:
    //==============================================================================
    /** Wraps the AudioBufferList belonging to a single AUAudioUnitBus, together
        with a scratch AudioSampleBuffer that is used whenever the host-supplied
        buffer list cannot be referenced directly (see prepare()).
    */
    struct BusBuffer
    {
        BusBuffer (AUAudioUnitBus* bus, int maxFramesPerBuffer)
            : auBus (bus), bufferList (nullptr),
              maxFrames (maxFramesPerBuffer),
              numberOfChannels (static_cast<int> ([[auBus format] channelCount])),
              isInterleaved ([[auBus format] isInterleaved])
        {
            alloc();
        }

        //==============================================================================
        // Allocates the variable-length AudioBufferList plus scratch sample storage.
        void alloc()
        {
            const int numBuffers = isInterleaved ? 1 : numberOfChannels;

            // AudioBufferList already contains one inline AudioBuffer, hence (numBuffers - 1).
            int bytes = static_cast<int> (sizeof (AudioBufferList))
                          + ((numBuffers - 1) * static_cast<int> (sizeof (::AudioBuffer)));
            jassert (bytes > 0);

            bufferListStorage.calloc (static_cast<size_t> (bytes));
            bufferList = reinterpret_cast<AudioBufferList*> (bufferListStorage.getData());

            // Interleaved: one buffer holding all channels; otherwise one channel per buffer.
            const int bufferChannels = isInterleaved ? numberOfChannels : 1;
            scratchBuffer.setSize (numBuffers, bufferChannels * maxFrames);
        }

        void dealloc()
        {
            bufferList = nullptr;
            bufferListStorage.free();
            scratchBuffer.setSize (0, 0);
        }

        //==============================================================================
        int numChannels() const noexcept        { return numberOfChannels; }
        bool interleaved() const noexcept       { return isInterleaved; }
        AudioBufferList* get() const noexcept   { return bufferList; }

        //==============================================================================
        // Points the buffer list either at 'other' (when its layout matches ours)
        // or at the internal scratch buffer.
        void prepare (UInt32 nFrames, const AudioBufferList* other = nullptr) noexcept
        {
            const int numBuffers = isInterleaved ? 1 : numberOfChannels;
            const bool isCompatible = isCompatibleWith (other);

            bufferList->mNumberBuffers = static_cast<UInt32> (numBuffers);

            for (int i = 0; i < numBuffers; ++i)
            {
                const UInt32 bufferChannels = static_cast<UInt32> (isInterleaved ? numberOfChannels : 1);
                bufferList->mBuffers[i].mNumberChannels = bufferChannels;
                bufferList->mBuffers[i].mData = (isCompatible ? other->mBuffers[i].mData
                                                              : scratchBuffer.getWritePointer (i));
                bufferList->mBuffers[i].mDataByteSize = nFrames * bufferChannels * sizeof (float);
            }
        }

        //==============================================================================
        // True if 'other' has the same interleaving and channel count as this bus,
        // so its buffers can be used in place of the scratch buffer.
        bool isCompatibleWith (const AudioBufferList* other) const noexcept
        {
            if (other == nullptr)
                return false;

            if (other->mNumberBuffers > 0)
            {
                const bool otherInterleaved = AudioUnitHelpers::isAudioBufferInterleaved (*other);
                const int otherChannels = static_cast<int> (otherInterleaved ? other->mBuffers[0].mNumberChannels
                                                                             : other->mNumberBuffers);

                return otherInterleaved == isInterleaved
                    && numberOfChannels == otherChannels;
            }

            return numberOfChannels == 0;
        }

    private:
        AUAudioUnitBus* auBus;
        HeapBlock<char> bufferListStorage;   // raw backing storage for bufferList
        AudioBufferList* bufferList;
        int maxFrames, numberOfChannels;
        bool isInterleaved;
        AudioSampleBuffer scratchBuffer;     // used when host buffers can't be shared
    };
  687. //==============================================================================
  688. void addAudioUnitBusses (bool isInput)
  689. {
  690. ScopedPointer<NSMutableArray<AUAudioUnitBus*> > array = [[NSMutableArray<AUAudioUnitBus*> alloc] init];
  691. AudioProcessor& processor = getAudioProcessor();
  692. const int n = processor.getBusCount (isInput);
  693. for (int i = 0; i < n; ++i)
  694. {
  695. ScopedPointer<AUAudioUnitBus> audioUnitBus;
  696. {
  697. ScopedPointer<AVAudioFormat> defaultFormat = [[AVAudioFormat alloc] initStandardFormatWithSampleRate: kDefaultSampleRate
  698. channels: static_cast<AVAudioChannelCount> (processor.getChannelCountOfBus (isInput, i))];
  699. audioUnitBus = [[AUAudioUnitBus alloc] initWithFormat: defaultFormat
  700. error: nullptr];
  701. }
  702. [array addObject: audioUnitBus];
  703. }
  704. (isInput ? inputBusses : outputBusses) = [[AUAudioUnitBusArray alloc] initWithAudioUnit: au
  705. busType: (isInput ? AUAudioUnitBusTypeInput : AUAudioUnitBusTypeOutput)
  706. busses: array];
  707. }
/** Builds the AUParameterTree exposing the JUCE parameters to the host.

    Each JUCE parameter becomes an AUParameter with a 0..1 generic range.
    Unless JUCE_FORCE_USE_LEGACY_PARAM_IDS is set, the AU parameter address is
    derived from the parameter's string ID (see generateAUParameterAddressForIndex)
    and an address<->index mapping is recorded in paramAddresses/paramMap.
*/
void addParameters()
{
    ScopedPointer<NSMutableArray<AUParameterNode*> > params = [[NSMutableArray<AUParameterNode*> alloc] init];

    paramObserver = CreateObjCBlock (this, &JuceAudioUnitv3::valueChangedFromHost);
    paramProvider = CreateObjCBlock (this, &JuceAudioUnitv3::getValue);

    overviewParams = [[NSMutableArray<NSNumber*> alloc] init];

    auto& processor = getAudioProcessor();
    const int n = processor.getNumParameters();

   #if ! JUCE_FORCE_USE_LEGACY_PARAM_IDS
    // check if all parameters are managed?
    usingManagedParameter = (processor.getParameters().size() == processor.getNumParameters());
   #endif

    for (int idx = 0; idx < n; ++idx)
    {
        const String identifier (idx);
        const String name = processor.getParameterName (idx);

        AudioUnitParameterOptions flags = (UInt32) (kAudioUnitParameterFlag_IsWritable
                                                  | kAudioUnitParameterFlag_IsReadable
                                                  | kAudioUnitParameterFlag_HasCFNameString
                                                  | kAudioUnitParameterFlag_ValuesHaveStrings);

       #if JucePlugin_AUHighResolutionParameters
        flags |= (UInt32) kAudioUnitParameterFlag_IsHighResolution;
       #endif

        // set whether the param is automatable (unnamed parameters aren't allowed to be automated)
        if (name.isEmpty() || ! processor.isParameterAutomatable (idx))
            flags |= kAudioUnitParameterFlag_NonRealTime;

        if (processor.isMetaParameter (idx))
            flags |= kAudioUnitParameterFlag_IsGlobalMeta;

       #if JUCE_FORCE_USE_LEGACY_PARAM_IDS
        AUParameterAddress address = static_cast<AUParameterAddress> (idx);
       #else
        AUParameterAddress address = generateAUParameterAddressForIndex (idx);

        // Consider yourself very unlucky if you hit this assertion. The hash code of your
        // parameter ids are not unique.
        jassert (! paramMap.contains (static_cast<int64> (address)));

        paramAddresses.add (address);
        paramMap.set (static_cast<int64> (address), idx);
       #endif

        // create methods in AUParameterTree return unretained objects (!) -> see Apple header AUAudioUnitImplementation.h
        ScopedPointer<AUParameter> param = [[AUParameterTree createParameterWithIdentifier: juceStringToNS (identifier)
                                                                                     name: juceStringToNS (name)
                                                                                  address: address
                                                                                      min: 0.0f
                                                                                      max: 1.0f
                                                                                     unit: kAudioUnitParameterUnit_Generic
                                                                                 unitName: nullptr
                                                                                    flags: flags
                                                                             valueStrings: nullptr
                                                                      dependentParameters: nullptr] retain];

        [params addObject: param];
        [overviewParams addObject: [NSNumber numberWithUnsignedLongLong:address]];
    }

    // create methods in AUParameterTree return unretained objects (!) -> see Apple header AUAudioUnitImplementation.h
    paramTree = [[AUParameterTree createTreeWithChildren: params] retain];

    [paramTree setImplementorValueObserver: paramObserver];
    [paramTree setImplementorValueProvider: paramProvider];

    if (processor.hasEditor())
    {
        // The editor registers its own (no-op) observer; see valueChangedForObserver.
        editorParamObserver = CreateObjCBlock (this, &JuceAudioUnitv3::valueChangedForObserver);
        editorObserverToken = [paramTree tokenByAddingParameterObserver: editorParamObserver];
    }
}
  770. void addPresets()
  771. {
  772. factoryPresets = [[NSMutableArray<AUAudioUnitPreset*> alloc] init];
  773. const int n = getAudioProcessor().getNumPrograms();
  774. for (int idx = 0; idx < n; ++idx)
  775. {
  776. String name = getAudioProcessor().getProgramName (idx);
  777. ScopedPointer<AUAudioUnitPreset> preset = [[AUAudioUnitPreset alloc] init];
  778. [preset setName: juceStringToNS (name)];
  779. [preset setNumber: static_cast<NSInteger> (idx)];
  780. [factoryPresets addObject: preset];
  781. }
  782. }
  783. //==============================================================================
  784. void allocateBusBuffer (bool isInput)
  785. {
  786. OwnedArray<BusBuffer>& busBuffers = isInput ? inBusBuffers : outBusBuffers;
  787. busBuffers.clear();
  788. const int n = getAudioProcessor().getBusCount (isInput);
  789. const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];
  790. for (int busIdx = 0; busIdx < n; ++busIdx)
  791. busBuffers.add (new BusBuffer ([(isInput ? inputBusses : outputBusses) objectAtIndexedSubscript: static_cast<unsigned int> (busIdx)],
  792. static_cast<int> (maxFrames)));
  793. }
  794. void processEvents (const AURenderEvent *__nullable realtimeEventListHead, int numParams, AUEventSampleTime startTime)
  795. {
  796. for (const AURenderEvent* event = realtimeEventListHead; event != nullptr; event = event->head.next)
  797. {
  798. switch (event->head.eventType)
  799. {
  800. case AURenderEventMIDI:
  801. {
  802. const AUMIDIEvent& midiEvent = event->MIDI;
  803. midiMessages.addEvent (midiEvent.data, midiEvent.length, static_cast<int> (midiEvent.eventSampleTime - startTime));
  804. }
  805. break;
  806. case AURenderEventParameter:
  807. case AURenderEventParameterRamp:
  808. {
  809. const AUParameterEvent& paramEvent = event->parameter;
  810. const int idx = getJuceParameterIndexForAUAddress (paramEvent.parameterAddress);
  811. if (isPositiveAndBelow (idx, numParams))
  812. getAudioProcessor().setParameter (idx, paramEvent.value);
  813. }
  814. break;
  815. default:
  816. break;
  817. }
  818. }
  819. }
/** The AU's render callback, invoked on the realtime thread to produce one
    buffer of audio for the given output bus.

    Order of work per cycle: apply scheduled events, prepare the bus buffer
    lists, pull inputs, alias channel pointers to minimise copying, copy/clear
    remaining channels, run the JUCE processBlock, then copy the requested
    output bus back to the host.

    NOTE(review): the lastTimeStamp comparison means that when this is invoked
    multiple times with the same timestamp (once per output bus), only the
    first call renders; subsequent calls just pop the already-rendered bus.
*/
AUAudioUnitStatus renderCallback (AudioUnitRenderActionFlags* actionFlags, const AudioTimeStamp* timestamp, AUAudioFrameCount frameCount,
                                  NSInteger outputBusNumber, AudioBufferList* outputData, const AURenderEvent *__nullable realtimeEventListHead,
                                  AURenderPullInputBlock __nullable pullInputBlock)
{
    auto& processor = getAudioProcessor();
    jassert (static_cast<int> (frameCount) <= getAudioProcessor().getBlockSize());

    // process params
    const int numParams = processor.getNumParameters();
    processEvents (realtimeEventListHead, numParams, static_cast<AUEventSampleTime> (timestamp->mSampleTime));

    if (lastTimeStamp.mSampleTime != timestamp->mSampleTime)
    {
        lastTimeStamp = *timestamp;

        const int numInputBuses  = inBusBuffers. size();
        const int numOutputBuses = outBusBuffers.size();

        // prepare buffers
        {
            for (int busIdx = 0; busIdx < numOutputBuses; ++busIdx)
            {
                BusBuffer& busBuffer = *outBusBuffers[busIdx];

                // The requested bus may render straight into the host's buffers.
                const bool canUseDirectOutput =
                    (busIdx == outputBusNumber && outputData != nullptr && outputData->mNumberBuffers > 0);

                busBuffer.prepare (frameCount, canUseDirectOutput ? outputData : nullptr);
            }

            for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
            {
                BusBuffer& busBuffer = *inBusBuffers[busIdx];

                // Inputs may share the corresponding output bus buffer (in-place processing).
                busBuffer.prepare (frameCount, busIdx < numOutputBuses ? outBusBuffers[busIdx]->get() : nullptr);
            }

            audioBuffer.reset();
        }

        // pull inputs
        {
            for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
            {
                BusBuffer& busBuffer = *inBusBuffers[busIdx];
                AudioBufferList* buffer = busBuffer.get();

                // If the host can't supply input, substitute silence.
                if (pullInputBlock == nullptr || pullInputBlock (actionFlags, timestamp, frameCount, busIdx, buffer) != noErr)
                    AudioUnitHelpers::clearAudioBuffer (*buffer);

                if (actionFlags != nullptr && (*actionFlags & kAudioUnitRenderAction_OutputIsSilence) != 0)
                    AudioUnitHelpers::clearAudioBuffer (*buffer);
            }
        }

        // set buffer pointer to minimize copying
        {
            int chIdx = 0;

            for (int busIdx = 0; busIdx < numOutputBuses; ++busIdx)
            {
                BusBuffer& busBuffer = *outBusBuffers[busIdx];
                AudioBufferList* buffer = busBuffer.get();

                const bool interleaved = busBuffer.interleaved();
                const int numChannels = busBuffer.numChannels();

                const int* outLayoutMap = mapper.get (false, busIdx);

                // Interleaved buses get nullptr here, which forces a copy later.
                for (int ch = 0; ch < numChannels; ++ch)
                    audioBuffer.setBuffer (chIdx++, interleaved ? nullptr : static_cast<float*> (buffer->mBuffers[outLayoutMap[ch]].mData));
            }

            // use input pointers on remaining channels
            for (int busIdx = 0; chIdx < totalInChannels;)
            {
                const int channelOffset = processor.getOffsetInBusBufferForAbsoluteChannelIndex (true, chIdx, busIdx);

                BusBuffer& busBuffer = *inBusBuffers[busIdx];
                AudioBufferList* buffer = busBuffer.get();

                const int* inLayoutMap = mapper.get (true, busIdx);
                audioBuffer.setBuffer (chIdx++, busBuffer.interleaved() ? nullptr : static_cast<float*> (buffer->mBuffers[inLayoutMap[channelOffset]].mData));
            }
        }

        // copy input
        {
            for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
                audioBuffer.push (*inBusBuffers[busIdx]->get(), mapper.get (true, busIdx));

            // clear remaining channels
            for (int i = totalInChannels; i < totalOutChannels; ++i)
                zeromem (audioBuffer.push(), sizeof (float) * frameCount);
        }

        // process audio
        processBlock (audioBuffer.getBuffer (frameCount), midiMessages);

        midiMessages.clear();
    }

    // copy back
    audioBuffer.pop (*outBusBuffers[(int) outputBusNumber]->get(),
                     mapper.get (false, (int) outputBusNumber));

    return noErr;
}
  902. void processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiBuffer) noexcept
  903. {
  904. auto& processor = getAudioProcessor();
  905. const ScopedLock sl (processor.getCallbackLock());
  906. if (processor.isSuspended())
  907. buffer.clear();
  908. else if ([au shouldBypassEffect])
  909. processor.processBlockBypassed (buffer, midiBuffer);
  910. else
  911. processor.processBlock (buffer, midiBuffer);
  912. }
  913. //==============================================================================
  914. void valueChangedFromHost (AUParameter* param, AUValue value)
  915. {
  916. if (param != nullptr)
  917. {
  918. const int idx = getJuceParameterIndexForAUAddress ([param address]);
  919. auto& processor = getAudioProcessor();
  920. if (isPositiveAndBelow (idx, processor.getNumParameters()))
  921. processor.setParameter (idx, value);
  922. }
  923. }
  924. AUValue getValue (AUParameter* param)
  925. {
  926. if (param != nullptr)
  927. {
  928. const int idx = getJuceParameterIndexForAUAddress ([param address]);
  929. auto& processor = getAudioProcessor();
  930. if (isPositiveAndBelow (idx, processor.getNumParameters()))
  931. return processor.getParameter (idx);
  932. }
  933. return 0;
  934. }
/** Parameter-observer callback registered on behalf of the editor;
    intentionally a no-op.
*/
void valueChangedForObserver(AUParameterAddress, AUValue)
{
    // this will have already been handled by valueChangedFromHost
}
//==============================================================================
#if JUCE_FORCE_USE_LEGACY_PARAM_IDS
// Legacy mode: the AU parameter address is simply the JUCE parameter index.
inline AUParameterAddress getAUParameterAddressForIndex (int paramIndex) const noexcept     { return static_cast<AUParameterAddress> (paramIndex); }
inline int getJuceParameterIndexForAUAddress (AUParameterAddress address) const noexcept    { return static_cast<int> (address); }
#else
// Derives a stable AU parameter address from the JUCE parameter ID: the ID's
// 64-bit hash when all parameters are managed, otherwise the ID parsed as an
// integer. Returns (AUParameterAddress) -1 for an out-of-range index.
AUParameterAddress generateAUParameterAddressForIndex (int paramIndex) const
{
    auto& processor = getAudioProcessor();
    const int n = processor.getNumParameters();

    if (isPositiveAndBelow (paramIndex, n))
    {
        const String& juceParamID = processor.getParameterID (paramIndex);
        return usingManagedParameter ? static_cast<AUParameterAddress> (juceParamID.hashCode64())
                                     : static_cast<AUParameterAddress> (juceParamID.getIntValue());
    }

    return static_cast<AUParameterAddress> (-1);
}

// Looks up the address recorded in addParameters() (or falls back to the raw index).
inline AUParameterAddress getAUParameterAddressForIndex (int paramIndex) const noexcept
{
    return usingManagedParameter ? paramAddresses.getReference (paramIndex)
                                 : static_cast<AUParameterAddress> (paramIndex);
}

// Inverse mapping: AU address back to JUCE parameter index.
inline int getJuceParameterIndexForAUAddress (AUParameterAddress address) const noexcept
{
    return usingManagedParameter ? paramMap[static_cast<int64> (address)]
                                 : static_cast<int> (address);
}
#endif
//==============================================================================
static const double kDefaultSampleRate;                    // sample rate used for the initial bus formats

AudioProcessorHolder::Ptr processorHolder;                 // shared ownership of the wrapped AudioProcessor

int totalInChannels, totalOutChannels;

ScopedPointer<AUAudioUnitBusArray> inputBusses;
ScopedPointer<AUAudioUnitBusArray> outputBusses;

ObjCBlock<AUImplementorValueObserver> paramObserver;       // -> valueChangedFromHost
ObjCBlock<AUImplementorValueProvider> paramProvider;       // -> getValue

#if ! JUCE_FORCE_USE_LEGACY_PARAM_IDS
bool usingManagedParameter;                                // true when every parameter has a managed ID (see addParameters)
Array<AUParameterAddress> paramAddresses;                  // parameter index -> AU address
HashMap<int64, int> paramMap;                              // AU address -> parameter index
#endif

// to avoid recursion on parameter changes, we need to add an
// editor observer to do the parameter changes
ObjCBlock<AUParameterObserver> editorParamObserver;        // -> valueChangedForObserver (no-op)
AUParameterObserverToken editorObserverToken;

ScopedPointer<AUParameterTree> paramTree;
ScopedPointer<NSMutableArray<NSNumber*> > overviewParams;  // AU addresses of all parameters
ScopedPointer<NSMutableArray<NSNumber*> > channelCapabilities;

ScopedPointer<NSMutableArray<AUAudioUnitPreset*> > factoryPresets;

ObjCBlock<AUInternalRenderBlock> internalRenderBlock;

AudioUnitHelpers::CoreAudioBufferList audioBuffer;         // channel pointers handed to processBlock
AudioUnitHelpers::ChannelRemapper mapper;                  // AU <-> JUCE channel-order maps (see renderCallback)
OwnedArray<BusBuffer> inBusBuffers, outBusBuffers;

MidiBuffer midiMessages;

ObjCBlock<AUHostMusicalContextBlock> hostMusicalContextCallback;  // optional host tempo/ppq callback
ObjCBlock<AUHostTransportStateBlock> hostTransportStateCallback;  // optional host transport callback

AudioTimeStamp lastTimeStamp;                              // timestamp of the last rendered block (guards re-rendering)
CurrentPositionInfo lastAudioHead;                         // last fully-populated playhead info
};
const double JuceAudioUnitv3::kDefaultSampleRate = 44100.0;

/** Factory used by the exported ObjC glue to create the JUCE AUv3 implementation. */
JuceAudioUnitv3Base* JuceAudioUnitv3Base::create (AUAudioUnit* audioUnit, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error)
{
    PluginHostType::jucePlugInClientCurrentWrapperType = AudioProcessor::wrapperType_AudioUnitv3;
    return new JuceAudioUnitv3 (audioUnit, descr, options, error);
}
//==============================================================================
/** C++ implementation behind the exported AUViewController glue class below.

    Creates the JUCE AudioProcessor, hosts its editor inside the view
    controller's view, and implements the AUAudioUnitFactory creation path.
*/
class JuceAUViewController
{
public:
    JuceAUViewController (AUViewController<AUAudioUnitFactory>* p)
        : myself (p), processorHolder (nullptr), preferredSize (1.0f, 1.0f)
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());

        PluginHostType::jucePlugInClientCurrentWrapperType = AudioProcessor::wrapperType_AudioUnitv3;
        initialiseJuce_GUI();
    }

    ~JuceAUViewController()
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());
    }

    //==============================================================================
    // Creates the plug-in instance and, if it has an editor, embeds the editor
    // in a freshly created native view.
    void loadView()
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());

        if (AudioProcessor* p = createPluginFilterOfType (AudioProcessor::wrapperType_AudioUnitv3))
        {
            processorHolder = new AudioProcessorHolder (p);
            auto& processor = getAudioProcessor();

            if (processor.hasEditor())
            {
                if (AudioProcessorEditor* editor = processor.createEditorIfNeeded())
                {
                    preferredSize = editor->getBounds();

                    JUCE_IOS_MAC_VIEW* view = [[[JUCE_IOS_MAC_VIEW alloc] initWithFrame: convertToCGRect (editor->getBounds())] autorelease];
                    [myself setView: view];

                    editor->setVisible (true);
                    editor->addToDesktop (0, view);
                }
            }
        }
    }

    // Keeps the JUCE editor's bounds in sync with the native view after layout.
    void viewDidLayoutSubviews()
    {
        if (processorHolder != nullptr && [myself view] != nullptr)
        {
            if (AudioProcessorEditor* editor = getAudioProcessor().getActiveEditor())
            {
                editor->setBounds (convertToRectInt ([[myself view] bounds]));

                if (JUCE_IOS_MAC_VIEW* peerView = [[[myself view] subviews] objectAtIndex: 0])
                {
                   #if JUCE_IOS
                    [peerView setNeedsDisplay];
                   #else
                    [peerView setNeedsDisplay: YES];
                   #endif
                }
            }
        }
    }

    CGSize getPreferredContentSize() const
    {
        return CGSizeMake (static_cast<float> (preferredSize.getWidth()),
                           static_cast<float> (preferredSize.getHeight()));
    }

    //==============================================================================
    // AUAudioUnitFactory entry point. May be invoked from any thread; creation
    // is bounced to the message thread and waited on when necessary.
    AUAudioUnit* createAudioUnit (const AudioComponentDescription& descr, NSError** error)
    {
        AUAudioUnit* retval = nil;

        if (! MessageManager::getInstance()->isThisTheMessageThread())
        {
            WaitableEvent creationEvent;

            // AUv3 headers say that we may block this thread and that the message thread is guaranteed
            // to be unblocked
            struct AUCreator : public CallbackMessage
            {
                JuceAUViewController& owner;
                AudioComponentDescription pDescr;
                NSError** pError;
                AUAudioUnit*& outAU;
                WaitableEvent& e;

                AUCreator (JuceAUViewController& parent, const AudioComponentDescription& paramDescr, NSError** paramError,
                           AUAudioUnit*& outputAU, WaitableEvent& event)
                    : owner (parent), pDescr (paramDescr), pError (paramError), outAU (outputAU), e (event)
                {}

                void messageCallback() override
                {
                    outAU = owner.createAudioUnitOnMessageThread (pDescr, pError);
                    e.signal();
                }
            };

            (new AUCreator (*this, descr, error, retval, creationEvent))->post();
            creationEvent.wait (-1);
        }
        else
            retval = createAudioUnitOnMessageThread (descr, error);

        return [retval autorelease];
    }

private:
    //==============================================================================
    AUViewController<AUAudioUnitFactory>* myself;   // the ObjC view controller that owns this object
    AudioProcessorHolder::Ptr processorHolder;
    Rectangle<int> preferredSize;                   // editor's initial bounds, reported to the host

    //==============================================================================
    AUAudioUnit* createAudioUnitOnMessageThread (const AudioComponentDescription& descr, NSError** error)
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());

        [myself view]; // this will call [view load] and ensure that the AudioProcessor has been instantiated

        if (processorHolder == nullptr)
            return nullptr;

        return (new JuceAudioUnitv3 (processorHolder, descr, 0, error))->getAudioUnit();
    }

    AudioProcessor& getAudioProcessor() const noexcept      { return **processorHolder; }
};
//==============================================================================
// necessary glue code: the exported ObjC view-controller class forwards every
// view-controller / AUAudioUnitFactory callback to the C++ JuceAUViewController.
@interface JUCE_VIEWCONTROLLER_OBJC_NAME (JucePlugin_AUExportPrefix) : AUViewController<AUAudioUnitFactory>
@end

@implementation JUCE_VIEWCONTROLLER_OBJC_NAME (JucePlugin_AUExportPrefix)
{
    ScopedPointer<JuceAUViewController> cpp;
}

- (instancetype) initWithNibName: (nullable NSString*) nib bundle: (nullable NSBundle*) bndl { self = [super initWithNibName: nib bundle: bndl]; cpp = new JuceAUViewController (self); return self;}
- (void) loadView { cpp->loadView(); }
- (AUAudioUnit *)createAudioUnitWithComponentDescription:(AudioComponentDescription)desc error:(NSError **)error { return cpp->createAudioUnit (desc, error); }
- (CGSize) preferredContentSize { return cpp->getPreferredContentSize(); }
- (void)viewDidLayoutSubviews { return cpp->viewDidLayoutSubviews(); }
@end
  1126. #pragma clang diagnostic pop
  1127. #endif