The JUCE cross-platform C++ framework, with DISTRHO/KXStudio-specific changes

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2017 - ROLI Ltd.
  5. JUCE is an open source library subject to commercial or open-source
  6. licensing.
  7. By using JUCE, you agree to the terms of both the JUCE 5 End-User License
  8. Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
  9. 27th April 2017).
  10. End User License Agreement: www.juce.com/juce-5-licence
  11. Privacy Policy: www.juce.com/juce-5-privacy-policy
  12. Or: You may also use this code under the terms of the GPL v3 (see
  13. www.gnu.org/licenses).
  14. JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
  15. EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
  16. DISCLAIMED.
  17. ==============================================================================
  18. */
  19. #include "../../juce_core/system/juce_TargetPlatform.h"
  20. #include "../utility/juce_CheckSettingMacros.h"
  21. #if JucePlugin_Build_AUv3
  22. #import <CoreAudioKit/CoreAudioKit.h>
  23. #import <AudioToolbox/AudioToolbox.h>
  24. #import <AVFoundation/AVFoundation.h>
  25. #if JUCE_MAC
  26. #if (! defined MAC_OS_X_VERSION_MIN_REQUIRED) || (! defined MAC_OS_X_VERSION_10_11) || (MAC_OS_X_VERSION_MIN_REQUIRED < MAC_OS_X_VERSION_10_11)
  27. #error AUv3 needs Deployment Target OS X 10.11 or higher to compile
  28. #endif
  29. #endif
  30. #if JUCE_IOS
  31. #if (! defined __IPHONE_OS_VERSION_MIN_REQUIRED) || (! defined __IPHONE_9_0) || (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_9_0)
  32. #error AUv3 needs Deployment Target iOS 9.0 or higher to compile
  33. #endif
  34. #endif
  35. #ifndef __OBJC2__
  36. #error AUv3 needs Objective-C 2 support (compile with 64-bit)
  37. #endif
  38. #include "../utility/juce_IncludeSystemHeaders.h"
  39. #include "../utility/juce_IncludeModuleHeaders.h"
  40. #include "../../juce_core/native/juce_osx_ObjCHelpers.h"
  41. #include "../../juce_graphics/native/juce_mac_CoreGraphicsHelpers.h"
  42. #include "../../juce_audio_processors/format_types/juce_AU_Shared.h"
  43. #define JUCE_VIEWCONTROLLER_OBJC_NAME(x) JUCE_JOIN_MACRO (x, FactoryAUv3)
  44. #if ! JUCE_COMPILER_SUPPORTS_VARIADIC_TEMPLATES
  45. #error AUv3 wrapper requires variadic template support
  46. #endif
  47. #if JUCE_IOS
  48. #define JUCE_IOS_MAC_VIEW UIView
  49. #else
  50. #define JUCE_IOS_MAC_VIEW NSView
  51. #endif
  52. #define JUCE_AUDIOUNIT_OBJC_NAME(x) JUCE_JOIN_MACRO (x, AUv3)
  53. #pragma clang diagnostic push
  54. #pragma clang diagnostic ignored "-Wnullability-completeness"
  55. // TODO: ask Timur: use SFINAE to automatically generate this for all NSObjects
  56. template <> struct ContainerDeletePolicy<AUAudioUnitBusArray> { static void destroy (NSObject* o) { [o release]; } };
  57. template <> struct ContainerDeletePolicy<AUParameterTree> { static void destroy (NSObject* o) { [o release]; } };
  58. template <> struct ContainerDeletePolicy<NSMutableArray<AUParameterNode *> > { static void destroy (NSObject* o) { [o release]; } };
  59. template <> struct ContainerDeletePolicy<AUParameter> { static void destroy (NSObject* o) { [o release]; } };
  60. template <> struct ContainerDeletePolicy<NSMutableArray<AUAudioUnitBus*> > { static void destroy (NSObject* o) { [o release]; } };
  61. template <> struct ContainerDeletePolicy<AUAudioUnitBus> { static void destroy (NSObject* o) { [o release]; } };
  62. template <> struct ContainerDeletePolicy<AVAudioFormat> { static void destroy (NSObject* o) { [o release]; } };
  63. template <> struct ContainerDeletePolicy<AVAudioPCMBuffer> { static void destroy (NSObject* o) { [o release]; } };
  64. template <> struct ContainerDeletePolicy<NSMutableArray<NSNumber*> > { static void destroy (NSObject* o) { [o release]; } };
  65. template <> struct ContainerDeletePolicy<NSNumber> { static void destroy (NSObject* o) { [o release]; } };
  66. template <> struct ContainerDeletePolicy<NSMutableArray<AUAudioUnitPreset*> > { static void destroy (NSObject* o) { [o release]; } };
  67. template <> struct ContainerDeletePolicy<AUAudioUnitPreset> { static void destroy (NSObject* o) { [o release]; } };
  68. //==============================================================================
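// Reference-counted holder that owns the wrapped AudioProcessor, so the audio unit
// and the view controller can share a single processor instance.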
  69. struct AudioProcessorHolder : public ReferenceCountedObject
  70. {
  71. AudioProcessorHolder() {}
  72. AudioProcessorHolder (AudioProcessor* p) : processor (p) {}
  73. AudioProcessor& operator*() noexcept { return *processor; }
  74. AudioProcessor* operator->() noexcept { return processor; }
  75. AudioProcessor* get() noexcept { return processor; }
  76. typedef ReferenceCountedObjectPtr<AudioProcessorHolder> Ptr;
  77. private:
  78. ScopedPointer<AudioProcessor> processor;
  79. AudioProcessorHolder& operator= (AudioProcessor*) JUCE_DELETED_FUNCTION;
  80. AudioProcessorHolder (AudioProcessorHolder&) JUCE_DELETED_FUNCTION;
  81. AudioProcessorHolder& operator= (AudioProcessorHolder&) JUCE_DELETED_FUNCTION;
  82. };
  83. //==============================================================================
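// Base class that registers a dynamic Objective-C subclass of AUAudioUnit and forwards
// its selectors (busses, parameter tree, render block, state, presets, ...) to the
// C++ virtual methods below.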
  84. class JuceAudioUnitv3Base
  85. {
  86. public:
  87. JuceAudioUnitv3Base (const AudioComponentDescription& descr,
  88. AudioComponentInstantiationOptions options,
  89. NSError** error)
  90. #pragma clang diagnostic push
  91. #pragma clang diagnostic ignored "-Wobjc-method-access"
  92. : au ([audioUnitObjCClass.createInstance() initWithComponentDescription: descr
  93. options: options
  94. error: error
  95. juceClass: this])
  96. #pragma clang diagnostic pop
  97. {
  98. }
  99. JuceAudioUnitv3Base (AUAudioUnit* audioUnit) : au (audioUnit)
  100. {
  101. jassert (MessageManager::getInstance()->isThisTheMessageThread());
  102. initialiseJuce_GUI();
  103. }
  104. virtual ~JuceAudioUnitv3Base() {}
  105. //==============================================================================
  106. AUAudioUnit* getAudioUnit() noexcept { return au; }
  107. virtual int getVirtualMIDICableCount() { return 0; }
  108. virtual void reset() {}
  109. virtual bool shouldChangeToFormat (AVAudioFormat* format, AUAudioUnitBus* bus)
  110. {
  111. objc_super s = { getAudioUnit(), [AUAudioUnit class] };
  112. return (ObjCMsgSendSuper<BOOL, AVAudioFormat*,AUAudioUnitBus* > (&s, @selector (shouldChangeToFormat:forBus:), format, bus) == YES);
  113. }
  114. virtual AUAudioUnitPreset* getCurrentPreset() { return nullptr; }
  115. virtual void setCurrentPreset(AUAudioUnitPreset*) {}
  116. virtual NSTimeInterval getLatency() { return 0.0; }
  117. virtual NSTimeInterval getTailTime() { return 0.0; }
  118. virtual bool getCanProcessInPlace() { return false; }
  119. virtual bool getRenderingOffline() { return false; }
  120. //==============================================================================
  121. virtual AUAudioUnitBusArray* getInputBusses() = 0;
  122. virtual AUAudioUnitBusArray* getOutputBusses() = 0;
  123. virtual AUParameterTree* getParameterTree() = 0;
  124. virtual AUInternalRenderBlock getInternalRenderBlock() = 0;
  125. virtual void setRenderingOffline (bool offline) = 0;
  126. virtual NSArray<NSNumber*> *getChannelCapabilities() = 0;
  127. //==============================================================================
  128. virtual NSArray<NSNumber*>* parametersForOverviewWithCount (int)
  129. {
  130. return [NSArray<NSNumber*> array];
  131. }
  132. virtual NSArray<AUAudioUnitPreset*>* getFactoryPresets()
  133. {
  134. return [NSArray<AUAudioUnitPreset*> array];
  135. }
  136. virtual NSDictionary<NSString*, id>* getFullState()
  137. {
  138. objc_super s = { getAudioUnit(), [AUAudioUnit class] };
  139. return ObjCMsgSendSuper<NSDictionary<NSString*, id>*> (&s, @selector (fullState));
  140. }
  141. virtual void setFullState (NSDictionary<NSString*, id>* state)
  142. {
  143. objc_super s = { getAudioUnit(), [AUAudioUnit class] };
  144. ObjCMsgSendSuper<void, NSDictionary<NSString*, id>*> (&s, @selector (setFullState:), state);
  145. }
  146. virtual bool allocateRenderResourcesAndReturnError (NSError **outError)
  147. {
  148. objc_super s = { getAudioUnit(), [AUAudioUnit class] };
  149. return (ObjCMsgSendSuper<BOOL, NSError**> (&s, @selector (allocateRenderResourcesAndReturnError:), outError) == YES);
  150. }
  151. virtual void deallocateRenderResources()
  152. {
  153. objc_super s = { getAudioUnit(), [AUAudioUnit class] };
  154. ObjCMsgSendSuper<void> (&s, @selector (deallocateRenderResources));
  155. }
  156. private:
  157. struct Class : public ObjCClass<AUAudioUnit>
  158. {
  159. Class() : ObjCClass<AUAudioUnit> ("AUAudioUnit_")
  160. {
  161. addIvar<JuceAudioUnitv3Base*> ("cppObject");
  162. addMethod (@selector (initWithComponentDescription:options:error:juceClass:),
  163. initWithComponentDescriptionAndJuceClass, "@@:",
  164. @encode (AudioComponentDescription),
  165. @encode (AudioComponentInstantiationOptions), "^@@");
  166. addMethod (@selector (initWithComponentDescription:options:error:),
  167. initWithComponentDescription, "@@:",
  168. @encode (AudioComponentDescription),
  169. @encode (AudioComponentInstantiationOptions), "^@");
  170. addMethod (@selector (dealloc), dealloc, "v@:");
  171. addMethod (@selector (inputBusses), getInputBusses, "@@:");
  172. addMethod (@selector (outputBusses), getOutputBusses, "@@:");
  173. addMethod (@selector (parameterTree), getParameterTree, "@@:");
  174. addMethod (@selector (deallocateRenderResources), deallocateRenderResources, "v@:");
  175. addMethod (@selector (reset), reset, "v@:");
  176. addMethod (@selector (shouldChangeToFormat:forBus:), shouldChangeToFormat, "B@:@@");
  177. addMethod (@selector (factoryPresets), getFactoryPresets, "@@:");
  178. addMethod (@selector (currentPreset), getCurrentPreset, "@@:");
  179. addMethod (@selector (setCurrentPreset:), setCurrentPreset, "v@:@");
  180. addMethod (@selector (fullState), getFullState, "@@:");
  181. addMethod (@selector (setFullState:), setFullState, "v@:@");
  182. addMethod (@selector (channelCapabilities), getChannelCapabilities, "@@:");
  183. addMethod (@selector (allocateRenderResourcesAndReturnError:), allocateRenderResourcesAndReturnError, "B@:^@");
  184. addMethod (@selector (parametersForOverviewWithCount:), parametersForOverviewWithCount, "@@:", @encode (NSInteger));
  185. addMethod (@selector (setRenderingOffline:), setRenderingOffline, "v@:", @encode (BOOL));
  186. addMethod (@selector (internalRenderBlock), getInternalRenderBlock, @encode (AUInternalRenderBlock), "@:");
  187. addMethod (@selector (virtualMIDICableCount), getVirtualMIDICableCount, @encode (NSInteger), "@:");
  188. addMethod (@selector (latency), getLatency, @encode (NSTimeInterval), "@:");
  189. addMethod (@selector (tailTime), getTailTime, @encode (NSTimeInterval), "@:");
  190. addMethod (@selector (canProcessInPlace), getCanProcessInPlace, @encode (BOOL), "@:");
  191. addMethod (@selector (isRenderingOffline), getRenderingOffline, @encode (BOOL), "@:");
  192. registerClass();
  193. }
  194. //==============================================================================
  195. static JuceAudioUnitv3Base* _this (id self) { return getIvar<JuceAudioUnitv3Base*> (self, "cppObject"); }
  196. static void setThis (id self, JuceAudioUnitv3Base* cpp) { object_setInstanceVariable (self, "cppObject", cpp); }
  197. //==============================================================================
  198. static id initWithComponentDescription (id _self, SEL, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error)
  199. {
  200. AUAudioUnit* self = _self;
  201. objc_super s = { self, [AUAudioUnit class] };
  202. self = ObjCMsgSendSuper<AUAudioUnit*, AudioComponentDescription,
  203. AudioComponentInstantiationOptions, NSError**> (&s, @selector(initWithComponentDescription:options:error:), descr, options, error);
  204. JuceAudioUnitv3Base* juceAU = JuceAudioUnitv3Base::create (self, descr, options, error);
  205. setThis (self, juceAU);
  206. return self;
  207. }
  208. static id initWithComponentDescriptionAndJuceClass (id _self, SEL, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error, JuceAudioUnitv3Base* juceAU)
  209. {
  210. AUAudioUnit* self = _self;
  211. objc_super s = { self, [AUAudioUnit class] };
  212. self = ObjCMsgSendSuper<AUAudioUnit*, AudioComponentDescription,
  213. AudioComponentInstantiationOptions, NSError**> (&s, @selector(initWithComponentDescription:options:error:), descr, options, error);
  214. setThis (self, juceAU);
  215. return self;
  216. }
  217. static void dealloc (id self, SEL) { delete _this (self); }
  218. static AUAudioUnitBusArray* getInputBusses (id self, SEL) { return _this (self)->getInputBusses(); }
  219. static AUAudioUnitBusArray* getOutputBusses (id self, SEL) { return _this (self)->getOutputBusses(); }
  220. static AUParameterTree* getParameterTree (id self, SEL) { return _this (self)->getParameterTree(); }
  221. static AUInternalRenderBlock getInternalRenderBlock (id self, SEL) { return _this (self)->getInternalRenderBlock(); }
  222. static BOOL allocateRenderResourcesAndReturnError (id self, SEL, NSError** error) { return _this (self)->allocateRenderResourcesAndReturnError (error) ? YES : NO; }
  223. static void deallocateRenderResources (id self, SEL) { _this (self)->deallocateRenderResources(); }
  224. static void reset (id self, SEL) { _this (self)->reset(); }
  225. static NSInteger getVirtualMIDICableCount (id self, SEL) { return _this (self)->getVirtualMIDICableCount(); }
  226. static BOOL shouldChangeToFormat (id self, SEL, AVAudioFormat* format, AUAudioUnitBus* bus) { return _this (self)->shouldChangeToFormat (format, bus) ? YES : NO; }
  227. static NSArray<NSNumber*>* parametersForOverviewWithCount (id self, SEL, NSInteger count) { return _this (self)->parametersForOverviewWithCount (static_cast<int> (count)); }
  228. static NSArray<AUAudioUnitPreset*>* getFactoryPresets (id self, SEL) { return _this (self)->getFactoryPresets(); }
  229. static AUAudioUnitPreset* getCurrentPreset (id self, SEL) { return _this (self)->getCurrentPreset(); }
  230. static void setCurrentPreset (id self, SEL, AUAudioUnitPreset* preset) { return _this (self)->setCurrentPreset (preset); }
  231. static NSDictionary<NSString*, id>* getFullState (id self, SEL) { return _this (self)->getFullState(); }
  232. static void setFullState (id self, SEL, NSDictionary<NSString *, id>* state) { return _this (self)->setFullState (state); }
  233. static NSTimeInterval getLatency (id self, SEL) { return _this (self)->getLatency(); }
  234. static NSTimeInterval getTailTime (id self, SEL) { return _this (self)->getTailTime(); }
  235. static BOOL getCanProcessInPlace (id self, SEL) { return _this (self)->getCanProcessInPlace() ? YES : NO; }
  236. static BOOL getRenderingOffline (id self, SEL) { return _this (self)->getRenderingOffline() ? YES : NO; }
  237. static void setRenderingOffline (id self, SEL, BOOL renderingOffline) { _this (self)->setRenderingOffline (renderingOffline); }
  238. static NSArray<NSNumber*>* getChannelCapabilities (id self, SEL) { return _this (self)->getChannelCapabilities(); }
  239. };
  240. static JuceAudioUnitv3Base* create (AUAudioUnit*, AudioComponentDescription, AudioComponentInstantiationOptions, NSError**);
  241. //==============================================================================
  242. static Class audioUnitObjCClass;
  243. protected:
  244. AUAudioUnit* au;
  245. };
  246. //==============================================================================
  247. JuceAudioUnitv3Base::Class JuceAudioUnitv3Base::audioUnitObjCClass;
  248. //==============================================================================
  249. //=========================== The actual AudioUnit =============================
  250. //==============================================================================
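// Concrete AUv3 implementation: wraps the JUCE AudioProcessor, listens for its
// parameter and state changes, and acts as its AudioPlayHead.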
  251. class JuceAudioUnitv3 : public JuceAudioUnitv3Base,
  252. public AudioProcessorListener,
  253. public AudioPlayHead
  254. {
  255. public:
  256. JuceAudioUnitv3 (const AudioProcessorHolder::Ptr& processor,
  257. const AudioComponentDescription& descr,
  258. AudioComponentInstantiationOptions options,
  259. NSError** error)
  260. : JuceAudioUnitv3Base (descr, options, error),
  261. processorHolder (processor),
  262. mapper (*processorHolder->get())
  263. {
  264. init();
  265. }
  266. JuceAudioUnitv3 (AUAudioUnit* audioUnit, AudioComponentDescription, AudioComponentInstantiationOptions, NSError**)
  267. : JuceAudioUnitv3Base (audioUnit),
  268. processorHolder (new AudioProcessorHolder (createPluginFilterOfType (AudioProcessor::wrapperType_AudioUnitv3))),
  269. mapper (*processorHolder->get())
  270. {
  271. init();
  272. }
  273. ~JuceAudioUnitv3()
  274. {
  275. auto& processor = getAudioProcessor();
  276. processor.removeListener (this);
  277. removeEditor (processor);
  278. if (editorObserverToken != nullptr)
  279. {
  280. [paramTree removeParameterObserver: editorObserverToken];
  281. editorObserverToken = nullptr;
  282. }
  283. }
  284. //==============================================================================
  285. void init()
  286. {
  287. AudioProcessor& processor = getAudioProcessor();
  288. const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];
  289. #ifdef JucePlugin_PreferredChannelConfigurations
  290. short configs[][2] = {JucePlugin_PreferredChannelConfigurations};
  291. const int numConfigs = sizeof (configs) / sizeof (short[2]);
  292. jassert (numConfigs > 0 && (configs[0][0] > 0 || configs[0][1] > 0));
  293. processor.setPlayConfigDetails (configs[0][0], configs[0][1], kDefaultSampleRate, static_cast<int> (maxFrames));
  294. Array<AUChannelInfo> channelInfos;
  295. for (int i = 0; i < numConfigs; ++i)
  296. {
  297. AUChannelInfo channelInfo;
  298. channelInfo.inChannels = configs[i][0];
  299. channelInfo.outChannels = configs[i][1];
  300. channelInfos.add (channelInfo);
  301. }
  302. #else
  303. Array<AUChannelInfo> channelInfos = AudioUnitHelpers::getAUChannelInfo (processor);
  304. #endif
  305. processor.setPlayHead (this);
  306. totalInChannels = processor.getTotalNumInputChannels();
  307. totalOutChannels = processor.getTotalNumOutputChannels();
  308. {
  309. channelCapabilities = [[NSMutableArray<NSNumber*> alloc] init];
  310. for (int i = 0; i < channelInfos.size(); ++i)
  311. {
  312. AUChannelInfo& info = channelInfos.getReference (i);
  313. [channelCapabilities addObject: [NSNumber numberWithInteger: info.inChannels]];
  314. [channelCapabilities addObject: [NSNumber numberWithInteger: info.outChannels]];
  315. }
  316. }
  317. editorObserverToken = nullptr;
  318. internalRenderBlock = CreateObjCBlock (this, &JuceAudioUnitv3::renderCallback);
  319. processor.setRateAndBufferSizeDetails (kDefaultSampleRate, static_cast<int> (maxFrames));
  320. processor.prepareToPlay (kDefaultSampleRate, static_cast<int> (maxFrames));
  321. processor.addListener (this);
  322. addParameters();
  323. addPresets();
  324. addAudioUnitBusses (true);
  325. addAudioUnitBusses (false);
  326. }
  327. //==============================================================================
  328. AudioProcessor& getAudioProcessor() const noexcept { return **processorHolder; }
  329. AUAudioUnitBusArray* getInputBusses() override { return inputBusses; }
  330. AUAudioUnitBusArray* getOutputBusses() override { return outputBusses; }
  331. AUParameterTree* getParameterTree() override { return paramTree; }
  332. AUInternalRenderBlock getInternalRenderBlock() override { return internalRenderBlock; }
  333. NSArray<AUAudioUnitPreset*>* getFactoryPresets() override { return factoryPresets; }
  334. bool getRenderingOffline() override { return getAudioProcessor().isNonRealtime(); }
  335. void setRenderingOffline (bool offline) override { getAudioProcessor().setNonRealtime (offline); }
  336. NSArray<NSNumber*>* getChannelCapabilities() override { return channelCapabilities; }
  337. //==============================================================================
  338. AUAudioUnitPreset* getCurrentPreset() override
  339. {
  340. const int n = static_cast<int> ([factoryPresets count]);
  341. const int idx = static_cast<int> (getAudioProcessor().getCurrentProgram());
  342. if (idx < n)
  343. return [factoryPresets objectAtIndex:static_cast<unsigned int> (idx)];
  344. return nullptr;
  345. }
  346. void setCurrentPreset(AUAudioUnitPreset* preset) override
  347. {
  348. const int n = static_cast<int> ([factoryPresets count]);
  349. const int idx = static_cast<int> ([preset number]);
  350. if (isPositiveAndBelow (idx, n))
  351. getAudioProcessor().setCurrentProgram (idx);
  352. }
  353. //==============================================================================
  354. NSDictionary<NSString*, id>* getFullState() override
  355. {
  356. NSMutableDictionary<NSString*, id>* retval = [[NSMutableDictionary<NSString*, id> alloc] init];
  357. {
  358. NSDictionary<NSString*, id>* superRetval = JuceAudioUnitv3Base::getFullState();
  359. if (superRetval != nullptr)
  360. [retval addEntriesFromDictionary:superRetval];
  361. }
  362. juce::MemoryBlock state;
  363. getAudioProcessor().getCurrentProgramStateInformation (state);
  364. if (state.getSize() > 0)
  365. {
  366. NSData* ourState = [[NSData alloc] initWithBytes: state.getData()
  367. length: state.getSize()];
  368. NSString* nsKey = [[NSString alloc] initWithUTF8String: JUCE_STATE_DICTIONARY_KEY];
  369. [retval setObject: ourState
  370. forKey: nsKey];
  371. [nsKey release];
  372. [ourState release];
  373. }
  374. return [retval autorelease];
  375. }
  376. void setFullState (NSDictionary<NSString*, id>* state) override
  377. {
  378. if (state == nullptr)
  379. return;
  380. NSMutableDictionary<NSString*, id>* modifiedState = [[NSMutableDictionary<NSString*, id> alloc] init];
  381. [modifiedState addEntriesFromDictionary: state];
  382. NSString* nsPresetKey = [[NSString alloc] initWithUTF8String: kAUPresetDataKey];
  383. [modifiedState removeObjectForKey: nsPresetKey];
  384. [nsPresetKey release];
  385. JuceAudioUnitv3Base::setFullState (modifiedState);
  386. NSString* nsKey = [[NSString alloc] initWithUTF8String: JUCE_STATE_DICTIONARY_KEY];
  387. NSObject* obj = [modifiedState objectForKey: nsKey];
  388. [nsKey release];
  389. if (obj != nullptr)
  390. {
  391. if ([obj isKindOfClass:[NSData class]])
  392. {
  393. NSData* data = reinterpret_cast<NSData*> (obj);
  394. const int numBytes = static_cast<int> ([data length]);
  395. const juce::uint8* const rawBytes = reinterpret_cast< const juce::uint8* const> ([data bytes]);
  396. if (numBytes > 0)
  397. getAudioProcessor().setCurrentProgramStateInformation (rawBytes, numBytes);
  398. }
  399. }
  400. [modifiedState release];
  401. }
  402. //==============================================================================
  403. NSArray<NSNumber*>* parametersForOverviewWithCount (int count) override
  404. {
  405. const int n = static_cast<int> ([overviewParams count]);
  406. if (count >= n)
  407. return overviewParams;
  408. NSMutableArray<NSNumber*>* retval = [[NSMutableArray<NSNumber*>alloc] initWithArray: overviewParams];
  409. [retval removeObjectsInRange: NSMakeRange (static_cast<unsigned int> (count), static_cast<unsigned int> (n - count))];
  410. return [retval autorelease];
  411. }
  412. int getVirtualMIDICableCount() override
  413. {
  414. #if JucePlugin_WantsMidiInput
  415. return 1;
  416. #else
  417. return 0;
  418. #endif
  419. }
  420. //==============================================================================
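// Called by the host before rendering starts: negotiates the bus layouts from the
// current AVAudioFormats, (re)allocates the bus and channel-mapping buffers, and
// caches the host's musical-context and transport-state blocks.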
  421. bool allocateRenderResourcesAndReturnError (NSError **outError) override
  422. {
  423. AudioProcessor& processor = getAudioProcessor();
  424. const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];
  425. if (! JuceAudioUnitv3Base::allocateRenderResourcesAndReturnError (outError))
  426. return false;
  427. if (outError != nullptr)
  428. *outError = nullptr;
  429. AudioProcessor::BusesLayout layouts;
  430. for (int dir = 0; dir < 2; ++dir)
  431. {
  432. const bool isInput = (dir == 0);
  433. const int n = AudioUnitHelpers::getBusCount (&processor, isInput);
  434. Array<AudioChannelSet>& channelSets = (isInput ? layouts.inputBuses : layouts.outputBuses);
  435. AUAudioUnitBusArray* auBuses = (isInput ? [getAudioUnit() inputBusses] : [getAudioUnit() outputBusses]);
  436. jassert ([auBuses count] == static_cast<NSUInteger> (n));
  437. for (int busIdx = 0; busIdx < n; ++busIdx)
  438. {
  439. AudioProcessor::Bus* bus = processor.getBus (isInput, busIdx);
  440. AVAudioFormat* format = [[auBuses objectAtIndexedSubscript:static_cast<NSUInteger> (busIdx)] format];
  441. AudioChannelSet newLayout;
  442. const AVAudioChannelLayout* layout = [format channelLayout];
  443. const AudioChannelLayoutTag layoutTag = (layout != nullptr ? [layout layoutTag] : 0);
  444. if (layoutTag != 0)
  445. newLayout = AudioUnitHelpers::CALayoutTagToChannelSet (layoutTag);
  446. else
  447. newLayout = bus->supportedLayoutWithChannels (static_cast<int> ([format channelCount]));
  448. if (newLayout.isDisabled())
  449. return false;
  450. channelSets.add (newLayout);
  451. }
  452. }
  453. #ifdef JucePlugin_PreferredChannelConfigurations
  454. short configs[][2] = {JucePlugin_PreferredChannelConfigurations};
  455. if (! AudioProcessor::containsLayout (layouts, configs))
  456. {
  457. if (outError != nullptr)
  458. *outError = [NSError errorWithDomain:NSOSStatusErrorDomain code:kAudioUnitErr_FormatNotSupported userInfo:nullptr];
  459. return false;
  460. }
  461. #endif
  462. if (! AudioUnitHelpers::setBusesLayout (&getAudioProcessor(), layouts))
  463. {
  464. if (outError != nullptr)
  465. *outError = [NSError errorWithDomain:NSOSStatusErrorDomain code:kAudioUnitErr_FormatNotSupported userInfo:nullptr];
  466. return false;
  467. }
  468. totalInChannels = processor.getTotalNumInputChannels();
  469. totalOutChannels = processor.getTotalNumOutputChannels();
  470. allocateBusBuffer (true);
  471. allocateBusBuffer (false);
  472. mapper.alloc();
  473. audioBuffer.prepare (totalInChannels, totalOutChannels, static_cast<int> (maxFrames));
  474. double sampleRate = (jmax (AudioUnitHelpers::getBusCount (&processor, true), AudioUnitHelpers::getBusCount (&processor, false)) > 0 ?
  475. [[[([inputBusses count] > 0 ? inputBusses : outputBusses) objectAtIndexedSubscript: 0] format] sampleRate] : 44100.0);
  476. processor.setRateAndBufferSizeDetails (sampleRate, static_cast<int> (maxFrames));
  477. processor.prepareToPlay (sampleRate, static_cast<int> (maxFrames));
  478. zeromem (&lastAudioHead, sizeof (lastAudioHead));
  479. hostMusicalContextCallback = [getAudioUnit() musicalContextBlock];
  480. hostTransportStateCallback = [getAudioUnit() transportStateBlock];
  481. reset();
  482. return true;
  483. }
  484. void deallocateRenderResources() override
  485. {
  486. hostMusicalContextCallback = nullptr;
  487. hostTransportStateCallback = nullptr;
  488. getAudioProcessor().releaseResources();
  489. audioBuffer.release();
  490. inBusBuffers. clear();
  491. outBusBuffers.clear();
  492. mapper.release();
  493. JuceAudioUnitv3Base::deallocateRenderResources();
  494. }
  495. void reset() override
  496. {
  497. midiMessages.clear();
  498. lastTimeStamp.mSampleTime = std::numeric_limits<Float64>::max();
  499. }
  500. //==============================================================================
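// Format negotiation: accept a new bus format only if the corresponding JUCE bus
// supports the requested layout (or channel count).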
  501. bool shouldChangeToFormat (AVAudioFormat* format, AUAudioUnitBus* auBus) override
  502. {
  503. const bool isInput = ([auBus busType] == AUAudioUnitBusTypeInput);
  504. const int busIdx = static_cast<int> ([auBus index]);
  505. const int newNumChannels = static_cast<int> ([format channelCount]);
  506. AudioProcessor& processor = getAudioProcessor();
  507. if (AudioProcessor::Bus* bus = processor.getBus (isInput, busIdx))
  508. {
  509. #ifdef JucePlugin_PreferredChannelConfigurations
  510. ignoreUnused (bus);
  511. short configs[][2] = {JucePlugin_PreferredChannelConfigurations};
  512. if (! AudioUnitHelpers::isLayoutSupported (processor, isInput, busIdx, newNumChannels, configs))
  513. return false;
  514. #else
  515. const AVAudioChannelLayout* layout = [format channelLayout];
  516. const AudioChannelLayoutTag layoutTag = (layout != nullptr ? [layout layoutTag] : 0);
  517. if (layoutTag != 0)
  518. {
  519. AudioChannelSet newLayout = AudioUnitHelpers::CALayoutTagToChannelSet (layoutTag);
  520. if (newLayout.size() != newNumChannels)
  521. return false;
  522. if (! bus->isLayoutSupported (newLayout))
  523. return false;
  524. }
  525. else
  526. {
  527. if (! bus->isNumberOfChannelsSupported (newNumChannels))
  528. return false;
  529. }
  530. #endif
  531. return true;
  532. }
  533. return false;
  534. }
  535. //==============================================================================
  536. void audioProcessorChanged (AudioProcessor* processor) override
  537. {
  538. ignoreUnused (processor);
  539. [au willChangeValueForKey: @"allParameterValues"];
  540. [au didChangeValueForKey: @"allParameterValues"];
  541. }
  542. void audioProcessorParameterChanged (AudioProcessor*, int idx, float newValue) override
  543. {
  544. if (isPositiveAndBelow (idx, getAudioProcessor().getNumParameters()))
  545. {
  546. if (AUParameter* param = [paramTree parameterWithAddress: getAUParameterAddressForIndex (idx)])
  547. {
  548. if (editorObserverToken != nullptr)
  549. [param setValue: newValue originator: editorObserverToken];
  550. else
  551. [param setValue: newValue];
  552. }
  553. }
  554. }
  555. //==============================================================================
  556. NSTimeInterval getLatency() override
  557. {
  558. auto& p = getAudioProcessor();
  559. return p.getLatencySamples() / p.getSampleRate();
  560. }
  561. NSTimeInterval getTailTime() override { return getAudioProcessor().getTailLengthSeconds(); }
  562. //==============================================================================
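// AudioPlayHead implementation: fills the position info from the last render timestamp
// and, when available, from the host's musical-context and transport-state blocks.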
  563. bool getCurrentPosition (CurrentPositionInfo& info) override
  564. {
  565. bool musicContextCallSucceeded = false;
  566. bool transportStateCallSucceeded = false;
  567. info = lastAudioHead;
  568. info.timeInSamples = (int64) (lastTimeStamp.mSampleTime + 0.5);
  569. info.timeInSeconds = info.timeInSamples / getAudioProcessor().getSampleRate();
  570. switch (lastTimeStamp.mSMPTETime.mType)
  571. {
  572. case kSMPTETimeType24: info.frameRate = AudioPlayHead::fps24; break;
  573. case kSMPTETimeType25: info.frameRate = AudioPlayHead::fps25; break;
  574. case kSMPTETimeType2997: info.frameRate = AudioPlayHead::fps2997; break;
  575. case kSMPTETimeType2997Drop: info.frameRate = AudioPlayHead::fps2997drop; break;
  576. case kSMPTETimeType30Drop: info.frameRate = AudioPlayHead::fps30drop; break;
  577. case kSMPTETimeType30: info.frameRate = AudioPlayHead::fps30; break;
  578. case kSMPTETimeType60Drop: info.frameRate = AudioPlayHead::fps60drop; break;
  579. case kSMPTETimeType60: info.frameRate = AudioPlayHead::fps60; break;
  580. default: info.frameRate = AudioPlayHead::fpsUnknown; break;
  581. }
  582. double num;
  583. NSInteger den;
  584. NSInteger outDeltaSampleOffsetToNextBeat;
  585. double outCurrentMeasureDownBeat, bpm;
  586. double ppqPosition;
  587. if (hostMusicalContextCallback != nullptr)
  588. {
  589. AUHostMusicalContextBlock musicalContextCallback = hostMusicalContextCallback;
  590. if (musicalContextCallback (&bpm, &num, &den, &ppqPosition, &outDeltaSampleOffsetToNextBeat, &outCurrentMeasureDownBeat))
  591. {
  592. musicContextCallSucceeded = true;
  593. info.timeSigNumerator = (int) num;
  594. info.timeSigDenominator = (int) den;
  595. info.ppqPositionOfLastBarStart = outCurrentMeasureDownBeat;
  596. info.bpm = bpm;
  597. info.ppqPosition = ppqPosition;
  598. info.ppqPositionOfLastBarStart = outCurrentMeasureDownBeat;
  599. }
  600. }
  601. double outCurrentSampleInTimeLine, outCycleStartBeat = 0, outCycleEndBeat = 0;
  602. AUHostTransportStateFlags flags;
  603. if (hostTransportStateCallback != nullptr)
  604. {
  605. AUHostTransportStateBlock transportStateCallback = hostTransportStateCallback;
  606. if (transportStateCallback (&flags, &outCurrentSampleInTimeLine, &outCycleStartBeat, &outCycleEndBeat))
  607. {
  608. transportStateCallSucceeded = true;
  609. info.timeInSamples = (int64) (outCurrentSampleInTimeLine + 0.5);
  610. info.timeInSeconds = info.timeInSamples / getAudioProcessor().getSampleRate();
  611. info.isPlaying = ((flags & AUHostTransportStateMoving) != 0);
  612. info.isLooping = ((flags & AUHostTransportStateCycling) != 0);
  613. info.isRecording = ((flags & AUHostTransportStateRecording) != 0);
  614. info.ppqLoopStart = outCycleStartBeat;
  615. info.ppqLoopEnd = outCycleEndBeat;
  616. }
  617. }
  618. if (musicContextCallSucceeded && transportStateCallSucceeded)
  619. lastAudioHead = info;
  620. return true;
  621. }
  622. static void removeEditor (AudioProcessor& processor)
  623. {
  624. ScopedLock editorLock (processor.getCallbackLock());
  625. if (AudioProcessorEditor* editor = processor.getActiveEditor())
  626. {
  627. processor.editorBeingDeleted (editor);
  628. delete editor;
  629. }
  630. }
  631. private:
  632. //==============================================================================
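// Per-bus helper that owns an AudioBufferList plus scratch storage. prepare() points
// the buffer list either at host-supplied buffers (when compatible) or at the scratch
// buffer.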
  633. struct BusBuffer
  634. {
  635. BusBuffer (AUAudioUnitBus* bus, int maxFramesPerBuffer)
  636. : auBus (bus), bufferList (nullptr),
  637. maxFrames (maxFramesPerBuffer),
  638. numberOfChannels (static_cast<int> ([[auBus format] channelCount])),
  639. isInterleaved ([[auBus format] isInterleaved])
  640. {
  641. alloc();
  642. }
  643. //==============================================================================
  644. void alloc()
  645. {
  646. const int numBuffers = isInterleaved ? 1 : numberOfChannels;
  647. int bytes = static_cast<int> (sizeof (AudioBufferList))
  648. + ((numBuffers - 1) * static_cast<int> (sizeof (::AudioBuffer)));
  649. jassert (bytes > 0);
  650. bufferListStorage.calloc (static_cast<size_t> (bytes));
  651. bufferList = reinterpret_cast<AudioBufferList*> (bufferListStorage.getData());
  652. const int bufferChannels = isInterleaved ? numberOfChannels : 1;
  653. scratchBuffer.setSize (numBuffers, bufferChannels * maxFrames);
  654. }
  655. void dealloc()
  656. {
  657. bufferList = nullptr;
  658. bufferListStorage.free();
  659. scratchBuffer.setSize (0, 0);
  660. }
  661. //==============================================================================
  662. int numChannels() const noexcept { return numberOfChannels; }
  663. bool interleaved() const noexcept { return isInterleaved; }
  664. AudioBufferList* get() const noexcept { return bufferList; }
  665. //==============================================================================
  666. void prepare (UInt32 nFrames, const AudioBufferList* other = nullptr) noexcept
  667. {
  668. const int numBuffers = isInterleaved ? 1 : numberOfChannels;
  669. const bool isCompatible = isCompatibleWith (other);
  670. bufferList->mNumberBuffers = static_cast<UInt32> (numBuffers);
  671. for (int i = 0; i < numBuffers; ++i)
  672. {
  673. const UInt32 bufferChannels = static_cast<UInt32> (isInterleaved ? numberOfChannels : 1);
  674. bufferList->mBuffers[i].mNumberChannels = bufferChannels;
  675. bufferList->mBuffers[i].mData = (isCompatible ? other->mBuffers[i].mData
  676. : scratchBuffer.getWritePointer (i));
  677. bufferList->mBuffers[i].mDataByteSize = nFrames * bufferChannels * sizeof (float);
  678. }
  679. }
  680. //==============================================================================
  681. bool isCompatibleWith (const AudioBufferList* other) const noexcept
  682. {
  683. if (other == nullptr)
  684. return false;
  685. if (other->mNumberBuffers > 0)
  686. {
  687. const bool otherInterleaved = AudioUnitHelpers::isAudioBufferInterleaved (*other);
  688. const int otherChannels = static_cast<int> (otherInterleaved ? other->mBuffers[0].mNumberChannels
  689. : other->mNumberBuffers);
  690. return otherInterleaved == isInterleaved
  691. && numberOfChannels == otherChannels;
  692. }
  693. return numberOfChannels == 0;
  694. }
  695. private:
  696. AUAudioUnitBus* auBus;
  697. HeapBlock<char> bufferListStorage;
  698. AudioBufferList* bufferList;
  699. int maxFrames, numberOfChannels;
  700. bool isInterleaved;
  701. AudioSampleBuffer scratchBuffer;
  702. };
  703. //==============================================================================
  704. void addAudioUnitBusses (bool isInput)
  705. {
  706. ScopedPointer<NSMutableArray<AUAudioUnitBus*> > array = [[NSMutableArray<AUAudioUnitBus*> alloc] init];
  707. AudioProcessor& processor = getAudioProcessor();
  708. const int n = AudioUnitHelpers::getBusCount (&processor, isInput);
  709. for (int i = 0; i < n; ++i)
  710. {
  711. ScopedPointer<AUAudioUnitBus> audioUnitBus;
  712. {
  713. ScopedPointer<AVAudioFormat> defaultFormat = [[AVAudioFormat alloc] initStandardFormatWithSampleRate: kDefaultSampleRate
  714. channels: static_cast<AVAudioChannelCount> (processor.getChannelCountOfBus (isInput, i))];
  715. audioUnitBus = [[AUAudioUnitBus alloc] initWithFormat: defaultFormat
  716. error: nullptr];
  717. }
  718. [array addObject: audioUnitBus];
  719. }
  720. (isInput ? inputBusses : outputBusses) = [[AUAudioUnitBusArray alloc] initWithAudioUnit: au
  721. busType: (isInput ? AUAudioUnitBusTypeInput : AUAudioUnitBusTypeOutput)
  722. busses: array];
  723. }
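// Builds the AUParameterTree for all processor parameters and the mapping between
// JUCE parameter indices and AUParameterAddresses.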
  724. void addParameters()
  725. {
  726. ScopedPointer<NSMutableArray<AUParameterNode*> > params = [[NSMutableArray<AUParameterNode*> alloc] init];
  727. paramObserver = CreateObjCBlock (this, &JuceAudioUnitv3::valueChangedFromHost);
  728. paramProvider = CreateObjCBlock (this, &JuceAudioUnitv3::getValue);
  729. overviewParams = [[NSMutableArray<NSNumber*> alloc] init];
  730. auto& processor = getAudioProcessor();
  731. const int n = processor.getNumParameters();
  732. #if ! JUCE_FORCE_USE_LEGACY_PARAM_IDS
733. // check whether all parameters are managed
  734. usingManagedParameter = (processor.getParameters().size() == processor.getNumParameters());
  735. #endif
  736. for (int idx = 0; idx < n; ++idx)
  737. {
  738. const String identifier (idx);
  739. const String name = processor.getParameterName (idx);
  740. AudioUnitParameterUnit unit = kAudioUnitParameterUnit_Generic;
  741. AudioUnitParameterOptions flags = (UInt32) (kAudioUnitParameterFlag_IsWritable
  742. | kAudioUnitParameterFlag_IsReadable
  743. | kAudioUnitParameterFlag_HasCFNameString
  744. | kAudioUnitParameterFlag_ValuesHaveStrings);
  745. #if JucePlugin_AUHighResolutionParameters
  746. flags |= (UInt32) kAudioUnitParameterFlag_IsHighResolution;
  747. #endif
  748. // set whether the param is automatable (unnamed parameters aren't allowed to be automated)
  749. if (name.isEmpty() || ! processor.isParameterAutomatable (idx))
  750. flags |= kAudioUnitParameterFlag_NonRealTime;
  751. if (processor.isMetaParameter (idx))
  752. flags |= kAudioUnitParameterFlag_IsGlobalMeta;
  753. // is this a meter?
  754. if (((processor.getParameterCategory (idx) & 0xffff0000) >> 16) == 2)
  755. {
  756. flags &= ~kAudioUnitParameterFlag_IsWritable;
  757. flags |= kAudioUnitParameterFlag_MeterReadOnly | kAudioUnitParameterFlag_DisplayLogarithmic;
  758. unit = kAudioUnitParameterUnit_LinearGain;
  759. }
  760. #if JUCE_FORCE_USE_LEGACY_PARAM_IDS
  761. AUParameterAddress address = static_cast<AUParameterAddress> (idx);
  762. #else
  763. AUParameterAddress address = generateAUParameterAddressForIndex (idx);
764. // Consider yourself very unlucky if you hit this assertion: the hash codes of your
765. // parameter IDs are not unique.
  766. jassert (! paramMap.contains (static_cast<int64> (address)));
  767. paramAddresses.add (address);
  768. paramMap.set (static_cast<int64> (address), idx);
  769. #endif
770. // the create methods in AUParameterTree return unretained objects (!) -> see Apple's AUAudioUnitImplementation.h header
  771. ScopedPointer<AUParameter> param = [[AUParameterTree createParameterWithIdentifier: juceStringToNS (identifier)
  772. name: juceStringToNS (name)
  773. address: address
  774. min: 0.0f
  775. max: 1.0f
  776. unit: unit
  777. unitName: nullptr
  778. flags: flags
  779. valueStrings: nullptr
  780. dependentParameters: nullptr] retain];
  781. [params addObject: param];
  782. [overviewParams addObject: [NSNumber numberWithUnsignedLongLong:address]];
  783. }
784. // the create methods in AUParameterTree return unretained objects (!) -> see Apple's AUAudioUnitImplementation.h header
  785. paramTree = [[AUParameterTree createTreeWithChildren: params] retain];
  786. [paramTree setImplementorValueObserver: paramObserver];
  787. [paramTree setImplementorValueProvider: paramProvider];
  788. if (processor.hasEditor())
  789. {
  790. editorParamObserver = CreateObjCBlock (this, &JuceAudioUnitv3::valueChangedForObserver);
  791. editorObserverToken = [paramTree tokenByAddingParameterObserver: editorParamObserver];
  792. }
  793. }
  794. void addPresets()
  795. {
  796. factoryPresets = [[NSMutableArray<AUAudioUnitPreset*> alloc] init];
  797. const int n = getAudioProcessor().getNumPrograms();
  798. for (int idx = 0; idx < n; ++idx)
  799. {
  800. String name = getAudioProcessor().getProgramName (idx);
  801. ScopedPointer<AUAudioUnitPreset> preset = [[AUAudioUnitPreset alloc] init];
  802. [preset setName: juceStringToNS (name)];
  803. [preset setNumber: static_cast<NSInteger> (idx)];
  804. [factoryPresets addObject: preset];
  805. }
  806. }
  807. //==============================================================================
  808. void allocateBusBuffer (bool isInput)
  809. {
  810. OwnedArray<BusBuffer>& busBuffers = isInput ? inBusBuffers : outBusBuffers;
  811. busBuffers.clear();
  812. const int n = AudioUnitHelpers::getBusCount (&getAudioProcessor(), isInput);
  813. const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];
  814. for (int busIdx = 0; busIdx < n; ++busIdx)
  815. busBuffers.add (new BusBuffer ([(isInput ? inputBusses : outputBusses) objectAtIndexedSubscript: static_cast<unsigned int> (busIdx)],
  816. static_cast<int> (maxFrames)));
  817. }
  818. void processEvents (const AURenderEvent *__nullable realtimeEventListHead, int numParams, AUEventSampleTime startTime)
  819. {
  820. for (const AURenderEvent* event = realtimeEventListHead; event != nullptr; event = event->head.next)
  821. {
  822. switch (event->head.eventType)
  823. {
  824. case AURenderEventMIDI:
  825. {
  826. const AUMIDIEvent& midiEvent = event->MIDI;
  827. midiMessages.addEvent (midiEvent.data, midiEvent.length, static_cast<int> (midiEvent.eventSampleTime - startTime));
  828. }
  829. break;
  830. case AURenderEventParameter:
  831. case AURenderEventParameterRamp:
  832. {
  833. const AUParameterEvent& paramEvent = event->parameter;
  834. const int idx = getJuceParameterIndexForAUAddress (paramEvent.parameterAddress);
  835. if (isPositiveAndBelow (idx, numParams))
  836. getAudioProcessor().setParameter (idx, paramEvent.value);
  837. }
  838. break;
  839. default:
  840. break;
  841. }
  842. }
  843. }
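// Realtime render entry point: handles incoming MIDI and parameter events, pulls the
// input busses, remaps channels into a single JUCE buffer, calls processBlock() and
// copies the result back to the requested output bus.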
  844. AUAudioUnitStatus renderCallback (AudioUnitRenderActionFlags* actionFlags, const AudioTimeStamp* timestamp, AUAudioFrameCount frameCount,
  845. NSInteger outputBusNumber, AudioBufferList* outputData, const AURenderEvent *__nullable realtimeEventListHead,
  846. AURenderPullInputBlock __nullable pullInputBlock)
  847. {
  848. auto& processor = getAudioProcessor();
  849. jassert (static_cast<int> (frameCount) <= getAudioProcessor().getBlockSize());
  850. // process params
  851. const int numParams = processor.getNumParameters();
  852. processEvents (realtimeEventListHead, numParams, static_cast<AUEventSampleTime> (timestamp->mSampleTime));
  853. if (lastTimeStamp.mSampleTime != timestamp->mSampleTime)
  854. {
  855. lastTimeStamp = *timestamp;
  856. const int numInputBuses = inBusBuffers. size();
  857. const int numOutputBuses = outBusBuffers.size();
  858. // prepare buffers
  859. {
  860. for (int busIdx = 0; busIdx < numOutputBuses; ++busIdx)
  861. {
  862. BusBuffer& busBuffer = *outBusBuffers[busIdx];
  863. const bool canUseDirectOutput =
  864. (busIdx == outputBusNumber && outputData != nullptr && outputData->mNumberBuffers > 0);
  865. busBuffer.prepare (frameCount, canUseDirectOutput ? outputData : nullptr);
  866. }
  867. for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
  868. {
  869. BusBuffer& busBuffer = *inBusBuffers[busIdx];
  870. busBuffer.prepare (frameCount, busIdx < numOutputBuses ? outBusBuffers[busIdx]->get() : nullptr);
  871. }
  872. audioBuffer.reset();
  873. }
  874. // pull inputs
  875. {
  876. for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
  877. {
  878. BusBuffer& busBuffer = *inBusBuffers[busIdx];
  879. AudioBufferList* buffer = busBuffer.get();
  880. if (pullInputBlock == nullptr || pullInputBlock (actionFlags, timestamp, frameCount, busIdx, buffer) != noErr)
  881. AudioUnitHelpers::clearAudioBuffer (*buffer);
  882. if (actionFlags != nullptr && (*actionFlags & kAudioUnitRenderAction_OutputIsSilence) != 0)
  883. AudioUnitHelpers::clearAudioBuffer (*buffer);
  884. }
  885. }
  886. // set buffer pointer to minimize copying
  887. {
  888. int chIdx = 0;
  889. for (int busIdx = 0; busIdx < numOutputBuses; ++busIdx)
  890. {
  891. BusBuffer& busBuffer = *outBusBuffers[busIdx];
  892. AudioBufferList* buffer = busBuffer.get();
  893. const bool interleaved = busBuffer.interleaved();
  894. const int numChannels = busBuffer.numChannels();
  895. const int* outLayoutMap = mapper.get (false, busIdx);
  896. for (int ch = 0; ch < numChannels; ++ch)
  897. audioBuffer.setBuffer (chIdx++, interleaved ? nullptr : static_cast<float*> (buffer->mBuffers[outLayoutMap[ch]].mData));
  898. }
  899. // use input pointers on remaining channels
  900. for (int busIdx = 0; chIdx < totalInChannels;)
  901. {
  902. const int channelOffset = processor.getOffsetInBusBufferForAbsoluteChannelIndex (true, chIdx, busIdx);
  903. BusBuffer& busBuffer = *inBusBuffers[busIdx];
  904. AudioBufferList* buffer = busBuffer.get();
  905. const int* inLayoutMap = mapper.get (true, busIdx);
  906. audioBuffer.setBuffer (chIdx++, busBuffer.interleaved() ? nullptr : static_cast<float*> (buffer->mBuffers[inLayoutMap[channelOffset]].mData));
  907. }
  908. }
  909. // copy input
  910. {
  911. for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
  912. audioBuffer.push (*inBusBuffers[busIdx]->get(), mapper.get (true, busIdx));
  913. // clear remaining channels
  914. for (int i = totalInChannels; i < totalOutChannels; ++i)
  915. zeromem (audioBuffer.push(), sizeof (float) * frameCount);
  916. }
  917. // process audio
  918. processBlock (audioBuffer.getBuffer (frameCount), midiMessages);
  919. midiMessages.clear();
  920. }
  921. // copy back
  922. audioBuffer.pop (*outBusBuffers[(int) outputBusNumber]->get(),
  923. mapper.get (false, (int) outputBusNumber));
  924. return noErr;
  925. }
  926. void processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiBuffer) noexcept
  927. {
  928. auto& processor = getAudioProcessor();
  929. const ScopedLock sl (processor.getCallbackLock());
  930. if (processor.isSuspended())
  931. buffer.clear();
  932. else if ([au shouldBypassEffect])
  933. processor.processBlockBypassed (buffer, midiBuffer);
  934. else
  935. processor.processBlock (buffer, midiBuffer);
  936. }
  937. //==============================================================================
  938. void valueChangedFromHost (AUParameter* param, AUValue value)
  939. {
  940. if (param != nullptr)
  941. {
  942. const int idx = getJuceParameterIndexForAUAddress ([param address]);
  943. auto& processor = getAudioProcessor();
  944. if (isPositiveAndBelow (idx, processor.getNumParameters()))
  945. processor.setParameter (idx, value);
  946. }
  947. }
  948. AUValue getValue (AUParameter* param)
  949. {
  950. if (param != nullptr)
  951. {
  952. const int idx = getJuceParameterIndexForAUAddress ([param address]);
  953. auto& processor = getAudioProcessor();
  954. if (isPositiveAndBelow (idx, processor.getNumParameters()))
  955. return processor.getParameter (idx);
  956. }
  957. return 0;
  958. }
  959. void valueChangedForObserver(AUParameterAddress, AUValue)
  960. {
961. // this will have already been handled by valueChangedFromHost
  962. }
  963. //==============================================================================
  964. #if JUCE_FORCE_USE_LEGACY_PARAM_IDS
  965. inline AUParameterAddress getAUParameterAddressForIndex (int paramIndex) const noexcept { return static_cast<AUParameterAddress> (paramIndex); }
  966. inline int getJuceParameterIndexForAUAddress (AUParameterAddress address) const noexcept { return static_cast<int> (address); }
  967. #else
  968. AUParameterAddress generateAUParameterAddressForIndex (int paramIndex) const
  969. {
  970. auto& processor = getAudioProcessor();
  971. const int n = processor.getNumParameters();
  972. if (isPositiveAndBelow (paramIndex, n))
  973. {
  974. const String& juceParamID = processor.getParameterID (paramIndex);
  975. return usingManagedParameter ? static_cast<AUParameterAddress> (juceParamID.hashCode64())
  976. : static_cast<AUParameterAddress> (juceParamID.getIntValue());
  977. }
  978. return static_cast<AUParameterAddress> (-1);
  979. }
  980. inline AUParameterAddress getAUParameterAddressForIndex (int paramIndex) const noexcept
  981. {
  982. return usingManagedParameter ? paramAddresses.getReference (paramIndex)
  983. : static_cast<AUParameterAddress> (paramIndex);
  984. }
  985. inline int getJuceParameterIndexForAUAddress (AUParameterAddress address) const noexcept
  986. {
  987. return usingManagedParameter ? paramMap[static_cast<int64> (address)]
  988. : static_cast<int> (address);
  989. }
  990. #endif
  991. //==============================================================================
  992. static const double kDefaultSampleRate;
  993. AudioProcessorHolder::Ptr processorHolder;
  994. int totalInChannels, totalOutChannels;
  995. ScopedPointer<AUAudioUnitBusArray> inputBusses;
  996. ScopedPointer<AUAudioUnitBusArray> outputBusses;
  997. ObjCBlock<AUImplementorValueObserver> paramObserver;
  998. ObjCBlock<AUImplementorValueProvider> paramProvider;
  999. #if ! JUCE_FORCE_USE_LEGACY_PARAM_IDS
  1000. bool usingManagedParameter;
  1001. Array<AUParameterAddress> paramAddresses;
  1002. HashMap<int64, int> paramMap;
  1003. #endif
  1004. // to avoid recursion on parameter changes, we need to add an
  1005. // editor observer to do the parameter changes
  1006. ObjCBlock<AUParameterObserver> editorParamObserver;
  1007. AUParameterObserverToken editorObserverToken;
  1008. ScopedPointer<AUParameterTree> paramTree;
  1009. ScopedPointer<NSMutableArray<NSNumber*> > overviewParams;
  1010. ScopedPointer<NSMutableArray<NSNumber*> > channelCapabilities;
  1011. ScopedPointer<NSMutableArray<AUAudioUnitPreset*> > factoryPresets;
  1012. ObjCBlock<AUInternalRenderBlock> internalRenderBlock;
  1013. AudioUnitHelpers::CoreAudioBufferList audioBuffer;
  1014. AudioUnitHelpers::ChannelRemapper mapper;
  1015. OwnedArray<BusBuffer> inBusBuffers, outBusBuffers;
  1016. MidiBuffer midiMessages;
  1017. ObjCBlock<AUHostMusicalContextBlock> hostMusicalContextCallback;
  1018. ObjCBlock<AUHostTransportStateBlock> hostTransportStateCallback;
  1019. AudioTimeStamp lastTimeStamp;
  1020. CurrentPositionInfo lastAudioHead;
  1021. };
  1022. const double JuceAudioUnitv3::kDefaultSampleRate = 44100.0;
  1023. JuceAudioUnitv3Base* JuceAudioUnitv3Base::create (AUAudioUnit* audioUnit, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error)
  1024. {
  1025. PluginHostType::jucePlugInClientCurrentWrapperType = AudioProcessor::wrapperType_AudioUnitv3;
  1026. return new JuceAudioUnitv3 (audioUnit, descr, options, error);
  1027. }
  1028. //==============================================================================
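// Backs the AUViewController<AUAudioUnitFactory> glue class below: creates the plug-in
// editor view and instantiates the audio unit on the JUCE message thread.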
  1029. class JuceAUViewController
  1030. {
  1031. public:
  1032. JuceAUViewController (AUViewController<AUAudioUnitFactory>* p)
  1033. : myself (p), processorHolder (nullptr), preferredSize (1.0f, 1.0f)
  1034. {
  1035. jassert (MessageManager::getInstance()->isThisTheMessageThread());
  1036. PluginHostType::jucePlugInClientCurrentWrapperType = AudioProcessor::wrapperType_AudioUnitv3;
  1037. initialiseJuce_GUI();
  1038. }
  1039. ~JuceAUViewController()
  1040. {
  1041. jassert (MessageManager::getInstance()->isThisTheMessageThread());
  1042. if (processorHolder != nullptr)
  1043. JuceAudioUnitv3::removeEditor (getAudioProcessor());
  1044. }
  1045. //==============================================================================
  1046. void loadView()
  1047. {
  1048. jassert (MessageManager::getInstance()->isThisTheMessageThread());
  1049. if (AudioProcessor* p = createPluginFilterOfType (AudioProcessor::wrapperType_AudioUnitv3))
  1050. {
  1051. processorHolder = new AudioProcessorHolder (p);
  1052. auto& processor = getAudioProcessor();
  1053. if (processor.hasEditor())
  1054. {
  1055. if (AudioProcessorEditor* editor = processor.createEditorIfNeeded())
  1056. {
  1057. preferredSize = editor->getBounds();
  1058. JUCE_IOS_MAC_VIEW* view = [[[JUCE_IOS_MAC_VIEW alloc] initWithFrame: convertToCGRect (editor->getBounds())] autorelease];
  1059. [myself setView: view];
  1060. editor->setVisible (true);
  1061. editor->addToDesktop (0, view);
  1062. }
  1063. }
  1064. }
  1065. }
  1066. void viewDidLayoutSubviews()
  1067. {
  1068. if (processorHolder != nullptr && [myself view] != nullptr)
  1069. {
  1070. if (AudioProcessorEditor* editor = getAudioProcessor().getActiveEditor())
  1071. {
  1072. editor->setBounds (convertToRectInt ([[myself view] bounds]));
  1073. if (JUCE_IOS_MAC_VIEW* peerView = [[[myself view] subviews] objectAtIndex: 0])
  1074. {
  1075. #if JUCE_IOS
  1076. [peerView setNeedsDisplay];
  1077. #else
  1078. [peerView setNeedsDisplay: YES];
  1079. #endif
  1080. }
  1081. }
  1082. }
  1083. }
  1084. CGSize getPreferredContentSize() const
  1085. {
  1086. return CGSizeMake (static_cast<float> (preferredSize.getWidth()),
  1087. static_cast<float> (preferredSize.getHeight()));
  1088. }
  1089. //==============================================================================
  1090. AUAudioUnit* createAudioUnit (const AudioComponentDescription& descr, NSError** error)
  1091. {
  1092. AUAudioUnit* retval = nil;
  1093. if (! MessageManager::getInstance()->isThisTheMessageThread())
  1094. {
  1095. WaitableEvent creationEvent;
  1096. // AUv3 headers say that we may block this thread and that the message thread is guaranteed
  1097. // to be unblocked
  1098. struct AUCreator : public CallbackMessage
  1099. {
  1100. JuceAUViewController& owner;
  1101. AudioComponentDescription pDescr;
  1102. NSError** pError;
  1103. AUAudioUnit*& outAU;
  1104. WaitableEvent& e;
  1105. AUCreator (JuceAUViewController& parent, const AudioComponentDescription& paramDescr, NSError** paramError,
  1106. AUAudioUnit*& outputAU, WaitableEvent& event)
  1107. : owner (parent), pDescr (paramDescr), pError (paramError), outAU (outputAU), e (event)
  1108. {}
  1109. void messageCallback() override
  1110. {
  1111. outAU = owner.createAudioUnitOnMessageThread (pDescr, pError);
  1112. e.signal();
  1113. }
  1114. };
  1115. (new AUCreator (*this, descr, error, retval, creationEvent))->post();
  1116. creationEvent.wait (-1);
  1117. }
  1118. else
  1119. retval = createAudioUnitOnMessageThread (descr, error);
  1120. return [retval autorelease];
  1121. }
  1122. private:
  1123. //==============================================================================
  1124. AUViewController<AUAudioUnitFactory>* myself;
  1125. AudioProcessorHolder::Ptr processorHolder;
  1126. Rectangle<int> preferredSize;
  1127. //==============================================================================
  1128. AUAudioUnit* createAudioUnitOnMessageThread (const AudioComponentDescription& descr, NSError** error)
  1129. {
  1130. jassert (MessageManager::getInstance()->isThisTheMessageThread());
1131. [myself view]; // accessing the view property calls loadView and ensures that the AudioProcessor has been instantiated
  1132. if (processorHolder == nullptr)
  1133. return nullptr;
  1134. return (new JuceAudioUnitv3 (processorHolder, descr, 0, error))->getAudioUnit();
  1135. }
  1136. AudioProcessor& getAudioProcessor() const noexcept { return **processorHolder; }
  1137. };
  1138. //==============================================================================
  1139. // necessary glue code
  1140. @interface JUCE_VIEWCONTROLLER_OBJC_NAME (JucePlugin_AUExportPrefix) : AUViewController<AUAudioUnitFactory>
  1141. @end
  1142. @implementation JUCE_VIEWCONTROLLER_OBJC_NAME (JucePlugin_AUExportPrefix)
  1143. {
  1144. ScopedPointer<JuceAUViewController> cpp;
  1145. }
  1146. - (instancetype) initWithNibName: (nullable NSString*) nib bundle: (nullable NSBundle*) bndl { self = [super initWithNibName: nib bundle: bndl]; cpp = new JuceAUViewController (self); return self;}
  1147. - (void) loadView { cpp->loadView(); }
  1148. - (AUAudioUnit *)createAudioUnitWithComponentDescription:(AudioComponentDescription)desc error:(NSError **)error { return cpp->createAudioUnit (desc, error); }
  1149. - (CGSize) preferredContentSize { return cpp->getPreferredContentSize(); }
  1150. - (void)viewDidLayoutSubviews { return cpp->viewDidLayoutSubviews(); }
  1151. @end
  1152. //==============================================================================
  1153. #if JUCE_IOS
  1154. bool JUCE_CALLTYPE juce_isInterAppAudioConnected() { return false; }
  1155. void JUCE_CALLTYPE juce_switchToHostApplication() {}
  1156. #if JUCE_MODULE_AVAILABLE_juce_gui_basics
  1157. Image JUCE_CALLTYPE juce_getIAAHostIcon (int) { return Image(); }
  1158. #endif
  1159. #endif
  1160. #pragma clang diagnostic pop
  1161. #endif