The JUCE cross-platform C++ framework, with DISTRHO/KXStudio-specific changes

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2017 - ROLI Ltd.
  5. JUCE is an open source library subject to commercial or open-source
  6. licensing.
  7. By using JUCE, you agree to the terms of both the JUCE 5 End-User License
  8. Agreement and JUCE 5 Privacy Policy (both updated and effective as of the
  9. 27th April 2017).
  10. End User License Agreement: www.juce.com/juce-5-licence
  11. Privacy Policy: www.juce.com/juce-5-privacy-policy
  12. Or: You may also use this code under the terms of the GPL v3 (see
  13. www.gnu.org/licenses).
  14. JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
  15. EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
  16. DISCLAIMED.
  17. ==============================================================================
  18. */
  19. #include "../../juce_core/system/juce_TargetPlatform.h"
  20. #include "../utility/juce_CheckSettingMacros.h"
  21. #if JucePlugin_Build_AUv3
  22. #import <CoreAudioKit/CoreAudioKit.h>
  23. #import <AudioToolbox/AudioToolbox.h>
  24. #import <AVFoundation/AVFoundation.h>
  25. #if JUCE_MAC
  26. #if (! defined MAC_OS_X_VERSION_MIN_REQUIRED) || (! defined MAC_OS_X_VERSION_10_11) || (MAC_OS_X_VERSION_MIN_REQUIRED < MAC_OS_X_VERSION_10_11)
  27. #error AUv3 needs Deployment Target OS X 10.11 or higher to compile
  28. #endif
  29. #if (defined MAC_OS_X_VERSION_10_13) && (MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_13)
  30. #define JUCE_AUV3_MIDI_OUTPUT_SUPPORTED 1
  31. #define JUCE_AUV3_VIEW_CONFIG_SUPPORTED 1
  32. #endif
  33. #endif
  34. #if JUCE_IOS
  35. #if (! defined __IPHONE_OS_VERSION_MIN_REQUIRED) || (! defined __IPHONE_9_0) || (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_9_0)
  36. #error AUv3 needs Deployment Target iOS 9.0 or higher to compile
  37. #endif
  38. #if (defined __IPHONE_11_0) && (__IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_11_0)
  39. #define JUCE_AUV3_MIDI_OUTPUT_SUPPORTED 1
  40. #define JUCE_AUV3_VIEW_CONFIG_SUPPORTED 1
  41. #endif
  42. #endif
  43. #ifndef __OBJC2__
  44. #error AUv3 needs Objective-C 2 support (compile with 64-bit)
  45. #endif
  46. #define JUCE_CORE_INCLUDE_OBJC_HELPERS 1
  47. #include "../utility/juce_IncludeSystemHeaders.h"
  48. #include "../utility/juce_IncludeModuleHeaders.h"
  49. #include "../../juce_graphics/native/juce_mac_CoreGraphicsHelpers.h"
  50. #include "../../juce_audio_basics/native/juce_mac_CoreAudioLayouts.h"
  51. #include "../../juce_audio_processors/format_types/juce_AU_Shared.h"
  52. #define JUCE_VIEWCONTROLLER_OBJC_NAME(x) JUCE_JOIN_MACRO (x, FactoryAUv3)
  53. #if ! JUCE_COMPILER_SUPPORTS_VARIADIC_TEMPLATES
  54. #error AUv3 wrapper requires variadic template support
  55. #endif
  56. #if JUCE_IOS
  57. #define JUCE_IOS_MAC_VIEW UIView
  58. #else
  59. #define JUCE_IOS_MAC_VIEW NSView
  60. #endif
  61. #define JUCE_AUDIOUNIT_OBJC_NAME(x) JUCE_JOIN_MACRO (x, AUv3)
  62. #pragma clang diagnostic push
  63. #pragma clang diagnostic ignored "-Wnullability-completeness"
  64. using namespace juce;
  65. // TODO: use SFINAE to automatically generate this for all NSObjects
  66. template <> struct ContainerDeletePolicy<AUAudioUnitBusArray> { static void destroy (NSObject* o) { [o release]; } };
  67. template <> struct ContainerDeletePolicy<AUParameterTree> { static void destroy (NSObject* o) { [o release]; } };
  68. template <> struct ContainerDeletePolicy<NSMutableArray<AUParameterNode*>> { static void destroy (NSObject* o) { [o release]; } };
  69. template <> struct ContainerDeletePolicy<AUParameter> { static void destroy (NSObject* o) { [o release]; } };
  70. template <> struct ContainerDeletePolicy<NSMutableArray<AUAudioUnitBus*>> { static void destroy (NSObject* o) { [o release]; } };
  71. template <> struct ContainerDeletePolicy<AUAudioUnitBus> { static void destroy (NSObject* o) { [o release]; } };
  72. template <> struct ContainerDeletePolicy<AVAudioFormat> { static void destroy (NSObject* o) { [o release]; } };
  73. template <> struct ContainerDeletePolicy<AVAudioPCMBuffer> { static void destroy (NSObject* o) { [o release]; } };
  74. template <> struct ContainerDeletePolicy<NSMutableArray<NSNumber*>> { static void destroy (NSObject* o) { [o release]; } };
  75. template <> struct ContainerDeletePolicy<NSNumber> { static void destroy (NSObject* o) { [o release]; } };
  76. template <> struct ContainerDeletePolicy<NSMutableArray<AUAudioUnitPreset*>> { static void destroy (NSObject* o) { [o release]; } };
  77. template <> struct ContainerDeletePolicy<AUAudioUnitPreset> { static void destroy (NSObject* o) { [o release]; } };
  78. //==============================================================================
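  // Ref-counted holder for the wrapped AudioProcessor, so the audio unit (and any other owner holding
  // a Ptr) can share a single processor instance; it also stores the view configuration requested by the host.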
  79. struct AudioProcessorHolder : public ReferenceCountedObject
  80. {
  81. AudioProcessorHolder() {}
  82. AudioProcessorHolder (AudioProcessor* p) : processor (p) {}
  83. AudioProcessor& operator*() noexcept { return *processor; }
  84. AudioProcessor* operator->() noexcept { return processor; }
  85. AudioProcessor* get() noexcept { return processor; }
  86. struct ViewConfig
  87. {
  88. double width;
  89. double height;
  90. bool hostHasMIDIController;
  91. };
  92. ScopedPointer<ViewConfig> viewConfiguration;
  93. typedef ReferenceCountedObjectPtr<AudioProcessorHolder> Ptr;
  94. private:
  95. ScopedPointer<AudioProcessor> processor;
  96. AudioProcessorHolder& operator= (AudioProcessor*) JUCE_DELETED_FUNCTION;
  97. AudioProcessorHolder (AudioProcessorHolder&) JUCE_DELETED_FUNCTION;
  98. AudioProcessorHolder& operator= (AudioProcessorHolder&) JUCE_DELETED_FUNCTION;
  99. };
  100. //==============================================================================
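  // Base class bridging C++ and Objective-C: it creates (or attaches to) the dynamically registered
  // AUAudioUnit subclass below and receives all of its overridden selectors as virtual method calls.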
  101. class JuceAudioUnitv3Base
  102. {
  103. public:
  104. JuceAudioUnitv3Base (const AudioComponentDescription& descr,
  105. AudioComponentInstantiationOptions options,
  106. NSError** error)
  107. #pragma clang diagnostic push
  108. #pragma clang diagnostic ignored "-Wobjc-method-access"
  109. : au ([audioUnitObjCClass.createInstance() initWithComponentDescription: descr
  110. options: options
  111. error: error
  112. juceClass: this])
  113. #pragma clang diagnostic pop
  114. {}
  115. JuceAudioUnitv3Base (AUAudioUnit* audioUnit) : au (audioUnit)
  116. {
  117. jassert (MessageManager::getInstance()->isThisTheMessageThread());
  118. initialiseJuce_GUI();
  119. }
  120. virtual ~JuceAudioUnitv3Base() {}
  121. //==============================================================================
  122. AUAudioUnit* getAudioUnit() noexcept { return au; }
  123. //==============================================================================
  124. virtual void reset() = 0;
  125. //==============================================================================
  126. virtual AUAudioUnitPreset* getCurrentPreset() = 0;
  127. virtual void setCurrentPreset(AUAudioUnitPreset*) = 0;
  128. virtual NSArray<AUAudioUnitPreset*>* getFactoryPresets() = 0;
  129. virtual NSDictionary<NSString*, id>* getFullState()
  130. {
  131. objc_super s = { getAudioUnit(), [AUAudioUnit class] };
  132. return ObjCMsgSendSuper<NSDictionary<NSString*, id>*> (&s, @selector (fullState));
  133. }
  134. virtual void setFullState (NSDictionary<NSString*, id>* state)
  135. {
  136. objc_super s = { getAudioUnit(), [AUAudioUnit class] };
  137. ObjCMsgSendSuper<void, NSDictionary<NSString*, id>*> (&s, @selector (setFullState:), state);
  138. }
  139. virtual AUParameterTree* getParameterTree() = 0;
  140. virtual NSArray<NSNumber*>* parametersForOverviewWithCount (int) = 0;
  141. //==============================================================================
  142. virtual NSTimeInterval getLatency() = 0;
  143. virtual NSTimeInterval getTailTime() = 0;
  144. //==============================================================================
  145. virtual AUAudioUnitBusArray* getInputBusses() = 0;
  146. virtual AUAudioUnitBusArray* getOutputBusses() = 0;
  147. virtual NSArray<NSNumber*>* getChannelCapabilities() = 0;
  148. virtual bool shouldChangeToFormat (AVAudioFormat*, AUAudioUnitBus*) = 0;
  149. //==============================================================================
  150. virtual int getVirtualMIDICableCount() = 0;
  151. virtual bool getSupportsMPE() = 0;
  152. virtual NSArray<NSString*>* getMIDIOutputNames() = 0;
  153. //==============================================================================
  154. virtual AUInternalRenderBlock getInternalRenderBlock() = 0;
  155. virtual bool getCanProcessInPlace() { return false; }
  156. virtual bool getRenderingOffline() = 0;
  157. virtual void setRenderingOffline (bool offline) = 0;
  158. //==============================================================================
  159. virtual NSString* getContextName() const = 0;
  160. virtual void setContextName (NSString*) = 0;
  161. virtual bool allocateRenderResourcesAndReturnError (NSError **outError)
  162. {
  163. objc_super s = { getAudioUnit(), [AUAudioUnit class] };
  164. return (ObjCMsgSendSuper<BOOL, NSError**> (&s, @selector (allocateRenderResourcesAndReturnError:), outError) == YES);
  165. }
  166. virtual void deallocateRenderResources()
  167. {
  168. objc_super s = { getAudioUnit(), [AUAudioUnit class] };
  169. ObjCMsgSendSuper<void> (&s, @selector (deallocateRenderResources));
  170. }
  171. //==============================================================================
  172. #if JUCE_AUV3_VIEW_CONFIG_SUPPORTED
  173. virtual NSIndexSet* getSupportedViewConfigurations (NSArray<AUAudioUnitViewConfiguration*>*) = 0;
  174. virtual void selectViewConfiguration (AUAudioUnitViewConfiguration*) = 0;
  175. #endif
  176. private:
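  // Dynamically registered Objective-C subclass of AUAudioUnit; every overridden selector forwards to
  // the JuceAudioUnitv3Base instance stored in the "cppObject" instance variable.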
  177. struct Class : public ObjCClass<AUAudioUnit>
  178. {
  179. Class() : ObjCClass<AUAudioUnit> ("AUAudioUnit_")
  180. {
  181. addIvar<JuceAudioUnitv3Base*> ("cppObject");
  182. #pragma clang diagnostic push
  183. #pragma clang diagnostic ignored "-Wundeclared-selector"
  184. addMethod (@selector (initWithComponentDescription:options:error:juceClass:),
  185. initWithComponentDescriptionAndJuceClass, "@@:",
  186. @encode (AudioComponentDescription),
  187. @encode (AudioComponentInstantiationOptions), "^@@");
  188. #pragma clang diagnostic pop
  189. addMethod (@selector (initWithComponentDescription:options:error:),
  190. initWithComponentDescription, "@@:",
  191. @encode (AudioComponentDescription),
  192. @encode (AudioComponentInstantiationOptions), "^@");
  193. addMethod (@selector (dealloc), dealloc, "v@:");
  194. //==============================================================================
  195. addMethod (@selector (reset), reset, "v@:");
  196. //==============================================================================
  197. addMethod (@selector (currentPreset), getCurrentPreset, "@@:");
  198. addMethod (@selector (setCurrentPreset:), setCurrentPreset, "v@:@");
  199. addMethod (@selector (factoryPresets), getFactoryPresets, "@@:");
  200. addMethod (@selector (fullState), getFullState, "@@:");
  201. addMethod (@selector (setFullState:), setFullState, "v@:@");
  202. addMethod (@selector (parameterTree), getParameterTree, "@@:");
  203. addMethod (@selector (parametersForOverviewWithCount:), parametersForOverviewWithCount, "@@:", @encode (NSInteger));
  204. //==============================================================================
  205. addMethod (@selector (latency), getLatency, @encode (NSTimeInterval), "@:");
  206. addMethod (@selector (tailTime), getTailTime, @encode (NSTimeInterval), "@:");
  207. //==============================================================================
  208. addMethod (@selector (inputBusses), getInputBusses, "@@:");
  209. addMethod (@selector (outputBusses), getOutputBusses, "@@:");
  210. addMethod (@selector (channelCapabilities), getChannelCapabilities, "@@:");
  211. addMethod (@selector (shouldChangeToFormat:forBus:), shouldChangeToFormat, "B@:@@");
  212. //==============================================================================
  213. addMethod (@selector (virtualMIDICableCount), getVirtualMIDICableCount, @encode (NSInteger), "@:");
  214. addMethod (@selector (supportsMPE), getSupportsMPE, @encode (BOOL), "@:");
  215. #if JUCE_AUV3_MIDI_OUTPUT_SUPPORTED
  216. addMethod (@selector (MIDIOutputNames), getMIDIOutputNames, "@@:");
  217. #endif
  218. //==============================================================================
  219. addMethod (@selector (internalRenderBlock), getInternalRenderBlock, @encode (AUInternalRenderBlock), "@:");
  220. addMethod (@selector (canProcessInPlace), getCanProcessInPlace, @encode (BOOL), "@:");
  221. addMethod (@selector (isRenderingOffline), getRenderingOffline, @encode (BOOL), "@:");
  222. addMethod (@selector (setRenderingOffline:), setRenderingOffline, "v@:", @encode (BOOL));
  223. addMethod (@selector (allocateRenderResourcesAndReturnError:), allocateRenderResourcesAndReturnError, "B@:^@");
  224. addMethod (@selector (deallocateRenderResources), deallocateRenderResources, "v@:");
  225. //==============================================================================
  226. addMethod (@selector (contextName), getContextName, "@@:");
  227. addMethod (@selector (setContextName:), setContextName, "v@:@");
  228. //==============================================================================
  229. #if JUCE_AUV3_VIEW_CONFIG_SUPPORTED
  230. addMethod (@selector (supportedViewConfigurations:), getSupportedViewConfigurations, "@@:@");
  231. addMethod (@selector (selectViewConfiguration:), selectViewConfiguration, "v@:@");
  232. #endif
  233. registerClass();
  234. }
  235. //==============================================================================
  236. static JuceAudioUnitv3Base* _this (id self) { return getIvar<JuceAudioUnitv3Base*> (self, "cppObject"); }
  237. static void setThis (id self, JuceAudioUnitv3Base* cpp) { object_setInstanceVariable (self, "cppObject", cpp); }
  238. //==============================================================================
  239. static id initWithComponentDescription (id _self, SEL, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error)
  240. {
  241. AUAudioUnit* self = _self;
  242. objc_super s = { self, [AUAudioUnit class] };
  243. self = ObjCMsgSendSuper<AUAudioUnit*, AudioComponentDescription,
  244. AudioComponentInstantiationOptions, NSError**> (&s, @selector(initWithComponentDescription:options:error:), descr, options, error);
  245. JuceAudioUnitv3Base* juceAU = JuceAudioUnitv3Base::create (self, descr, options, error);
  246. setThis (self, juceAU);
  247. return self;
  248. }
  249. static id initWithComponentDescriptionAndJuceClass (id _self, SEL, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error, JuceAudioUnitv3Base* juceAU)
  250. {
  251. AUAudioUnit* self = _self;
  252. objc_super s = { self, [AUAudioUnit class] };
  253. self = ObjCMsgSendSuper<AUAudioUnit*, AudioComponentDescription,
  254. AudioComponentInstantiationOptions, NSError**> (&s, @selector(initWithComponentDescription:options:error:), descr, options, error);
  255. setThis (self, juceAU);
  256. return self;
  257. }
  258. static void dealloc (id self, SEL) { delete _this (self); }
  259. //==============================================================================
  260. static void reset (id self, SEL) { _this (self)->reset(); }
  261. //==============================================================================
  262. static AUAudioUnitPreset* getCurrentPreset (id self, SEL) { return _this (self)->getCurrentPreset(); }
  263. static void setCurrentPreset (id self, SEL, AUAudioUnitPreset* preset) { return _this (self)->setCurrentPreset (preset); }
  264. static NSArray<AUAudioUnitPreset*>* getFactoryPresets (id self, SEL) { return _this (self)->getFactoryPresets(); }
  265. static NSDictionary<NSString*, id>* getFullState (id self, SEL) { return _this (self)->getFullState(); }
  266. static void setFullState (id self, SEL, NSDictionary<NSString *, id>* state) { return _this (self)->setFullState (state); }
  267. static AUParameterTree* getParameterTree (id self, SEL) { return _this (self)->getParameterTree(); }
  268. static NSArray<NSNumber*>* parametersForOverviewWithCount (id self, SEL, NSInteger count) { return _this (self)->parametersForOverviewWithCount (static_cast<int> (count)); }
  269. //==============================================================================
  270. static NSTimeInterval getLatency (id self, SEL) { return _this (self)->getLatency(); }
  271. static NSTimeInterval getTailTime (id self, SEL) { return _this (self)->getTailTime(); }
  272. //==============================================================================
  273. static AUAudioUnitBusArray* getInputBusses (id self, SEL) { return _this (self)->getInputBusses(); }
  274. static AUAudioUnitBusArray* getOutputBusses (id self, SEL) { return _this (self)->getOutputBusses(); }
  275. static NSArray<NSNumber*>* getChannelCapabilities (id self, SEL) { return _this (self)->getChannelCapabilities(); }
  276. static BOOL shouldChangeToFormat (id self, SEL, AVAudioFormat* format, AUAudioUnitBus* bus) { return _this (self)->shouldChangeToFormat (format, bus) ? YES : NO; }
  277. //==============================================================================
  278. static NSInteger getVirtualMIDICableCount (id self, SEL) { return _this (self)->getVirtualMIDICableCount(); }
  279. static BOOL getSupportsMPE (id self, SEL) { return _this (self)->getSupportsMPE() ? YES : NO; }
  280. static NSArray<NSString*>* getMIDIOutputNames (id self, SEL) { return _this (self)->getMIDIOutputNames(); }
  281. //==============================================================================
  282. static AUInternalRenderBlock getInternalRenderBlock (id self, SEL) { return _this (self)->getInternalRenderBlock(); }
  283. static BOOL getCanProcessInPlace (id self, SEL) { return _this (self)->getCanProcessInPlace() ? YES : NO; }
  284. static BOOL getRenderingOffline (id self, SEL) { return _this (self)->getRenderingOffline() ? YES : NO; }
  285. static void setRenderingOffline (id self, SEL, BOOL renderingOffline) { _this (self)->setRenderingOffline (renderingOffline); }
  286. static BOOL allocateRenderResourcesAndReturnError (id self, SEL, NSError** error) { return _this (self)->allocateRenderResourcesAndReturnError (error) ? YES : NO; }
  287. static void deallocateRenderResources (id self, SEL) { _this (self)->deallocateRenderResources(); }
  288. //==============================================================================
  289. static NSString* getContextName (id self, SEL) { return _this (self)->getContextName(); }
  290. static void setContextName (id self, SEL, NSString* str) { return _this (self)->setContextName (str); }
  291. //==============================================================================
  292. #if JUCE_AUV3_VIEW_CONFIG_SUPPORTED
  293. static NSIndexSet* getSupportedViewConfigurations (id self, SEL, NSArray<AUAudioUnitViewConfiguration*>* configs) { return _this (self)->getSupportedViewConfigurations (configs); }
  294. static void selectViewConfiguration (id self, SEL, AUAudioUnitViewConfiguration* config) { _this (self)->selectViewConfiguration (config); }
  295. #endif
  296. };
  297. static JuceAudioUnitv3Base* create (AUAudioUnit*, AudioComponentDescription, AudioComponentInstantiationOptions, NSError**);
  298. //==============================================================================
  299. static Class audioUnitObjCClass;
  300. protected:
  301. AUAudioUnit* au;
  302. };
  303. //==============================================================================
  304. JuceAudioUnitv3Base::Class JuceAudioUnitv3Base::audioUnitObjCClass;
  305. //==============================================================================
  306. //=========================== The actual AudioUnit =============================
  307. //==============================================================================
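  // Implements the audio unit by delegating to the wrapped JUCE AudioProcessor. It also acts as the
  // processor's AudioPlayHead (fed from the host's context blocks) and listens for parameter changes.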
  308. class JuceAudioUnitv3 : public JuceAudioUnitv3Base,
  309. public AudioProcessorListener,
  310. public AudioPlayHead
  311. {
  312. public:
  313. JuceAudioUnitv3 (const AudioProcessorHolder::Ptr& processor,
  314. const AudioComponentDescription& descr,
  315. AudioComponentInstantiationOptions options,
  316. NSError** error)
  317. : JuceAudioUnitv3Base (descr, options, error),
  318. processorHolder (processor),
  319. mapper (*processorHolder->get())
  320. {
  321. init();
  322. }
  323. JuceAudioUnitv3 (AUAudioUnit* audioUnit, AudioComponentDescription, AudioComponentInstantiationOptions, NSError**)
  324. : JuceAudioUnitv3Base (audioUnit),
  325. processorHolder (new AudioProcessorHolder (createPluginFilterOfType (AudioProcessor::wrapperType_AudioUnitv3))),
  326. mapper (*processorHolder->get())
  327. {
  328. init();
  329. }
  330. ~JuceAudioUnitv3()
  331. {
  332. auto& processor = getAudioProcessor();
  333. processor.removeListener (this);
  334. removeEditor (processor);
  335. if (editorObserverToken != nullptr)
  336. {
  337. [paramTree removeParameterObserver: editorObserverToken];
  338. editorObserverToken = nullptr;
  339. }
  340. }
  341. //==============================================================================
  342. void init()
  343. {
  344. AudioProcessor& processor = getAudioProcessor();
  345. const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];
  346. #ifdef JucePlugin_PreferredChannelConfigurations
  347. short configs[][2] = {JucePlugin_PreferredChannelConfigurations};
  348. const int numConfigs = sizeof (configs) / sizeof (short[2]);
  349. jassert (numConfigs > 0 && (configs[0][0] > 0 || configs[0][1] > 0));
  350. processor.setPlayConfigDetails (configs[0][0], configs[0][1], kDefaultSampleRate, static_cast<int> (maxFrames));
  351. Array<AUChannelInfo> channelInfos;
  352. for (int i = 0; i < numConfigs; ++i)
  353. {
  354. AUChannelInfo channelInfo;
  355. channelInfo.inChannels = configs[i][0];
  356. channelInfo.outChannels = configs[i][1];
  357. channelInfos.add (channelInfo);
  358. }
  359. #else
  360. Array<AUChannelInfo> channelInfos = AudioUnitHelpers::getAUChannelInfo (processor);
  361. #endif
  362. processor.setPlayHead (this);
  363. totalInChannels = processor.getTotalNumInputChannels();
  364. totalOutChannels = processor.getTotalNumOutputChannels();
  365. {
  366. channelCapabilities = [[NSMutableArray<NSNumber*> alloc] init];
  367. for (int i = 0; i < channelInfos.size(); ++i)
  368. {
  369. AUChannelInfo& info = channelInfos.getReference (i);
  370. [channelCapabilities addObject: [NSNumber numberWithInteger: info.inChannels]];
  371. [channelCapabilities addObject: [NSNumber numberWithInteger: info.outChannels]];
  372. }
  373. }
  374. editorObserverToken = nullptr;
  375. internalRenderBlock = CreateObjCBlock (this, &JuceAudioUnitv3::renderCallback);
  376. processor.setRateAndBufferSizeDetails (kDefaultSampleRate, static_cast<int> (maxFrames));
  377. processor.prepareToPlay (kDefaultSampleRate, static_cast<int> (maxFrames));
  378. processor.addListener (this);
  379. addParameters();
  380. addPresets();
  381. addAudioUnitBusses (true);
  382. addAudioUnitBusses (false);
  383. }
  384. AudioProcessor& getAudioProcessor() const noexcept { return **processorHolder; }
  385. //==============================================================================
  386. void reset() override
  387. {
  388. midiMessages.clear();
  389. lastTimeStamp.mSampleTime = std::numeric_limits<Float64>::max();
  390. }
  391. //==============================================================================
  392. AUAudioUnitPreset* getCurrentPreset() override
  393. {
  394. const int n = static_cast<int> ([factoryPresets count]);
  395. const int idx = static_cast<int> (getAudioProcessor().getCurrentProgram());
  396. if (idx < n)
  397. return [factoryPresets objectAtIndex:static_cast<unsigned int> (idx)];
  398. return nullptr;
  399. }
  400. void setCurrentPreset(AUAudioUnitPreset* preset) override
  401. {
  402. const int n = static_cast<int> ([factoryPresets count]);
  403. const int idx = static_cast<int> ([preset number]);
  404. if (isPositiveAndBelow (idx, n))
  405. getAudioProcessor().setCurrentProgram (idx);
  406. }
  407. NSArray<AUAudioUnitPreset*>* getFactoryPresets() override
  408. {
  409. return factoryPresets;
  410. }
  411. NSDictionary<NSString*, id>* getFullState() override
  412. {
  413. NSMutableDictionary<NSString*, id>* retval = [[NSMutableDictionary<NSString*, id> alloc] init];
  414. {
  415. NSDictionary<NSString*, id>* superRetval = JuceAudioUnitv3Base::getFullState();
  416. if (superRetval != nullptr)
  417. [retval addEntriesFromDictionary:superRetval];
  418. }
  419. juce::MemoryBlock state;
  420. getAudioProcessor().getCurrentProgramStateInformation (state);
  421. if (state.getSize() > 0)
  422. {
  423. NSData* ourState = [[NSData alloc] initWithBytes: state.getData()
  424. length: state.getSize()];
  425. NSString* nsKey = [[NSString alloc] initWithUTF8String: JUCE_STATE_DICTIONARY_KEY];
  426. [retval setObject: ourState
  427. forKey: nsKey];
  428. [nsKey release];
  429. [ourState release];
  430. }
  431. return [retval autorelease];
  432. }
  433. void setFullState (NSDictionary<NSString*, id>* state) override
  434. {
  435. if (state == nullptr)
  436. return;
  437. NSMutableDictionary<NSString*, id>* modifiedState = [[NSMutableDictionary<NSString*, id> alloc] init];
  438. [modifiedState addEntriesFromDictionary: state];
  439. NSString* nsPresetKey = [[NSString alloc] initWithUTF8String: kAUPresetDataKey];
  440. [modifiedState removeObjectForKey: nsPresetKey];
  441. [nsPresetKey release];
  442. JuceAudioUnitv3Base::setFullState (modifiedState);
  443. NSString* nsKey = [[NSString alloc] initWithUTF8String: JUCE_STATE_DICTIONARY_KEY];
  444. NSObject* obj = [modifiedState objectForKey: nsKey];
  445. [nsKey release];
  446. if (obj != nullptr)
  447. {
  448. if ([obj isKindOfClass:[NSData class]])
  449. {
  450. NSData* data = reinterpret_cast<NSData*> (obj);
  451. const int numBytes = static_cast<int> ([data length]);
  452. const juce::uint8* const rawBytes = reinterpret_cast< const juce::uint8* const> ([data bytes]);
  453. if (numBytes > 0)
  454. getAudioProcessor().setCurrentProgramStateInformation (rawBytes, numBytes);
  455. }
  456. }
  457. [modifiedState release];
  458. }
  459. AUParameterTree* getParameterTree() override
  460. {
  461. return paramTree;
  462. }
  463. NSArray<NSNumber*>* parametersForOverviewWithCount (int count) override
  464. {
  465. const int n = static_cast<int> ([overviewParams count]);
  466. if (count >= n)
  467. return overviewParams;
  468. NSMutableArray<NSNumber*>* retval = [[NSMutableArray<NSNumber*>alloc] initWithArray: overviewParams];
  469. [retval removeObjectsInRange: NSMakeRange (static_cast<unsigned int> (count), static_cast<unsigned int> (n - count))];
  470. return [retval autorelease];
  471. }
  472. //==============================================================================
  473. NSTimeInterval getLatency() override
  474. {
  475. auto& p = getAudioProcessor();
  476. return p.getLatencySamples() / p.getSampleRate();
  477. }
  478. NSTimeInterval getTailTime() override
  479. {
  480. return getAudioProcessor().getTailLengthSeconds();
  481. }
  482. //==============================================================================
  483. AUAudioUnitBusArray* getInputBusses() override { return inputBusses; }
  484. AUAudioUnitBusArray* getOutputBusses() override { return outputBusses; }
  485. NSArray<NSNumber*>* getChannelCapabilities() override { return channelCapabilities; }
  486. bool shouldChangeToFormat (AVAudioFormat* format, AUAudioUnitBus* auBus) override
  487. {
  488. const bool isInput = ([auBus busType] == AUAudioUnitBusTypeInput);
  489. const int busIdx = static_cast<int> ([auBus index]);
  490. const int newNumChannels = static_cast<int> ([format channelCount]);
  491. AudioProcessor& processor = getAudioProcessor();
  492. if (AudioProcessor::Bus* bus = processor.getBus (isInput, busIdx))
  493. {
  494. #ifdef JucePlugin_PreferredChannelConfigurations
  495. ignoreUnused (bus);
  496. short configs[][2] = {JucePlugin_PreferredChannelConfigurations};
  497. if (! AudioUnitHelpers::isLayoutSupported (processor, isInput, busIdx, newNumChannels, configs))
  498. return false;
  499. #else
  500. const AVAudioChannelLayout* layout = [format channelLayout];
  501. const AudioChannelLayoutTag layoutTag = (layout != nullptr ? [layout layoutTag] : 0);
  502. if (layoutTag != 0)
  503. {
  504. AudioChannelSet newLayout = CoreAudioLayouts::fromCoreAudio (layoutTag);
  505. if (newLayout.size() != newNumChannels)
  506. return false;
  507. if (! bus->isLayoutSupported (newLayout))
  508. return false;
  509. }
  510. else
  511. {
  512. if (! bus->isNumberOfChannelsSupported (newNumChannels))
  513. return false;
  514. }
  515. #endif
  516. return true;
  517. }
  518. return false;
  519. }
  520. //==============================================================================
  521. int getVirtualMIDICableCount() override
  522. {
  523. #if JucePlugin_WantsMidiInput
  524. return 1;
  525. #else
  526. return 0;
  527. #endif
  528. }
  529. bool getSupportsMPE() override
  530. {
  531. return getAudioProcessor().supportsMPE();
  532. }
  533. NSArray<NSString*>* getMIDIOutputNames() override
  534. {
  535. #if JucePlugin_ProducesMidiOutput
  536. return @[@"MIDI Out"];
  537. #else
  538. return @[];
  539. #endif
  540. }
  541. //==============================================================================
  542. AUInternalRenderBlock getInternalRenderBlock() override { return internalRenderBlock; }
  543. bool getRenderingOffline() override { return getAudioProcessor().isNonRealtime(); }
  544. void setRenderingOffline (bool offline) override
  545. {
  546. auto& processor = getAudioProcessor();
  547. auto isCurrentlyNonRealtime = processor.isNonRealtime();
  548. if (isCurrentlyNonRealtime != offline)
  549. {
  550. ScopedLock callbackLock (processor.getCallbackLock());
  551. processor.setNonRealtime (offline);
  552. processor.prepareToPlay (processor.getSampleRate(), processor.getBlockSize());
  553. }
  554. }
  555. //==============================================================================
  556. NSString* getContextName() const override { return juceStringToNS (contextName); }
  557. void setContextName (NSString* str) override
  558. {
  559. if (str != nullptr)
  560. {
  561. AudioProcessor::TrackProperties props;
  562. props.name = nsStringToJuce (str);
  563. getAudioProcessor().updateTrackProperties (props);
  564. }
  565. }
  566. //==============================================================================
  567. bool allocateRenderResourcesAndReturnError (NSError **outError) override
  568. {
  569. AudioProcessor& processor = getAudioProcessor();
  570. const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];
  571. if (! JuceAudioUnitv3Base::allocateRenderResourcesAndReturnError (outError))
  572. return false;
  573. if (outError != nullptr)
  574. *outError = nullptr;
  575. AudioProcessor::BusesLayout layouts;
  576. for (int dir = 0; dir < 2; ++dir)
  577. {
  578. const bool isInput = (dir == 0);
  579. const int n = AudioUnitHelpers::getBusCount (&processor, isInput);
  580. Array<AudioChannelSet>& channelSets = (isInput ? layouts.inputBuses : layouts.outputBuses);
  581. AUAudioUnitBusArray* auBuses = (isInput ? [getAudioUnit() inputBusses] : [getAudioUnit() outputBusses]);
  582. jassert ([auBuses count] == static_cast<NSUInteger> (n));
  583. for (int busIdx = 0; busIdx < n; ++busIdx)
  584. {
  585. AudioProcessor::Bus* bus = processor.getBus (isInput, busIdx);
  586. AVAudioFormat* format = [[auBuses objectAtIndexedSubscript:static_cast<NSUInteger> (busIdx)] format];
  587. AudioChannelSet newLayout;
  588. const AVAudioChannelLayout* layout = [format channelLayout];
  589. const AudioChannelLayoutTag layoutTag = (layout != nullptr ? [layout layoutTag] : 0);
  590. if (layoutTag != 0)
  591. newLayout = CoreAudioLayouts::fromCoreAudio (layoutTag);
  592. else
  593. newLayout = bus->supportedLayoutWithChannels (static_cast<int> ([format channelCount]));
  594. if (newLayout.isDisabled())
  595. return false;
  596. channelSets.add (newLayout);
  597. }
  598. }
  599. #ifdef JucePlugin_PreferredChannelConfigurations
  600. short configs[][2] = {JucePlugin_PreferredChannelConfigurations};
  601. if (! AudioProcessor::containsLayout (layouts, configs))
  602. {
  603. if (outError != nullptr)
  604. *outError = [NSError errorWithDomain:NSOSStatusErrorDomain code:kAudioUnitErr_FormatNotSupported userInfo:nullptr];
  605. return false;
  606. }
  607. #endif
  608. if (! AudioUnitHelpers::setBusesLayout (&getAudioProcessor(), layouts))
  609. {
  610. if (outError != nullptr)
  611. *outError = [NSError errorWithDomain:NSOSStatusErrorDomain code:kAudioUnitErr_FormatNotSupported userInfo:nullptr];
  612. return false;
  613. }
  614. totalInChannels = processor.getTotalNumInputChannels();
  615. totalOutChannels = processor.getTotalNumOutputChannels();
  616. allocateBusBuffer (true);
  617. allocateBusBuffer (false);
  618. mapper.alloc();
  619. audioBuffer.prepare (totalInChannels, totalOutChannels, static_cast<int> (maxFrames));
  620. double sampleRate = (jmax (AudioUnitHelpers::getBusCount (&processor, true), AudioUnitHelpers::getBusCount (&processor, false)) > 0 ?
  621. [[[([inputBusses count] > 0 ? inputBusses : outputBusses) objectAtIndexedSubscript: 0] format] sampleRate] : 44100.0);
  622. processor.setRateAndBufferSizeDetails (sampleRate, static_cast<int> (maxFrames));
  623. processor.prepareToPlay (sampleRate, static_cast<int> (maxFrames));
  624. zeromem (&lastAudioHead, sizeof (lastAudioHead));
  625. hostMusicalContextCallback = [getAudioUnit() musicalContextBlock];
  626. hostTransportStateCallback = [getAudioUnit() transportStateBlock];
  627. reset();
  628. return true;
  629. }
  630. void deallocateRenderResources() override
  631. {
  632. hostMusicalContextCallback = nullptr;
  633. hostTransportStateCallback = nullptr;
  634. getAudioProcessor().releaseResources();
  635. audioBuffer.release();
  636. inBusBuffers. clear();
  637. outBusBuffers.clear();
  638. mapper.release();
  639. JuceAudioUnitv3Base::deallocateRenderResources();
  640. }
  641. //==============================================================================
  642. #if JUCE_AUV3_VIEW_CONFIG_SUPPORTED
  643. NSIndexSet* getSupportedViewConfigurations (NSArray<AUAudioUnitViewConfiguration*>* configs) override
  644. {
  645. auto supportedViewIndices = [[NSMutableIndexSet alloc] init];
  646. auto n = [configs count];
  647. if (auto* editor = getAudioProcessor().createEditorIfNeeded())
  648. {
  649. // If you hit this assertion then your plug-in's editor is reporting that it doesn't support
  650. // any host MIDI controller configurations!
  651. jassert (editor->supportsHostMIDIControllerPresence (true) || editor->supportsHostMIDIControllerPresence (false));
  652. for (auto i = 0u; i < n; ++i)
  653. {
  654. if (auto* viewConfiguration = [configs objectAtIndex:i])
  655. {
  656. if (editor->supportsHostMIDIControllerPresence ([viewConfiguration hostHasController] == YES))
  657. {
  658. auto* constrainer = editor->getConstrainer();
  659. auto height = (int) [viewConfiguration height];
  660. auto width = (int) [viewConfiguration width];
  661. if (height <= constrainer->getMaximumHeight() && height >= constrainer->getMinimumHeight()
  662. && width <= constrainer->getMaximumWidth() && width >= constrainer->getMinimumWidth())
  663. [supportedViewIndices addIndex: i];
  664. }
  665. }
  666. }
  667. }
  668. return [supportedViewIndices autorelease];
  669. }
  670. void selectViewConfiguration (AUAudioUnitViewConfiguration* config) override
  671. {
  672. processorHolder->viewConfiguration = new AudioProcessorHolder::ViewConfig { [config width], [config height], [config hostHasController] == YES };
  673. }
  674. #endif
  675. //==============================================================================
  676. void audioProcessorChanged (AudioProcessor* processor) override
  677. {
  678. ignoreUnused (processor);
  679. [au willChangeValueForKey: @"allParameterValues"];
  680. addPresets();
  681. [au didChangeValueForKey: @"allParameterValues"];
  682. }
  683. void audioProcessorParameterChanged (AudioProcessor*, int idx, float newValue) override
  684. {
  685. if (isPositiveAndBelow (idx, getAudioProcessor().getNumParameters()))
  686. {
  687. if (AUParameter* param = [paramTree parameterWithAddress: getAUParameterAddressForIndex (idx)])
  688. {
  689. if (editorObserverToken != nullptr)
  690. [param setValue: newValue originator: editorObserverToken];
  691. else
  692. [param setValue: newValue];
  693. }
  694. }
  695. }
  696. //==============================================================================
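  // AudioPlayHead: fills in position info from the last render timestamp plus the host's
  // musical-context and transport-state blocks, when the host provides them.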
  697. bool getCurrentPosition (CurrentPositionInfo& info) override
  698. {
  699. bool musicContextCallSucceeded = false;
  700. bool transportStateCallSucceeded = false;
  701. info = lastAudioHead;
  702. info.timeInSamples = (int64) (lastTimeStamp.mSampleTime + 0.5);
  703. info.timeInSeconds = info.timeInSamples / getAudioProcessor().getSampleRate();
  704. switch (lastTimeStamp.mSMPTETime.mType)
  705. {
  706. case kSMPTETimeType2398: info.frameRate = AudioPlayHead::fps23976; break;
  707. case kSMPTETimeType24: info.frameRate = AudioPlayHead::fps24; break;
  708. case kSMPTETimeType25: info.frameRate = AudioPlayHead::fps25; break;
  709. case kSMPTETimeType2997: info.frameRate = AudioPlayHead::fps2997; break;
  710. case kSMPTETimeType2997Drop: info.frameRate = AudioPlayHead::fps2997drop; break;
  711. case kSMPTETimeType30Drop: info.frameRate = AudioPlayHead::fps30drop; break;
  712. case kSMPTETimeType30: info.frameRate = AudioPlayHead::fps30; break;
  713. case kSMPTETimeType60Drop: info.frameRate = AudioPlayHead::fps60drop; break;
  714. case kSMPTETimeType60: info.frameRate = AudioPlayHead::fps60; break;
  715. default: info.frameRate = AudioPlayHead::fpsUnknown; break;
  716. }
  717. double num;
  718. NSInteger den;
  719. NSInteger outDeltaSampleOffsetToNextBeat;
  720. double outCurrentMeasureDownBeat, bpm;
  721. double ppqPosition;
  722. if (hostMusicalContextCallback != nullptr)
  723. {
  724. AUHostMusicalContextBlock musicalContextCallback = hostMusicalContextCallback;
  725. if (musicalContextCallback (&bpm, &num, &den, &ppqPosition, &outDeltaSampleOffsetToNextBeat, &outCurrentMeasureDownBeat))
  726. {
  727. musicContextCallSucceeded = true;
  728. info.timeSigNumerator = (int) num;
  729. info.timeSigDenominator = (int) den;
  730. info.ppqPositionOfLastBarStart = outCurrentMeasureDownBeat;
  731. info.bpm = bpm;
  732. info.ppqPosition = ppqPosition;
  733. info.ppqPositionOfLastBarStart = outCurrentMeasureDownBeat;
  734. }
  735. }
  736. double outCurrentSampleInTimeLine, outCycleStartBeat = 0, outCycleEndBeat = 0;
  737. AUHostTransportStateFlags flags;
  738. if (hostTransportStateCallback != nullptr)
  739. {
  740. AUHostTransportStateBlock transportStateCallback = hostTransportStateCallback;
  741. if (transportStateCallback (&flags, &outCurrentSampleInTimeLine, &outCycleStartBeat, &outCycleEndBeat))
  742. {
  743. transportStateCallSucceeded = true;
  744. info.timeInSamples = (int64) (outCurrentSampleInTimeLine + 0.5);
  745. info.timeInSeconds = info.timeInSamples / getAudioProcessor().getSampleRate();
  746. info.isPlaying = ((flags & AUHostTransportStateMoving) != 0);
  747. info.isLooping = ((flags & AUHostTransportStateCycling) != 0);
  748. info.isRecording = ((flags & AUHostTransportStateRecording) != 0);
  749. info.ppqLoopStart = outCycleStartBeat;
  750. info.ppqLoopEnd = outCycleEndBeat;
  751. }
  752. }
  753. if (musicContextCallSucceeded && transportStateCallSucceeded)
  754. lastAudioHead = info;
  755. return true;
  756. }
  757. //==============================================================================
  758. static void removeEditor (AudioProcessor& processor)
  759. {
  760. ScopedLock editorLock (processor.getCallbackLock());
  761. if (AudioProcessorEditor* editor = processor.getActiveEditor())
  762. {
  763. processor.editorBeingDeleted (editor);
  764. delete editor;
  765. }
  766. }
  767. private:
  768. //==============================================================================
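  // Wraps an AudioBufferList for a single bus. prepare() points the list either at the host-supplied
  // buffers (when their layout matches) or at an internal scratch buffer.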
  769. struct BusBuffer
  770. {
  771. BusBuffer (AUAudioUnitBus* bus, int maxFramesPerBuffer)
  772. : auBus (bus),
  773. maxFrames (maxFramesPerBuffer),
  774. numberOfChannels (static_cast<int> ([[auBus format] channelCount])),
  775. isInterleaved ([[auBus format] isInterleaved])
  776. {
  777. alloc();
  778. }
  779. //==============================================================================
  780. void alloc()
  781. {
  782. const int numBuffers = isInterleaved ? 1 : numberOfChannels;
  783. int bytes = static_cast<int> (sizeof (AudioBufferList))
  784. + ((numBuffers - 1) * static_cast<int> (sizeof (::AudioBuffer)));
  785. jassert (bytes > 0);
  786. bufferListStorage.calloc (static_cast<size_t> (bytes));
  787. bufferList = reinterpret_cast<AudioBufferList*> (bufferListStorage.getData());
  788. const int bufferChannels = isInterleaved ? numberOfChannels : 1;
  789. scratchBuffer.setSize (numBuffers, bufferChannels * maxFrames);
  790. }
  791. void dealloc()
  792. {
  793. bufferList = nullptr;
  794. bufferListStorage.free();
  795. scratchBuffer.setSize (0, 0);
  796. }
  797. //==============================================================================
  798. int numChannels() const noexcept { return numberOfChannels; }
  799. bool interleaved() const noexcept { return isInterleaved; }
  800. AudioBufferList* get() const noexcept { return bufferList; }
  801. //==============================================================================
  802. void prepare (UInt32 nFrames, const AudioBufferList* other = nullptr) noexcept
  803. {
  804. const int numBuffers = isInterleaved ? 1 : numberOfChannels;
  805. const bool isCompatible = isCompatibleWith (other);
  806. bufferList->mNumberBuffers = static_cast<UInt32> (numBuffers);
  807. for (int i = 0; i < numBuffers; ++i)
  808. {
  809. const UInt32 bufferChannels = static_cast<UInt32> (isInterleaved ? numberOfChannels : 1);
  810. bufferList->mBuffers[i].mNumberChannels = bufferChannels;
  811. bufferList->mBuffers[i].mData = (isCompatible ? other->mBuffers[i].mData
  812. : scratchBuffer.getWritePointer (i));
  813. bufferList->mBuffers[i].mDataByteSize = nFrames * bufferChannels * sizeof (float);
  814. }
  815. }
  816. //==============================================================================
  817. bool isCompatibleWith (const AudioBufferList* other) const noexcept
  818. {
  819. if (other == nullptr)
  820. return false;
  821. if (other->mNumberBuffers > 0)
  822. {
  823. const bool otherInterleaved = AudioUnitHelpers::isAudioBufferInterleaved (*other);
  824. const int otherChannels = static_cast<int> (otherInterleaved ? other->mBuffers[0].mNumberChannels
  825. : other->mNumberBuffers);
  826. return otherInterleaved == isInterleaved
  827. && numberOfChannels == otherChannels;
  828. }
  829. return numberOfChannels == 0;
  830. }
  831. private:
  832. AUAudioUnitBus* auBus;
  833. HeapBlock<char> bufferListStorage;
  834. AudioBufferList* bufferList = nullptr;
  835. int maxFrames, numberOfChannels;
  836. bool isInterleaved;
  837. AudioBuffer<float> scratchBuffer;
  838. };
  839. //==============================================================================
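  // Creates one AUAudioUnitBus per JUCE bus, using a standard float format at the default sample rate
  // and the bus's default channel count.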
  840. void addAudioUnitBusses (bool isInput)
  841. {
  842. ScopedPointer<NSMutableArray<AUAudioUnitBus*>> array = [[NSMutableArray<AUAudioUnitBus*> alloc] init];
  843. AudioProcessor& processor = getAudioProcessor();
  844. const int n = AudioUnitHelpers::getBusCount (&processor, isInput);
  845. for (int i = 0; i < n; ++i)
  846. {
  847. ScopedPointer<AUAudioUnitBus> audioUnitBus;
  848. {
  849. ScopedPointer<AVAudioFormat> defaultFormat = [[AVAudioFormat alloc] initStandardFormatWithSampleRate: kDefaultSampleRate
  850. channels: static_cast<AVAudioChannelCount> (processor.getChannelCountOfBus (isInput, i))];
  851. audioUnitBus = [[AUAudioUnitBus alloc] initWithFormat: defaultFormat
  852. error: nullptr];
  853. }
  854. [array addObject: audioUnitBus];
  855. }
  856. (isInput ? inputBusses : outputBusses) = [[AUAudioUnitBusArray alloc] initWithAudioUnit: au
  857. busType: (isInput ? AUAudioUnitBusTypeInput : AUAudioUnitBusTypeOutput)
  858. busses: array];
  859. }
  860. void addParameters()
  861. {
  862. ScopedPointer<NSMutableArray<AUParameterNode*>> params = [[NSMutableArray<AUParameterNode*> alloc] init];
  863. paramObserver = CreateObjCBlock (this, &JuceAudioUnitv3::valueChangedFromHost);
  864. paramProvider = CreateObjCBlock (this, &JuceAudioUnitv3::getValue);
  865. overviewParams = [[NSMutableArray<NSNumber*> alloc] init];
  866. auto& processor = getAudioProcessor();
  867. const int n = processor.getNumParameters();
  868. #if ! JUCE_FORCE_USE_LEGACY_PARAM_IDS
  869. // check whether all parameters are managed
  870. usingManagedParameter = (processor.getParameters().size() == processor.getNumParameters());
  871. #endif
  872. for (int idx = 0; idx < n; ++idx)
  873. {
  874. const String identifier (idx);
  875. const String name = processor.getParameterName (idx);
  876. AudioUnitParameterUnit unit = kAudioUnitParameterUnit_Generic;
  877. AudioUnitParameterOptions flags = (UInt32) (kAudioUnitParameterFlag_IsWritable
  878. | kAudioUnitParameterFlag_IsReadable
  879. | kAudioUnitParameterFlag_HasCFNameString
  880. | kAudioUnitParameterFlag_ValuesHaveStrings);
  881. #if ! JUCE_FORCE_LEGACY_PARAMETER_AUTOMATION_TYPE
  882. flags |= (UInt32) kAudioUnitParameterFlag_IsHighResolution;
  883. #endif
  884. // set whether the param is automatable (unnamed parameters aren't allowed to be automated)
  885. if (name.isEmpty() || ! processor.isParameterAutomatable (idx))
  886. flags |= kAudioUnitParameterFlag_NonRealTime;
  887. if (processor.isMetaParameter (idx))
  888. flags |= kAudioUnitParameterFlag_IsGlobalMeta;
  889. // is this a meter?
  890. if (((processor.getParameterCategory (idx) & 0xffff0000) >> 16) == 2)
  891. {
  892. flags &= ~kAudioUnitParameterFlag_IsWritable;
  893. flags |= kAudioUnitParameterFlag_MeterReadOnly | kAudioUnitParameterFlag_DisplayLogarithmic;
  894. unit = kAudioUnitParameterUnit_LinearGain;
  895. }
  896. #if JUCE_FORCE_USE_LEGACY_PARAM_IDS
  897. AUParameterAddress address = static_cast<AUParameterAddress> (idx);
  898. #else
  899. AUParameterAddress address = generateAUParameterAddressForIndex (idx);
  900. // Consider yourself very unlucky if you hit this assertion: the hash codes of your
  901. // parameter IDs are not unique.
  902. jassert (! paramMap.contains (static_cast<int64> (address)));
  903. paramAddresses.add (address);
  904. paramMap.set (static_cast<int64> (address), idx);
  905. #endif
  906. // The create methods of AUParameterTree return unretained objects (!) -> see Apple's AUAudioUnitImplementation.h header
  907. ScopedPointer<AUParameter> param = [[AUParameterTree createParameterWithIdentifier: juceStringToNS (identifier)
  908. name: juceStringToNS (name)
  909. address: address
  910. min: 0.0f
  911. max: 1.0f
  912. unit: unit
  913. unitName: nullptr
  914. flags: flags
  915. valueStrings: nullptr
  916. dependentParameters: nullptr] retain];
  917. [params addObject: param];
  918. [overviewParams addObject: [NSNumber numberWithUnsignedLongLong:address]];
  919. }
  920. // The create methods of AUParameterTree return unretained objects (!) -> see Apple's AUAudioUnitImplementation.h header
  921. paramTree = [[AUParameterTree createTreeWithChildren: params] retain];
  922. [paramTree setImplementorValueObserver: paramObserver];
  923. [paramTree setImplementorValueProvider: paramProvider];
  924. if (processor.hasEditor())
  925. {
  926. editorParamObserver = CreateObjCBlock (this, &JuceAudioUnitv3::valueChangedForObserver);
  927. editorObserverToken = [paramTree tokenByAddingParameterObserver: editorParamObserver];
  928. }
  929. }
  930. void addPresets()
  931. {
  932. factoryPresets = [[NSMutableArray<AUAudioUnitPreset*> alloc] init];
  933. const int n = getAudioProcessor().getNumPrograms();
  934. for (int idx = 0; idx < n; ++idx)
  935. {
  936. String name = getAudioProcessor().getProgramName (idx);
  937. ScopedPointer<AUAudioUnitPreset> preset = [[AUAudioUnitPreset alloc] init];
  938. [preset setName: juceStringToNS (name)];
  939. [preset setNumber: static_cast<NSInteger> (idx)];
  940. [factoryPresets addObject: preset];
  941. }
  942. }
  943. //==============================================================================
  944. void allocateBusBuffer (bool isInput)
  945. {
  946. OwnedArray<BusBuffer>& busBuffers = isInput ? inBusBuffers : outBusBuffers;
  947. busBuffers.clear();
  948. const int n = AudioUnitHelpers::getBusCount (&getAudioProcessor(), isInput);
  949. const AUAudioFrameCount maxFrames = [getAudioUnit() maximumFramesToRender];
  950. for (int busIdx = 0; busIdx < n; ++busIdx)
  951. busBuffers.add (new BusBuffer ([(isInput ? inputBusses : outputBusses) objectAtIndexedSubscript: static_cast<unsigned int> (busIdx)],
  952. static_cast<int> (maxFrames)));
  953. }
  954. //==============================================================================
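  // Converts the host's linked list of realtime events into JUCE MIDI messages and parameter updates.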
  955. void processEvents (const AURenderEvent *__nullable realtimeEventListHead, int numParams, AUEventSampleTime startTime)
  956. {
  957. for (const AURenderEvent* event = realtimeEventListHead; event != nullptr; event = event->head.next)
  958. {
  959. switch (event->head.eventType)
  960. {
  961. case AURenderEventMIDI:
  962. {
  963. const AUMIDIEvent& midiEvent = event->MIDI;
  964. midiMessages.addEvent (midiEvent.data, midiEvent.length, static_cast<int> (midiEvent.eventSampleTime - startTime));
  965. }
  966. break;
  967. case AURenderEventParameter:
  968. case AURenderEventParameterRamp:
  969. {
  970. const AUParameterEvent& paramEvent = event->parameter;
  971. const int idx = getJuceParameterIndexForAUAddress (paramEvent.parameterAddress);
  972. if (isPositiveAndBelow (idx, numParams))
  973. getAudioProcessor().setParameter (idx, paramEvent.value);
  974. }
  975. break;
  976. default:
  977. break;
  978. }
  979. }
  980. }
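  // The realtime render callback: pulls and maps input buffers, runs the processor's processBlock,
  // forwards any generated MIDI, and copies the result to the requested output bus.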
  981. AUAudioUnitStatus renderCallback (AudioUnitRenderActionFlags* actionFlags, const AudioTimeStamp* timestamp, AUAudioFrameCount frameCount,
  982. NSInteger outputBusNumber, AudioBufferList* outputData, const AURenderEvent *__nullable realtimeEventListHead,
  983. AURenderPullInputBlock __nullable pullInputBlock)
  984. {
  985. auto& processor = getAudioProcessor();
  986. jassert (static_cast<int> (frameCount) <= getAudioProcessor().getBlockSize());
  987. // process params
  988. const int numParams = processor.getNumParameters();
  989. processEvents (realtimeEventListHead, numParams, static_cast<AUEventSampleTime> (timestamp->mSampleTime));
  990. if (lastTimeStamp.mSampleTime != timestamp->mSampleTime)
  991. {
  992. lastTimeStamp = *timestamp;
  993. const int numInputBuses = inBusBuffers. size();
  994. const int numOutputBuses = outBusBuffers.size();
  995. // prepare buffers
  996. {
  997. for (int busIdx = 0; busIdx < numOutputBuses; ++busIdx)
  998. {
  999. BusBuffer& busBuffer = *outBusBuffers[busIdx];
  1000. const bool canUseDirectOutput =
  1001. (busIdx == outputBusNumber && outputData != nullptr && outputData->mNumberBuffers > 0);
  1002. busBuffer.prepare (frameCount, canUseDirectOutput ? outputData : nullptr);
  1003. }
  1004. for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
  1005. {
  1006. BusBuffer& busBuffer = *inBusBuffers[busIdx];
  1007. busBuffer.prepare (frameCount, busIdx < numOutputBuses ? outBusBuffers[busIdx]->get() : nullptr);
  1008. }
  1009. audioBuffer.reset();
  1010. }
  1011. // pull inputs
  1012. {
  1013. for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
  1014. {
  1015. BusBuffer& busBuffer = *inBusBuffers[busIdx];
  1016. AudioBufferList* buffer = busBuffer.get();
  1017. if (pullInputBlock == nullptr || pullInputBlock (actionFlags, timestamp, frameCount, busIdx, buffer) != noErr)
  1018. AudioUnitHelpers::clearAudioBuffer (*buffer);
  1019. if (actionFlags != nullptr && (*actionFlags & kAudioUnitRenderAction_OutputIsSilence) != 0)
  1020. AudioUnitHelpers::clearAudioBuffer (*buffer);
  1021. }
  1022. }
  1023. // set buffer pointer to minimize copying
  1024. {
  1025. int chIdx = 0;
  1026. for (int busIdx = 0; busIdx < numOutputBuses; ++busIdx)
  1027. {
  1028. BusBuffer& busBuffer = *outBusBuffers[busIdx];
  1029. AudioBufferList* buffer = busBuffer.get();
  1030. const bool interleaved = busBuffer.interleaved();
  1031. const int numChannels = busBuffer.numChannels();
  1032. const int* outLayoutMap = mapper.get (false, busIdx);
  1033. for (int ch = 0; ch < numChannels; ++ch)
  1034. audioBuffer.setBuffer (chIdx++, interleaved ? nullptr : static_cast<float*> (buffer->mBuffers[outLayoutMap[ch]].mData));
  1035. }
  1036. // use input pointers on remaining channels
  1037. for (int busIdx = 0; chIdx < totalInChannels;)
  1038. {
  1039. const int channelOffset = processor.getOffsetInBusBufferForAbsoluteChannelIndex (true, chIdx, busIdx);
  1040. BusBuffer& busBuffer = *inBusBuffers[busIdx];
  1041. AudioBufferList* buffer = busBuffer.get();
  1042. const int* inLayoutMap = mapper.get (true, busIdx);
  1043. audioBuffer.setBuffer (chIdx++, busBuffer.interleaved() ? nullptr : static_cast<float*> (buffer->mBuffers[inLayoutMap[channelOffset]].mData));
  1044. }
  1045. }
  1046. // copy input
  1047. {
  1048. for (int busIdx = 0; busIdx < numInputBuses; ++busIdx)
  1049. audioBuffer.push (*inBusBuffers[busIdx]->get(), mapper.get (true, busIdx));
  1050. // clear remaining channels
  1051. for (int i = totalInChannels; i < totalOutChannels; ++i)
  1052. zeromem (audioBuffer.push(), sizeof (float) * frameCount);
  1053. }
  1054. // process audio
  1055. processBlock (audioBuffer.getBuffer (frameCount), midiMessages);
  1056. // send MIDI
  1057. #if JucePlugin_ProducesMidiOutput && JUCE_AUV3_MIDI_OUTPUT_SUPPORTED
  1058. auto midiOut = [au MIDIOutputEventBlock]; // may be nil if the host has not installed a MIDI output block
  1059. MidiMessage msg;
  1060. int samplePosition;
  1061. for (MidiBuffer::Iterator it (midiMessages); midiOut != nullptr && it.getNextEvent (msg, samplePosition);)
  1062. midiOut (samplePosition, 0, msg.getRawDataSize(), msg.getRawData());
  1063. #endif
  1064. midiMessages.clear();
  1065. }
  1066. // copy back
  1067. audioBuffer.pop (*outBusBuffers[(int) outputBusNumber]->get(),
  1068. mapper.get (false, (int) outputBusNumber));
  1069. return noErr;
  1070. }
  1071. void processBlock (AudioBuffer<float>& buffer, MidiBuffer& midiBuffer) noexcept
  1072. {
  1073. auto& processor = getAudioProcessor();
  1074. const ScopedLock sl (processor.getCallbackLock());
  1075. if (processor.isSuspended())
  1076. buffer.clear();
  1077. else if ([au shouldBypassEffect])
  1078. processor.processBlockBypassed (buffer, midiBuffer);
  1079. else
  1080. processor.processBlock (buffer, midiBuffer);
  1081. }
  1082. //==============================================================================
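  // Callbacks installed on the AUParameterTree: the host writes and reads parameter values through these.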
  1083. void valueChangedFromHost (AUParameter* param, AUValue value)
  1084. {
  1085. if (param != nullptr)
  1086. {
  1087. const int idx = getJuceParameterIndexForAUAddress ([param address]);
  1088. auto& processor = getAudioProcessor();
  1089. if (isPositiveAndBelow (idx, processor.getNumParameters()))
  1090. processor.setParameter (idx, value);
  1091. }
  1092. }
  1093. AUValue getValue (AUParameter* param)
  1094. {
  1095. if (param != nullptr)
  1096. {
  1097. const int idx = getJuceParameterIndexForAUAddress ([param address]);
  1098. auto& processor = getAudioProcessor();
  1099. if (isPositiveAndBelow (idx, processor.getNumParameters()))
  1100. return processor.getParameter (idx);
  1101. }
  1102. return 0;
  1103. }
  1104. void valueChangedForObserver(AUParameterAddress, AUValue)
  1105. {
  1106. // this will have already been handled by valueChangedFromHost
  1107. }
  1108. //==============================================================================
  1109. #if JUCE_FORCE_USE_LEGACY_PARAM_IDS
  1110. inline AUParameterAddress getAUParameterAddressForIndex (int paramIndex) const noexcept { return static_cast<AUParameterAddress> (paramIndex); }
  1111. inline int getJuceParameterIndexForAUAddress (AUParameterAddress address) const noexcept { return static_cast<int> (address); }
  1112. #else
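    // when not forcing legacy (index-based) parameter IDs, each JUCE parameter ID is turned into a
    // stable AUParameterAddress: a 64-bit hash for managed string IDs, or the ID's integer value otherwise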
    AUParameterAddress generateAUParameterAddressForIndex (int paramIndex) const
    {
        auto& processor = getAudioProcessor();
        const int n = processor.getNumParameters();

        if (isPositiveAndBelow (paramIndex, n))
        {
            const String& juceParamID = processor.getParameterID (paramIndex);

            return usingManagedParameter ? static_cast<AUParameterAddress> (juceParamID.hashCode64())
                                         : static_cast<AUParameterAddress> (juceParamID.getIntValue());
        }

        return static_cast<AUParameterAddress> (-1);
    }

    inline AUParameterAddress getAUParameterAddressForIndex (int paramIndex) const noexcept
    {
        return usingManagedParameter ? paramAddresses.getReference (paramIndex)
                                     : static_cast<AUParameterAddress> (paramIndex);
    }

    inline int getJuceParameterIndexForAUAddress (AUParameterAddress address) const noexcept
    {
        return usingManagedParameter ? paramMap[static_cast<int64> (address)]
                                     : static_cast<int> (address);
    }
   #endif

    //==============================================================================
    static const double kDefaultSampleRate;

    AudioProcessorHolder::Ptr processorHolder;

    int totalInChannels, totalOutChannels;

    ScopedPointer<AUAudioUnitBusArray> inputBusses;
    ScopedPointer<AUAudioUnitBusArray> outputBusses;

    ObjCBlock<AUImplementorValueObserver> paramObserver;
    ObjCBlock<AUImplementorValueProvider> paramProvider;

   #if ! JUCE_FORCE_USE_LEGACY_PARAM_IDS
    bool usingManagedParameter;
    Array<AUParameterAddress> paramAddresses;
    HashMap<int64, int> paramMap;
   #endif

    // to avoid recursion on parameter changes, we need to add an
    // editor observer to do the parameter changes
    ObjCBlock<AUParameterObserver> editorParamObserver;
    AUParameterObserverToken editorObserverToken;

    ScopedPointer<AUParameterTree> paramTree;
    ScopedPointer<NSMutableArray<NSNumber*>> overviewParams;
    ScopedPointer<NSMutableArray<NSNumber*>> channelCapabilities;

    ScopedPointer<NSMutableArray<AUAudioUnitPreset*>> factoryPresets;

    ObjCBlock<AUInternalRenderBlock> internalRenderBlock;

    AudioUnitHelpers::CoreAudioBufferList audioBuffer;
    AudioUnitHelpers::ChannelRemapper mapper;

    OwnedArray<BusBuffer> inBusBuffers, outBusBuffers;
    MidiBuffer midiMessages;

    ObjCBlock<AUHostMusicalContextBlock> hostMusicalContextCallback;
    ObjCBlock<AUHostTransportStateBlock> hostTransportStateCallback;

    AudioTimeStamp lastTimeStamp;
    CurrentPositionInfo lastAudioHead;

    String contextName;
};

const double JuceAudioUnitv3::kDefaultSampleRate = 44100.0;

JuceAudioUnitv3Base* JuceAudioUnitv3Base::create (AUAudioUnit* audioUnit, AudioComponentDescription descr, AudioComponentInstantiationOptions options, NSError** error)
{
    PluginHostType::jucePlugInClientCurrentWrapperType = AudioProcessor::wrapperType_AudioUnitv3;
    return new JuceAudioUnitv3 (audioUnit, descr, options, error);
}

//==============================================================================
class JuceAUViewController
{
public:
    JuceAUViewController (AUViewController<AUAudioUnitFactory>* p)
        : myself (p)
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());

        PluginHostType::jucePlugInClientCurrentWrapperType = AudioProcessor::wrapperType_AudioUnitv3;
        initialiseJuce_GUI();
    }

    ~JuceAUViewController()
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());

        if (processorHolder != nullptr)
            JuceAudioUnitv3::removeEditor (getAudioProcessor());
    }

    //==============================================================================
    void loadView()
    {
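        // the AudioProcessor is created here on the message thread and kept in a shared holder,
        // so the same instance can later be handed to the JuceAudioUnitv3 created by the factory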
        jassert (MessageManager::getInstance()->isThisTheMessageThread());

        if (AudioProcessor* p = createPluginFilterOfType (AudioProcessor::wrapperType_AudioUnitv3))
        {
            processorHolder = new AudioProcessorHolder (p);
            auto& processor = getAudioProcessor();

            if (processor.hasEditor())
            {
                if (AudioProcessorEditor* editor = processor.createEditorIfNeeded())
                {
                    preferredSize = editor->getBounds();
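                    // wrap the JUCE editor in a native UIView/NSView and attach the editor to it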
                    JUCE_IOS_MAC_VIEW* view = [[[JUCE_IOS_MAC_VIEW alloc] initWithFrame: convertToCGRect (editor->getBounds())] autorelease];
                    [myself setView: view];

                    editor->setVisible (true);
                    editor->addToDesktop (0, view);
                }
            }
        }
    }

    void viewDidLayoutSubviews()
    {
        if (processorHolder != nullptr && [myself view] != nullptr)
        {
            if (AudioProcessorEditor* editor = getAudioProcessor().getActiveEditor())
            {
                if (processorHolder->viewConfiguration != nullptr)
                    editor->hostMIDIControllerIsAvailable (processorHolder->viewConfiguration->hostHasMIDIController);

                editor->setBounds (convertToRectInt ([[myself view] bounds]));

                if (JUCE_IOS_MAC_VIEW* peerView = [[[myself view] subviews] objectAtIndex: 0])
                {
                   #if JUCE_IOS
                    [peerView setNeedsDisplay];
                   #else
                    [peerView setNeedsDisplay: YES];
                   #endif
                }
            }
        }
    }

    CGSize getPreferredContentSize() const
    {
        return CGSizeMake (static_cast<float> (preferredSize.getWidth()),
                           static_cast<float> (preferredSize.getHeight()));
    }

    //==============================================================================
    AUAudioUnit* createAudioUnit (const AudioComponentDescription& descr, NSError** error)
    {
        AUAudioUnit* retval = nil;

        if (! MessageManager::getInstance()->isThisTheMessageThread())
        {
            WaitableEvent creationEvent;

            // AUv3 headers say that we may block this thread and that the message thread is guaranteed
            // to be unblocked
            struct AUCreator : public CallbackMessage
            {
                JuceAUViewController& owner;
                AudioComponentDescription pDescr;
                NSError** pError;
                AUAudioUnit*& outAU;
                WaitableEvent& e;

                AUCreator (JuceAUViewController& parent, const AudioComponentDescription& paramDescr, NSError** paramError,
                           AUAudioUnit*& outputAU, WaitableEvent& event)
                    : owner (parent), pDescr (paramDescr), pError (paramError), outAU (outputAU), e (event)
                {}

                void messageCallback() override
                {
                    outAU = owner.createAudioUnitOnMessageThread (pDescr, pError);
                    e.signal();
                }
            };

            (new AUCreator (*this, descr, error, retval, creationEvent))->post();
            creationEvent.wait (-1);
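            // block this (non-message) thread until the AUCreator callback has run on the message thread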
        }
        else
            retval = createAudioUnitOnMessageThread (descr, error);

        return [retval autorelease];
    }

private:
    //==============================================================================
    AUViewController<AUAudioUnitFactory>* myself;
    AudioProcessorHolder::Ptr processorHolder = nullptr;
    Rectangle<int> preferredSize { 1, 1 };

    //==============================================================================
    AUAudioUnit* createAudioUnitOnMessageThread (const AudioComponentDescription& descr, NSError** error)
    {
        jassert (MessageManager::getInstance()->isThisTheMessageThread());

        [myself view]; // this will call [view load] and ensure that the AudioProcessor has been instantiated

        if (processorHolder == nullptr)
            return nullptr;

        return (new JuceAudioUnitv3 (processorHolder, descr, 0, error))->getAudioUnit();
    }

    AudioProcessor& getAudioProcessor() const noexcept { return **processorHolder; }
};

//==============================================================================
// necessary glue code
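// this Objective-C view-controller class simply forwards each factory/view callback
// to the C++ JuceAUViewController instance it owns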
@interface JUCE_VIEWCONTROLLER_OBJC_NAME (JucePlugin_AUExportPrefix) : AUViewController<AUAudioUnitFactory>
@end

@implementation JUCE_VIEWCONTROLLER_OBJC_NAME (JucePlugin_AUExportPrefix)
{
    ScopedPointer<JuceAUViewController> cpp;
}

- (instancetype) initWithNibName: (nullable NSString*) nib bundle: (nullable NSBundle*) bndl { self = [super initWithNibName: nib bundle: bndl]; cpp = new JuceAUViewController (self); return self; }
- (void) loadView { cpp->loadView(); }
- (AUAudioUnit *)createAudioUnitWithComponentDescription:(AudioComponentDescription)desc error:(NSError **)error { return cpp->createAudioUnit (desc, error); }
- (CGSize) preferredContentSize { return cpp->getPreferredContentSize(); }
- (void)viewDidLayoutSubviews { return cpp->viewDidLayoutSubviews(); }
@end

//==============================================================================
#if JUCE_IOS
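// stub implementations: the AUv3 wrapper never reports an Inter-App Audio connection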
bool JUCE_CALLTYPE juce_isInterAppAudioConnected() { return false; }
void JUCE_CALLTYPE juce_switchToHostApplication() {}

 #if JUCE_MODULE_AVAILABLE_juce_gui_basics
Image JUCE_CALLTYPE juce_getIAAHostIcon (int) { return Image(); }
 #endif
#endif

#pragma clang diagnostic pop

#endif