/*
  ==============================================================================

   This file is part of the Water library.
   Copyright (c) 2015 ROLI Ltd.
   Copyright (C) 2017-2018 Filipe Coelho <falktx@falktx.com>

   Permission is granted to use this software under the terms of the GNU
   General Public License as published by the Free Software Foundation;
   either version 2 of the License, or any later version.

   This program is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
   FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.

   For a full copy of the GNU General Public License see the doc/GPL.txt file.

  ==============================================================================
*/

#include "AudioProcessorGraph.h"
#include "../containers/SortedSet.h"

namespace water {

const uint AudioProcessorGraph::midiChannelIndex = 0x4000;

//==============================================================================
namespace GraphRenderingOps
{

struct AudioGraphRenderingOpBase
{
    AudioGraphRenderingOpBase() noexcept {}
    virtual ~AudioGraphRenderingOpBase() {}

    virtual void perform (AudioSampleBuffer& sharedAudioBufferChans,
                          AudioSampleBuffer& sharedCVBufferChans,
                          const OwnedArray<MidiBuffer>& sharedMidiBuffers,
                          const int numSamples) = 0;
};

// use CRTP
template <class Child>
struct AudioGraphRenderingOp : public AudioGraphRenderingOpBase
{
    void perform (AudioSampleBuffer& sharedAudioBufferChans,
                  AudioSampleBuffer& sharedCVBufferChans,
                  const OwnedArray<MidiBuffer>& sharedMidiBuffers,
                  const int numSamples) override
    {
        static_cast<Child*> (this)->perform (sharedAudioBufferChans,
                                             sharedCVBufferChans,
                                             sharedMidiBuffers,
                                             numSamples);
    }
};
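
/*
   Illustrative sketch (added comment, not part of the library): a new rendering
   op only needs to derive from AudioGraphRenderingOp<ItsOwnType> and provide a
   non-virtual perform() with the matching signature; the CRTP wrapper above
   forwards the single virtual call to it. The GainChannelOp name and the
   applyGain() call below are assumptions for illustration only.

       struct GainChannelOp : public AudioGraphRenderingOp<GainChannelOp>
       {
           GainChannelOp (const int channel, const float g) noexcept
               : channelNum (channel), gain (g) {}

           void perform (AudioSampleBuffer& sharedAudioBufferChans,
                         AudioSampleBuffer&,
                         const OwnedArray<MidiBuffer>&,
                         const int numSamples)
           {
               // scale one shared audio channel in place
               sharedAudioBufferChans.applyGain (channelNum, 0, numSamples, gain);
           }

           const int channelNum;
           const float gain;
       };
*/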
//==============================================================================
struct ClearChannelOp : public AudioGraphRenderingOp<ClearChannelOp>
{
    ClearChannelOp (const int channel, const bool cv) noexcept
        : channelNum (channel), isCV (cv) {}

    void perform (AudioSampleBuffer& sharedAudioBufferChans,
                  AudioSampleBuffer& sharedCVBufferChans,
                  const OwnedArray<MidiBuffer>&,
                  const int numSamples)
    {
        if (isCV)
            sharedCVBufferChans.clear (channelNum, 0, numSamples);
        else
            sharedAudioBufferChans.clear (channelNum, 0, numSamples);
    }

    const int channelNum;
    const bool isCV;

    CARLA_DECLARE_NON_COPY_CLASS (ClearChannelOp)
};

//==============================================================================
struct CopyChannelOp : public AudioGraphRenderingOp<CopyChannelOp>
{
    CopyChannelOp (const int srcChan, const int dstChan, const bool cv) noexcept
        : srcChannelNum (srcChan), dstChannelNum (dstChan), isCV (cv) {}

    void perform (AudioSampleBuffer& sharedAudioBufferChans,
                  AudioSampleBuffer& sharedCVBufferChans,
                  const OwnedArray<MidiBuffer>&,
                  const int numSamples)
    {
        if (isCV)
            sharedCVBufferChans.copyFrom (dstChannelNum, 0, sharedCVBufferChans, srcChannelNum, 0, numSamples);
        else
            sharedAudioBufferChans.copyFrom (dstChannelNum, 0, sharedAudioBufferChans, srcChannelNum, 0, numSamples);
    }

    const int srcChannelNum, dstChannelNum;
    const bool isCV;

    CARLA_DECLARE_NON_COPY_CLASS (CopyChannelOp)
};

//==============================================================================
struct AddChannelOp : public AudioGraphRenderingOp<AddChannelOp>
{
    AddChannelOp (const int srcChan, const int dstChan, const bool cv) noexcept
        : srcChannelNum (srcChan), dstChannelNum (dstChan), isCV (cv) {}

    void perform (AudioSampleBuffer& sharedAudioBufferChans,
                  AudioSampleBuffer& sharedCVBufferChans,
                  const OwnedArray<MidiBuffer>&,
                  const int numSamples)
    {
        if (isCV)
            sharedCVBufferChans.addFrom (dstChannelNum, 0, sharedCVBufferChans, srcChannelNum, 0, numSamples);
        else
            sharedAudioBufferChans.addFrom (dstChannelNum, 0, sharedAudioBufferChans, srcChannelNum, 0, numSamples);
    }

    const int srcChannelNum, dstChannelNum;
    const bool isCV;

    CARLA_DECLARE_NON_COPY_CLASS (AddChannelOp)
};

//==============================================================================
struct ClearMidiBufferOp : public AudioGraphRenderingOp<ClearMidiBufferOp>
{
    ClearMidiBufferOp (const int buffer) noexcept : bufferNum (buffer) {}

    void perform (AudioSampleBuffer&, AudioSampleBuffer&,
                  const OwnedArray<MidiBuffer>& sharedMidiBuffers,
                  const int)
    {
        sharedMidiBuffers.getUnchecked (bufferNum)->clear();
    }

    const int bufferNum;

    CARLA_DECLARE_NON_COPY_CLASS (ClearMidiBufferOp)
};

//==============================================================================
struct CopyMidiBufferOp : public AudioGraphRenderingOp<CopyMidiBufferOp>
{
    CopyMidiBufferOp (const int srcBuffer, const int dstBuffer) noexcept
        : srcBufferNum (srcBuffer), dstBufferNum (dstBuffer)
    {}

    void perform (AudioSampleBuffer&, AudioSampleBuffer&,
                  const OwnedArray<MidiBuffer>& sharedMidiBuffers,
                  const int)
    {
        *sharedMidiBuffers.getUnchecked (dstBufferNum) = *sharedMidiBuffers.getUnchecked (srcBufferNum);
    }

    const int srcBufferNum, dstBufferNum;

    CARLA_DECLARE_NON_COPY_CLASS (CopyMidiBufferOp)
};

//==============================================================================
struct AddMidiBufferOp : public AudioGraphRenderingOp<AddMidiBufferOp>
{
    AddMidiBufferOp (const int srcBuffer, const int dstBuffer)
        : srcBufferNum (srcBuffer), dstBufferNum (dstBuffer)
    {}

    void perform (AudioSampleBuffer&, AudioSampleBuffer&,
                  const OwnedArray<MidiBuffer>& sharedMidiBuffers,
                  const int numSamples)
    {
        sharedMidiBuffers.getUnchecked (dstBufferNum)
            ->addEvents (*sharedMidiBuffers.getUnchecked (srcBufferNum), 0, numSamples, 0);
    }

    const int srcBufferNum, dstBufferNum;

    CARLA_DECLARE_NON_COPY_CLASS (AddMidiBufferOp)
};

//==============================================================================
struct DelayChannelOp : public AudioGraphRenderingOp<DelayChannelOp>
{
    DelayChannelOp (const int chan, const int delaySize, const bool cv)
        : channel (chan),
          bufferSize (delaySize + 1),
          readIndex (0), writeIndex (delaySize),
          isCV (cv)
    {
        buffer.calloc ((size_t) bufferSize);
    }

    void perform (AudioSampleBuffer& sharedAudioBufferChans,
                  AudioSampleBuffer& sharedCVBufferChans,
                  const OwnedArray<MidiBuffer>&,
                  const int numSamples)
    {
        float* data = isCV
                    ? sharedCVBufferChans.getWritePointer (channel, 0)
                    : sharedAudioBufferChans.getWritePointer (channel, 0);

        HeapBlock<float>& block = buffer;

        for (int i = numSamples; --i >= 0;)
        {
            block [writeIndex] = *data;
            *data++ = block [readIndex];

            if (++readIndex  >= bufferSize) readIndex  = 0;
            if (++writeIndex >= bufferSize) writeIndex = 0;
        }
    }

private:
    HeapBlock<float> buffer;
    const int channel, bufferSize;
    int readIndex, writeIndex;
    const bool isCV;

    CARLA_DECLARE_NON_COPY_CLASS (DelayChannelOp)
};
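
/*
   How the circular delay above behaves, traced by hand for delaySize == 2
   (bufferSize == 3, writeIndex starting 2 slots ahead of readIndex):

       in:  x0  x1  x2  x3 ...
       out:  0   0  x0  x1 ...

   Each sample is written delaySize slots ahead of where it is read back, so the
   channel comes out delayed by exactly delaySize samples; the latency
   compensation in the sequence calculator below relies on this.
   (Comment added for illustration.)
*/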
//==============================================================================
struct ProcessBufferOp : public AudioGraphRenderingOp<ProcessBufferOp>
{
    ProcessBufferOp (const AudioProcessorGraph::Node::Ptr& n,
                     const Array<uint>& audioChannelsUsed,
                     const uint totalNumChans,
                     const Array<uint>& cvInChannelsUsed,
                     const Array<uint>& cvOutChannelsUsed,
                     const int midiBuffer)
        : node (n),
          processor (n->getProcessor()),
          audioChannelsToUse (audioChannelsUsed),
          cvInChannelsToUse (cvInChannelsUsed),
          cvOutChannelsToUse (cvOutChannelsUsed),
          totalAudioChans (jmax (1U, totalNumChans)),
          totalCVIns (cvInChannelsUsed.size()),
          totalCVOuts (cvOutChannelsUsed.size()),
          midiBufferToUse (midiBuffer)
    {
        audioChannels.calloc (totalAudioChans);
        cvInChannels.calloc (totalCVIns);
        cvOutChannels.calloc (totalCVOuts);

        while (audioChannelsToUse.size() < static_cast<int>(totalAudioChans))
            audioChannelsToUse.add (0);
    }

    void perform (AudioSampleBuffer& sharedAudioBufferChans,
                  AudioSampleBuffer& sharedCVBufferChans,
                  const OwnedArray<MidiBuffer>& sharedMidiBuffers,
                  const int numSamples)
    {
        HeapBlock<float*>& audioChannelsCopy = audioChannels;
        HeapBlock<float*>& cvInChannelsCopy  = cvInChannels;
        HeapBlock<float*>& cvOutChannelsCopy = cvOutChannels;

        for (uint i = 0; i < totalAudioChans; ++i)
            audioChannelsCopy[i] = sharedAudioBufferChans.getWritePointer (audioChannelsToUse.getUnchecked (i), 0);

        for (uint i = 0; i < totalCVIns; ++i)
            cvInChannels[i] = sharedCVBufferChans.getWritePointer (cvInChannelsToUse.getUnchecked (i), 0);

        for (uint i = 0; i < totalCVOuts; ++i)
            cvOutChannels[i] = sharedCVBufferChans.getWritePointer (cvOutChannelsToUse.getUnchecked (i), 0);

        AudioSampleBuffer audioBuffer (audioChannelsCopy, totalAudioChans, numSamples);
        AudioSampleBuffer cvInBuffer  (cvInChannelsCopy,  totalCVIns,  numSamples);
        AudioSampleBuffer cvOutBuffer (cvOutChannelsCopy, totalCVOuts, numSamples);

        if (processor->isSuspended())
        {
            audioBuffer.clear();
            cvOutBuffer.clear();
        }
        else
        {
            const CarlaRecursiveMutexLocker cml (processor->getCallbackLock());

            callProcess (audioBuffer, cvInBuffer, cvOutBuffer, *sharedMidiBuffers.getUnchecked (midiBufferToUse));
        }
    }

    void callProcess (AudioSampleBuffer& audioBuffer,
                      AudioSampleBuffer& cvInBuffer,
                      AudioSampleBuffer& cvOutBuffer,
                      MidiBuffer& midiMessages)
    {
        processor->processBlockWithCV (audioBuffer, cvInBuffer, cvOutBuffer, midiMessages);
    }

    const AudioProcessorGraph::Node::Ptr node;
    AudioProcessor* const processor;

private:
    Array<uint> audioChannelsToUse;
    Array<uint> cvInChannelsToUse;
    Array<uint> cvOutChannelsToUse;
    HeapBlock<float*> audioChannels;
    HeapBlock<float*> cvInChannels;
    HeapBlock<float*> cvOutChannels;
    AudioSampleBuffer tempBuffer;
    const uint totalAudioChans;
    const uint totalCVIns;
    const uint totalCVOuts;
    const int midiBufferToUse;

    CARLA_DECLARE_NON_COPY_CLASS (ProcessBufferOp)
};
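
/*
   Note (added comment): ProcessBufferOp owns no sample data of its own. On every
   block it re-resolves write pointers into the shared audio/CV channel pools,
   wraps them in local AudioSampleBuffer objects that refer to those channels for
   exactly numSamples frames, and hands the views to the node's processor. This
   is what lets many nodes share the same pool channels within one rendering
   sequence without any per-block allocation.
*/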
//==============================================================================
/** Used to calculate the correct sequence of rendering ops needed, based on
    the best re-use of shared buffers at each stage.
*/
struct RenderingOpSequenceCalculator
{
    RenderingOpSequenceCalculator (AudioProcessorGraph& g,
                                   const Array<AudioProcessorGraph::Node*>& nodes,
                                   Array<void*>& renderingOps)
        : graph (g),
          orderedNodes (nodes),
          totalLatency (0)
    {
        audioNodeIds.add ((uint32) zeroNodeID); // first buffer is read-only zeros
        audioChannels.add (0);

        cvNodeIds.add ((uint32) zeroNodeID);
        cvChannels.add (0);

        midiNodeIds.add ((uint32) zeroNodeID);

        for (int i = 0; i < orderedNodes.size(); ++i)
        {
            createRenderingOpsForNode (*orderedNodes.getUnchecked(i), renderingOps, i);
            markAnyUnusedBuffersAsFree (i);
        }

        graph.setLatencySamples (totalLatency);
    }

    int getNumAudioBuffersNeeded() const noexcept { return audioNodeIds.size(); }
    int getNumCVBuffersNeeded()    const noexcept { return cvNodeIds.size(); }
    int getNumMidiBuffersNeeded()  const noexcept { return midiNodeIds.size(); }

private:
    //==============================================================================
    AudioProcessorGraph& graph;
    const Array<AudioProcessorGraph::Node*>& orderedNodes;
    Array<uint> audioChannels, cvChannels;
    Array<uint32> audioNodeIds, cvNodeIds, midiNodeIds;

    enum { freeNodeID = 0xffffffff, zeroNodeID = 0xfffffffe };

    static bool isNodeBusy (uint32 nodeID) noexcept { return nodeID != freeNodeID && nodeID != zeroNodeID; }

    Array<uint32> nodeDelayIDs;
    Array<int> nodeDelays;
    int totalLatency;

    int getNodeDelay (const uint32 nodeID) const { return nodeDelays [nodeDelayIDs.indexOf (nodeID)]; }

    void setNodeDelay (const uint32 nodeID, const int latency)
    {
        const int index = nodeDelayIDs.indexOf (nodeID);

        if (index >= 0)
        {
            nodeDelays.set (index, latency);
        }
        else
        {
            nodeDelayIDs.add (nodeID);
            nodeDelays.add (latency);
        }
    }

    int getInputLatencyForNode (const uint32 nodeID) const
    {
        int maxLatency = 0;

        for (int i = graph.getNumConnections(); --i >= 0;)
        {
            const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

            if (c->destNodeId == nodeID)
                maxLatency = jmax (maxLatency, getNodeDelay (c->sourceNodeId));
        }

        return maxLatency;
    }

    //==============================================================================
    void createRenderingOpsForNode (AudioProcessorGraph::Node& node,
                                    Array<void*>& renderingOps,
                                    const int ourRenderingIndex)
    {
        AudioProcessor& processor = *node.getProcessor();

        const uint numAudioIns  = processor.getTotalNumInputChannels(AudioProcessor::ChannelTypeAudio);
        const uint numAudioOuts = processor.getTotalNumOutputChannels(AudioProcessor::ChannelTypeAudio);
        const uint numCVIns     = processor.getTotalNumInputChannels(AudioProcessor::ChannelTypeCV);
        const uint numCVOuts    = processor.getTotalNumOutputChannels(AudioProcessor::ChannelTypeCV);
        const uint totalAudioChans = jmax (numAudioIns, numAudioOuts);

        Array<uint> audioChannelsToUse, cvInChannelsToUse, cvOutChannelsToUse;
        int midiBufferToUse = -1;

        int maxLatency = getInputLatencyForNode (node.nodeId);

        for (uint inputChan = 0; inputChan < numAudioIns; ++inputChan)
        {
            // get a list of all the inputs to this node
            Array<uint32> sourceNodes;
            Array<uint> sourceOutputChans;

            for (int i = graph.getNumConnections(); --i >= 0;)
            {
                const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

                if (c->destNodeId == node.nodeId
                     && c->destChannelIndex == inputChan
                     && c->channelType == AudioProcessor::ChannelTypeAudio)
                {
                    sourceNodes.add (c->sourceNodeId);
                    sourceOutputChans.add (c->sourceChannelIndex);
                }
            }

            int bufIndex = -1;

            if (sourceNodes.size() == 0)
            {
                // unconnected input channel
                if (inputChan >= numAudioOuts)
                {
                    bufIndex = getReadOnlyEmptyBuffer();
                    wassert (bufIndex >= 0);
                }
                else
                {
                    bufIndex = getFreeBuffer (AudioProcessor::ChannelTypeAudio);
                    renderingOps.add (new ClearChannelOp (bufIndex, false));
                }
            }
            else if (sourceNodes.size() == 1)
            {
                // channel with a straightforward single input..
                const uint32 srcNode = sourceNodes.getUnchecked(0);
                const uint   srcChan = sourceOutputChans.getUnchecked(0);

                bufIndex = getBufferContaining (AudioProcessor::ChannelTypeAudio, srcNode, srcChan);

                if (bufIndex < 0)
                {
                    // if not found, this is probably a feedback loop
                    bufIndex = getReadOnlyEmptyBuffer();
                    wassert (bufIndex >= 0);
                }

                if (inputChan < numAudioOuts
                     && isBufferNeededLater (ourRenderingIndex,
                                             inputChan,
                                             srcNode, srcChan))
                {
                    // can't mess up this channel because it's needed later by another node, so we
                    // need to use a copy of it..
                    const int newFreeBuffer = getFreeBuffer (AudioProcessor::ChannelTypeAudio);

                    renderingOps.add (new CopyChannelOp (bufIndex, newFreeBuffer, false));

                    bufIndex = newFreeBuffer;
                }

                const int nodeDelay = getNodeDelay (srcNode);

                if (nodeDelay < maxLatency)
                    renderingOps.add (new DelayChannelOp (bufIndex, maxLatency - nodeDelay, false));
            }
            else
            {
                // channel with a mix of several inputs..

                // try to find a re-usable channel from our inputs..
                int reusableInputIndex = -1;

                for (int i = 0; i < sourceNodes.size(); ++i)
                {
                    const int sourceBufIndex = getBufferContaining (AudioProcessor::ChannelTypeAudio,
                                                                    sourceNodes.getUnchecked(i),
                                                                    sourceOutputChans.getUnchecked(i));

                    if (sourceBufIndex >= 0
                         && ! isBufferNeededLater (ourRenderingIndex,
                                                   inputChan,
                                                   sourceNodes.getUnchecked(i),
                                                   sourceOutputChans.getUnchecked(i)))
                    {
                        // we've found one of our input chans that can be re-used..
                        reusableInputIndex = i;
                        bufIndex = sourceBufIndex;

                        const int nodeDelay = getNodeDelay (sourceNodes.getUnchecked (i));

                        if (nodeDelay < maxLatency)
                            renderingOps.add (new DelayChannelOp (sourceBufIndex, maxLatency - nodeDelay, false));

                        break;
                    }
                }

                if (reusableInputIndex < 0)
                {
                    // can't re-use any of our input chans, so get a new one and copy everything into it..
                    bufIndex = getFreeBuffer (AudioProcessor::ChannelTypeAudio);
                    wassert (bufIndex != 0);

                    const int srcIndex = getBufferContaining (AudioProcessor::ChannelTypeAudio,
                                                              sourceNodes.getUnchecked (0),
                                                              sourceOutputChans.getUnchecked (0));

                    if (srcIndex < 0)
                    {
                        // if not found, this is probably a feedback loop
                        renderingOps.add (new ClearChannelOp (bufIndex, false));
                    }
                    else
                    {
                        renderingOps.add (new CopyChannelOp (srcIndex, bufIndex, false));
                    }

                    reusableInputIndex = 0;

                    const int nodeDelay = getNodeDelay (sourceNodes.getFirst());

                    if (nodeDelay < maxLatency)
                        renderingOps.add (new DelayChannelOp (bufIndex, maxLatency - nodeDelay, false));
                }

                for (int j = 0; j < sourceNodes.size(); ++j)
                {
                    if (j != reusableInputIndex)
                    {
                        int srcIndex = getBufferContaining (AudioProcessor::ChannelTypeAudio,
                                                            sourceNodes.getUnchecked(j),
                                                            sourceOutputChans.getUnchecked(j));

                        if (srcIndex >= 0)
                        {
                            const int nodeDelay = getNodeDelay (sourceNodes.getUnchecked (j));

                            if (nodeDelay < maxLatency)
                            {
                                if (! isBufferNeededLater (ourRenderingIndex, inputChan,
                                                           sourceNodes.getUnchecked(j),
                                                           sourceOutputChans.getUnchecked(j)))
                                {
                                    renderingOps.add (new DelayChannelOp (srcIndex, maxLatency - nodeDelay, false));
                                }
                                else // buffer is reused elsewhere, can't be delayed
                                {
                                    const int bufferToDelay = getFreeBuffer (AudioProcessor::ChannelTypeAudio);
                                    renderingOps.add (new CopyChannelOp (srcIndex, bufferToDelay, false));
                                    renderingOps.add (new DelayChannelOp (bufferToDelay, maxLatency - nodeDelay, false));
                                    srcIndex = bufferToDelay;
                                }
                            }

                            renderingOps.add (new AddChannelOp (srcIndex, bufIndex, false));
                        }
                    }
                }
            }

            CARLA_SAFE_ASSERT_CONTINUE (bufIndex >= 0);

            audioChannelsToUse.add (bufIndex);

            if (inputChan < numAudioOuts)
                markBufferAsContaining (AudioProcessor::ChannelTypeAudio, bufIndex, node.nodeId, inputChan);
        }

        for (uint outputChan = numAudioIns; outputChan < numAudioOuts; ++outputChan)
        {
            const int bufIndex = getFreeBuffer (AudioProcessor::ChannelTypeAudio);
            CARLA_SAFE_ASSERT_CONTINUE (bufIndex > 0);

            audioChannelsToUse.add (bufIndex);

            markBufferAsContaining (AudioProcessor::ChannelTypeAudio, bufIndex, node.nodeId, outputChan);
        }

        for (uint inputChan = 0; inputChan < numCVIns; ++inputChan)
        {
            // get a list of all the inputs to this node
            Array<uint32> sourceNodes;
            Array<uint> sourceOutputChans;

            for (int i = graph.getNumConnections(); --i >= 0;)
            {
                const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

                if (c->destNodeId == node.nodeId
                     && c->destChannelIndex == inputChan
                     && c->channelType == AudioProcessor::ChannelTypeCV)
                {
                    sourceNodes.add (c->sourceNodeId);
                    sourceOutputChans.add (c->sourceChannelIndex);
                }
            }

            int bufIndex = -1;

            if (sourceNodes.size() == 0)
            {
                // unconnected input channel
                bufIndex = getReadOnlyEmptyBuffer();
                wassert (bufIndex >= 0);
            }
            else if (sourceNodes.size() == 1)
            {
                // channel with a straightforward single input..
                const uint32 srcNode = sourceNodes.getUnchecked(0);
                const uint   srcChan = sourceOutputChans.getUnchecked(0);

                bufIndex = getBufferContaining (AudioProcessor::ChannelTypeCV, srcNode, srcChan);

                if (bufIndex < 0)
                {
                    // if not found, this is probably a feedback loop
                    bufIndex = getReadOnlyEmptyBuffer();
                    wassert (bufIndex >= 0);
                }

                const int newFreeBuffer = getFreeBuffer (AudioProcessor::ChannelTypeCV);

                renderingOps.add (new CopyChannelOp (bufIndex, newFreeBuffer, true));

                bufIndex = newFreeBuffer;

                const int nodeDelay = getNodeDelay (srcNode);

                if (nodeDelay < maxLatency)
                    renderingOps.add (new DelayChannelOp (bufIndex, maxLatency - nodeDelay, true));
            }
            else
            {
                // channel with a mix of several inputs..
                {
                    bufIndex = getFreeBuffer (AudioProcessor::ChannelTypeCV);
                    wassert (bufIndex != 0);

                    const int srcIndex = getBufferContaining (AudioProcessor::ChannelTypeCV,
                                                              sourceNodes.getUnchecked (0),
                                                              sourceOutputChans.getUnchecked (0));

                    if (srcIndex < 0)
                    {
                        // if not found, this is probably a feedback loop
                        renderingOps.add (new ClearChannelOp (bufIndex, true));
                    }
                    else
                    {
                        renderingOps.add (new CopyChannelOp (srcIndex, bufIndex, true));
                    }

                    const int nodeDelay = getNodeDelay (sourceNodes.getFirst());

                    if (nodeDelay < maxLatency)
                        renderingOps.add (new DelayChannelOp (bufIndex, maxLatency - nodeDelay, true));
                }

                for (int j = 1; j < sourceNodes.size(); ++j)
                {
                    int srcIndex = getBufferContaining (AudioProcessor::ChannelTypeCV,
                                                        sourceNodes.getUnchecked(j),
                                                        sourceOutputChans.getUnchecked(j));

                    if (srcIndex >= 0)
                    {
                        const int nodeDelay = getNodeDelay (sourceNodes.getUnchecked (j));

                        if (nodeDelay < maxLatency)
                        {
                            const int bufferToDelay = getFreeBuffer (AudioProcessor::ChannelTypeCV);
                            renderingOps.add (new CopyChannelOp (srcIndex, bufferToDelay, true));
                            renderingOps.add (new DelayChannelOp (bufferToDelay, maxLatency - nodeDelay, true));
                            srcIndex = bufferToDelay;
                        }

                        renderingOps.add (new AddChannelOp (srcIndex, bufIndex, true));
                    }
                }
            }

            CARLA_SAFE_ASSERT_CONTINUE (bufIndex >= 0);

            cvInChannelsToUse.add (bufIndex);
            markBufferAsContaining (AudioProcessor::ChannelTypeCV, bufIndex, node.nodeId, inputChan);
        }

        for (uint outputChan = 0; outputChan < numCVOuts; ++outputChan)
        {
            const int bufIndex = getFreeBuffer (AudioProcessor::ChannelTypeCV);
            CARLA_SAFE_ASSERT_CONTINUE (bufIndex > 0);

            cvOutChannelsToUse.add (bufIndex);
            markBufferAsContaining (AudioProcessor::ChannelTypeCV, bufIndex, node.nodeId, outputChan);
        }

        // Now the same thing for midi..
        Array<uint32> midiSourceNodes;

        for (int i = graph.getNumConnections(); --i >= 0;)
        {
            const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

            if (c->destNodeId == node.nodeId && c->channelType == AudioProcessor::ChannelTypeMIDI)
                midiSourceNodes.add (c->sourceNodeId);
        }

        if (midiSourceNodes.size() == 0)
        {
            // No midi inputs..
            midiBufferToUse = getFreeBuffer (AudioProcessor::ChannelTypeMIDI); // need to pick a buffer even if the processor doesn't use midi

            if (processor.acceptsMidi() || processor.producesMidi())
                renderingOps.add (new ClearMidiBufferOp (midiBufferToUse));
        }
        else if (midiSourceNodes.size() == 1)
        {
            // One midi input..
            midiBufferToUse = getBufferContaining (AudioProcessor::ChannelTypeMIDI,
                                                   midiSourceNodes.getUnchecked(0),
                                                   0);

            if (midiBufferToUse >= 0)
            {
                if (isBufferNeededLater (ourRenderingIndex,
                                         AudioProcessorGraph::midiChannelIndex,
                                         midiSourceNodes.getUnchecked(0),
                                         AudioProcessorGraph::midiChannelIndex))
                {
                    // can't mess up this channel because it's needed later by another node, so we
                    // need to use a copy of it..
                    const int newFreeBuffer = getFreeBuffer (AudioProcessor::ChannelTypeMIDI);

                    renderingOps.add (new CopyMidiBufferOp (midiBufferToUse, newFreeBuffer));

                    midiBufferToUse = newFreeBuffer;
                }
            }
            else
            {
                // probably a feedback loop, so just use an empty one..
                midiBufferToUse = getFreeBuffer (AudioProcessor::ChannelTypeMIDI); // need to pick a buffer even if the processor doesn't use midi
            }
        }
        else
        {
            // More than one midi input being mixed..
            int reusableInputIndex = -1;

            for (int i = 0; i < midiSourceNodes.size(); ++i)
            {
                const int sourceBufIndex = getBufferContaining (AudioProcessor::ChannelTypeMIDI,
                                                                midiSourceNodes.getUnchecked(i),
                                                                0);

                if (sourceBufIndex >= 0
                     && ! isBufferNeededLater (ourRenderingIndex,
                                               AudioProcessorGraph::midiChannelIndex,
                                               midiSourceNodes.getUnchecked(i),
                                               AudioProcessorGraph::midiChannelIndex))
                {
                    // we've found one of our input buffers that can be re-used..
                    reusableInputIndex = i;
                    midiBufferToUse = sourceBufIndex;
                    break;
                }
            }

            if (reusableInputIndex < 0)
            {
                // can't re-use any of our input buffers, so get a new one and copy everything into it..
                midiBufferToUse = getFreeBuffer (AudioProcessor::ChannelTypeMIDI);
                wassert (midiBufferToUse >= 0);

                const int srcIndex = getBufferContaining (AudioProcessor::ChannelTypeMIDI,
                                                          midiSourceNodes.getUnchecked(0),
                                                          0);

                if (srcIndex >= 0)
                    renderingOps.add (new CopyMidiBufferOp (srcIndex, midiBufferToUse));
                else
                    renderingOps.add (new ClearMidiBufferOp (midiBufferToUse));

                reusableInputIndex = 0;
            }

            for (int j = 0; j < midiSourceNodes.size(); ++j)
            {
                if (j != reusableInputIndex)
                {
                    const int srcIndex = getBufferContaining (AudioProcessor::ChannelTypeMIDI,
                                                              midiSourceNodes.getUnchecked(j),
                                                              0);
                    if (srcIndex >= 0)
                        renderingOps.add (new AddMidiBufferOp (srcIndex, midiBufferToUse));
                }
            }
        }

        if (processor.producesMidi())
            markBufferAsContaining (AudioProcessor::ChannelTypeMIDI,
                                    midiBufferToUse, node.nodeId,
                                    0);

        setNodeDelay (node.nodeId, maxLatency + processor.getLatencySamples());

        if (numAudioOuts == 0)
            totalLatency = maxLatency;

        renderingOps.add (new ProcessBufferOp (&node,
                                               audioChannelsToUse,
                                               totalAudioChans,
                                               cvInChannelsToUse,
                                               cvOutChannelsToUse,
                                               midiBufferToUse));
    }

    //==============================================================================
    int getFreeBuffer (const AudioProcessor::ChannelType channelType)
    {
        switch (channelType)
        {
        case AudioProcessor::ChannelTypeAudio:
            for (int i = 1; i < audioNodeIds.size(); ++i)
                if (audioNodeIds.getUnchecked(i) == freeNodeID)
                    return i;

            audioNodeIds.add ((uint32) freeNodeID);
            audioChannels.add (0);
            return audioNodeIds.size() - 1;

        case AudioProcessor::ChannelTypeCV:
            for (int i = 1; i < cvNodeIds.size(); ++i)
                if (cvNodeIds.getUnchecked(i) == freeNodeID)
                    return i;

            cvNodeIds.add ((uint32) freeNodeID);
            cvChannels.add (0);
            return cvNodeIds.size() - 1;

        case AudioProcessor::ChannelTypeMIDI:
            for (int i = 1; i < midiNodeIds.size(); ++i)
                if (midiNodeIds.getUnchecked(i) == freeNodeID)
                    return i;

            midiNodeIds.add ((uint32) freeNodeID);
            return midiNodeIds.size() - 1;
        }

        return -1;
    }

    int getReadOnlyEmptyBuffer() const noexcept
    {
        return 0;
    }

    int getBufferContaining (const AudioProcessor::ChannelType channelType,
                             const uint32 nodeId,
                             const uint outputChannel) const noexcept
    {
        switch (channelType)
        {
        case AudioProcessor::ChannelTypeAudio:
            for (int i = audioNodeIds.size(); --i >= 0;)
                if (audioNodeIds.getUnchecked(i) == nodeId && audioChannels.getUnchecked(i) == outputChannel)
                    return i;
            break;

        case AudioProcessor::ChannelTypeCV:
            for (int i = cvNodeIds.size(); --i >= 0;)
                if (cvNodeIds.getUnchecked(i) == nodeId && cvChannels.getUnchecked(i) == outputChannel)
                    return i;
            break;

        case AudioProcessor::ChannelTypeMIDI:
            for (int i = midiNodeIds.size(); --i >= 0;)
                if (midiNodeIds.getUnchecked(i) == nodeId)
                    return i;
            break;
        }

        return -1;
    }

    void markAnyUnusedBuffersAsFree (const int stepIndex)
    {
        for (int i = 0; i < audioNodeIds.size(); ++i)
        {
            if (isNodeBusy (audioNodeIds.getUnchecked(i))
                 && ! isBufferNeededLater (stepIndex, -1,
                                           audioNodeIds.getUnchecked(i),
                                           audioChannels.getUnchecked(i)))
            {
                audioNodeIds.set (i, (uint32) freeNodeID);
            }
        }

        // NOTE: CV skipped on purpose

        for (int i = 0; i < midiNodeIds.size(); ++i)
        {
            if (isNodeBusy (midiNodeIds.getUnchecked(i))
                 && ! isBufferNeededLater (stepIndex, -1,
                                           midiNodeIds.getUnchecked(i),
                                           AudioProcessorGraph::midiChannelIndex))
            {
                midiNodeIds.set (i, (uint32) freeNodeID);
            }
        }
    }

    bool isBufferNeededLater (int stepIndexToSearchFrom,
                              uint inputChannelOfIndexToIgnore,
                              const uint32 nodeId,
                              const uint outputChanIndex) const
    {
        while (stepIndexToSearchFrom < orderedNodes.size())
        {
            const AudioProcessorGraph::Node* const node = (const AudioProcessorGraph::Node*) orderedNodes.getUnchecked (stepIndexToSearchFrom);

            if (outputChanIndex == AudioProcessorGraph::midiChannelIndex)
            {
                if (inputChannelOfIndexToIgnore != AudioProcessorGraph::midiChannelIndex
                     && graph.getConnectionBetween (AudioProcessor::ChannelTypeAudio,
                                                    nodeId, AudioProcessorGraph::midiChannelIndex,
                                                    node->nodeId, AudioProcessorGraph::midiChannelIndex) != nullptr)
                    return true;
            }
            else
            {
                for (uint i = 0; i < node->getProcessor()->getTotalNumInputChannels(AudioProcessor::ChannelTypeAudio); ++i)
                    if (i != inputChannelOfIndexToIgnore
                         && graph.getConnectionBetween (AudioProcessor::ChannelTypeAudio,
                                                        nodeId, outputChanIndex,
                                                        node->nodeId, i) != nullptr)
                        return true;
            }

            inputChannelOfIndexToIgnore = (uint)-1;
            ++stepIndexToSearchFrom;
        }

        return false;
    }

    void markBufferAsContaining (const AudioProcessor::ChannelType channelType,
                                 int bufferNum, uint32 nodeId, int outputIndex)
    {
        switch (channelType)
        {
        case AudioProcessor::ChannelTypeAudio:
            CARLA_SAFE_ASSERT_BREAK (bufferNum >= 0 && bufferNum < audioNodeIds.size());

            audioNodeIds.set (bufferNum, nodeId);
            audioChannels.set (bufferNum, outputIndex);
            break;

        case AudioProcessor::ChannelTypeCV:
            CARLA_SAFE_ASSERT_BREAK (bufferNum >= 0 && bufferNum < cvNodeIds.size());

            cvNodeIds.set (bufferNum, nodeId);
            cvChannels.set (bufferNum, outputIndex);
            break;

        case AudioProcessor::ChannelTypeMIDI:
            CARLA_SAFE_ASSERT_BREAK (bufferNum > 0 && bufferNum < midiNodeIds.size());

            midiNodeIds.set (bufferNum, nodeId);
            break;
        }
    }

    CARLA_DECLARE_NON_COPY_CLASS (RenderingOpSequenceCalculator)
};
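
/*
   Worked example (added for illustration, not normative): for a simple chain
   "MIDI input -> synth -> audio output" with no latency anywhere, the
   calculator typically ends up emitting a sequence along these lines:

       ClearMidiBufferOp   (m1)           // fresh buffer for the MIDI input node
       ProcessBufferOp     (MIDI input)   // fills m1 from the host MIDI stream
       ProcessBufferOp     (synth)        // reads m1, renders into audio channels a1/a2
       ProcessBufferOp     (audio output) // adds a1/a2 into the graph's output

   The exact buffer indices depend on what happened to be free at each step; the
   point is that every op works on shared, pre-allocated channels chosen here
   once, so nothing is allocated while the sequence is being performed.
*/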
//==============================================================================
// Holds a fast lookup table for checking which nodes are inputs to others.
class ConnectionLookupTable
{
public:
    explicit ConnectionLookupTable (const OwnedArray<AudioProcessorGraph::Connection>& connections)
    {
        for (int i = 0; i < static_cast<int>(connections.size()); ++i)
        {
            const AudioProcessorGraph::Connection* const c = connections.getUnchecked(i);

            int index;
            Entry* entry = findEntry (c->destNodeId, index);

            if (entry == nullptr)
            {
                entry = new Entry (c->destNodeId);
                entries.insert (index, entry);
            }

            entry->srcNodes.add (c->sourceNodeId);
        }
    }

    bool isAnInputTo (const uint32 possibleInputId,
                      const uint32 possibleDestinationId) const noexcept
    {
        return isAnInputToRecursive (possibleInputId, possibleDestinationId, entries.size());
    }

private:
    //==============================================================================
    struct Entry
    {
        explicit Entry (const uint32 destNodeId_) noexcept : destNodeId (destNodeId_) {}

        const uint32 destNodeId;
        SortedSet<uint32> srcNodes;

        CARLA_DECLARE_NON_COPY_CLASS (Entry)
    };

    OwnedArray<Entry> entries;

    bool isAnInputToRecursive (const uint32 possibleInputId,
                               const uint32 possibleDestinationId,
                               int recursionCheck) const noexcept
    {
        int index;

        if (const Entry* const entry = findEntry (possibleDestinationId, index))
        {
            const SortedSet<uint32>& srcNodes = entry->srcNodes;

            if (srcNodes.contains (possibleInputId))
                return true;

            if (--recursionCheck >= 0)
            {
                for (int i = 0; i < srcNodes.size(); ++i)
                    if (isAnInputToRecursive (possibleInputId, srcNodes.getUnchecked(i), recursionCheck))
                        return true;
            }
        }

        return false;
    }

    Entry* findEntry (const uint32 destNodeId, int& insertIndex) const noexcept
    {
        Entry* result = nullptr;
        int start = 0;
        int end = entries.size();

        for (;;)
        {
            if (start >= end)
            {
                break;
            }
            else if (destNodeId == entries.getUnchecked (start)->destNodeId)
            {
                result = entries.getUnchecked (start);
                break;
            }
            else
            {
                const int halfway = (start + end) / 2;

                if (halfway == start)
                {
                    if (destNodeId >= entries.getUnchecked (halfway)->destNodeId)
                        ++start;

                    break;
                }
                else if (destNodeId >= entries.getUnchecked (halfway)->destNodeId)
                    start = halfway;
                else
                    end = halfway;
            }
        }

        insertIndex = start;
        return result;
    }

    CARLA_DECLARE_NON_COPY_CLASS (ConnectionLookupTable)
};
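
/*
   Illustration (added comment): with connections 1 -> 2 and 2 -> 3 the table
   stores { dest 2: {1}, dest 3: {2} }, so isAnInputTo (1, 3) walks 3 -> 2 -> 1
   and returns true, while isAnInputTo (3, 1) finds no entry for node 1 and
   returns false. The recursionCheck counter, seeded with entries.size(), simply
   caps the walk so a cyclic graph cannot recurse forever.
*/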
//==============================================================================
struct ConnectionSorter
{
    static int compareElements (const AudioProcessorGraph::Connection* const first,
                                const AudioProcessorGraph::Connection* const second) noexcept
    {
        if (first->sourceNodeId < second->sourceNodeId)              return -1;
        if (first->sourceNodeId > second->sourceNodeId)              return 1;
        if (first->destNodeId < second->destNodeId)                  return -1;
        if (first->destNodeId > second->destNodeId)                  return 1;
        if (first->sourceChannelIndex < second->sourceChannelIndex)  return -1;
        if (first->sourceChannelIndex > second->sourceChannelIndex)  return 1;
        if (first->destChannelIndex < second->destChannelIndex)      return -1;
        if (first->destChannelIndex > second->destChannelIndex)      return 1;

        return 0;
    }
};

}
//==============================================================================
AudioProcessorGraph::Connection::Connection (ChannelType ct,
                                             const uint32 sourceID, const uint sourceChannel,
                                             const uint32 destID, const uint destChannel) noexcept
    : channelType (ct),
      sourceNodeId (sourceID), sourceChannelIndex (sourceChannel),
      destNodeId (destID), destChannelIndex (destChannel)
{
}

//==============================================================================
AudioProcessorGraph::Node::Node (const uint32 nodeID, AudioProcessor* const p) noexcept
    : nodeId (nodeID), processor (p), isPrepared (false)
{
    wassert (processor != nullptr);
}

void AudioProcessorGraph::Node::prepare (const double newSampleRate, const int newBlockSize,
                                         AudioProcessorGraph* const graph)
{
    if (! isPrepared)
    {
        isPrepared = true;
        setParentGraph (graph);

        processor->setRateAndBufferSizeDetails (newSampleRate, newBlockSize);
        processor->prepareToPlay (newSampleRate, newBlockSize);
    }
}

void AudioProcessorGraph::Node::unprepare()
{
    if (isPrepared)
    {
        isPrepared = false;
        processor->releaseResources();
    }
}

void AudioProcessorGraph::Node::setParentGraph (AudioProcessorGraph* const graph) const
{
    if (AudioProcessorGraph::AudioGraphIOProcessor* const ioProc
            = dynamic_cast<AudioProcessorGraph::AudioGraphIOProcessor*> (processor.get()))
        ioProc->setParentGraph (graph);
}

//==============================================================================
struct AudioProcessorGraph::AudioProcessorGraphBufferHelpers
{
    AudioProcessorGraphBufferHelpers() noexcept
        : currentAudioInputBuffer (nullptr) {}

    void setRenderingBufferSize (int newNumAudioChannels, int newNumCVChannels, int newNumSamples) noexcept
    {
        renderingAudioBuffers.setSize (newNumAudioChannels, newNumSamples);
        renderingAudioBuffers.clear();

        renderingCVBuffers.setSize (newNumCVChannels, newNumSamples);
        renderingCVBuffers.clear();
    }

    void release() noexcept
    {
        renderingAudioBuffers.setSize (1, 1);
        currentAudioInputBuffer = nullptr;
        currentAudioOutputBuffer.setSize (1, 1);
        renderingCVBuffers.setSize (1, 1);
    }

    void prepareInOutBuffers(int newNumChannels, int newNumSamples) noexcept
    {
        currentAudioInputBuffer = nullptr;
        currentAudioOutputBuffer.setSize (newNumChannels, newNumSamples);
    }

    AudioSampleBuffer  renderingAudioBuffers;
    AudioSampleBuffer  renderingCVBuffers;
    AudioSampleBuffer* currentAudioInputBuffer;
    AudioSampleBuffer  currentAudioOutputBuffer;
};

//==============================================================================
AudioProcessorGraph::AudioProcessorGraph()
    : lastNodeId (0), audioBuffers (new AudioProcessorGraphBufferHelpers),
      currentMidiInputBuffer (nullptr), isPrepared (false), needsReorder (false)
{
}

AudioProcessorGraph::~AudioProcessorGraph()
{
    clearRenderingSequence();
    clear();
}

const String AudioProcessorGraph::getName() const
{
    return "Audio Graph";
}

//==============================================================================
void AudioProcessorGraph::clear()
{
    nodes.clear();
    connections.clear();
    needsReorder = true;
}

AudioProcessorGraph::Node* AudioProcessorGraph::getNodeForId (const uint32 nodeId) const
{
    for (int i = nodes.size(); --i >= 0;)
        if (nodes.getUnchecked(i)->nodeId == nodeId)
            return nodes.getUnchecked(i);

    return nullptr;
}

AudioProcessorGraph::Node* AudioProcessorGraph::addNode (AudioProcessor* const newProcessor, uint32 nodeId)
{
    CARLA_SAFE_ASSERT_RETURN (newProcessor != nullptr && newProcessor != this, nullptr);

    for (int i = nodes.size(); --i >= 0;)
    {
        CARLA_SAFE_ASSERT_RETURN (nodes.getUnchecked(i)->getProcessor() != newProcessor, nullptr);
    }

    if (nodeId == 0)
    {
        nodeId = ++lastNodeId;
    }
    else
    {
        // you can't add a node with an id that already exists in the graph..
        CARLA_SAFE_ASSERT_RETURN (getNodeForId (nodeId) == nullptr, nullptr);
        removeNode (nodeId);

        if (nodeId > lastNodeId)
            lastNodeId = nodeId;
    }

    Node* const n = new Node (nodeId, newProcessor);
    nodes.add (n);

    if (isPrepared)
        needsReorder = true;

    n->setParentGraph (this);
    return n;
}

bool AudioProcessorGraph::removeNode (const uint32 nodeId)
{
    disconnectNode (nodeId);

    for (int i = nodes.size(); --i >= 0;)
    {
        if (nodes.getUnchecked(i)->nodeId == nodeId)
        {
            nodes.remove (i);

            if (isPrepared)
                needsReorder = true;

            return true;
        }
    }

    return false;
}

bool AudioProcessorGraph::removeNode (Node* node)
{
    CARLA_SAFE_ASSERT_RETURN(node != nullptr, false);

    return removeNode (node->nodeId);
}

//==============================================================================
const AudioProcessorGraph::Connection* AudioProcessorGraph::getConnectionBetween (const ChannelType ct,
                                                                                  const uint32 sourceNodeId,
                                                                                  const uint sourceChannelIndex,
                                                                                  const uint32 destNodeId,
                                                                                  const uint destChannelIndex) const
{
    const Connection c (ct, sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex);
    GraphRenderingOps::ConnectionSorter sorter;

    return connections [connections.indexOfSorted (sorter, &c)];
}

bool AudioProcessorGraph::isConnected (const uint32 possibleSourceNodeId,
                                       const uint32 possibleDestNodeId) const
{
    for (int i = connections.size(); --i >= 0;)
    {
        const Connection* const c = connections.getUnchecked(i);

        if (c->sourceNodeId == possibleSourceNodeId
             && c->destNodeId == possibleDestNodeId)
        {
            return true;
        }
    }

    return false;
}

bool AudioProcessorGraph::canConnect (ChannelType ct,
                                      const uint32 sourceNodeId,
                                      const uint sourceChannelIndex,
                                      const uint32 destNodeId,
                                      const uint destChannelIndex) const
{
    if (sourceNodeId == destNodeId)
        return false;

    if ((destChannelIndex == midiChannelIndex) != (sourceChannelIndex == midiChannelIndex))
        return false;

    const Node* const source = getNodeForId (sourceNodeId);

    if (source == nullptr
         || (sourceChannelIndex != midiChannelIndex && sourceChannelIndex >= source->processor->getTotalNumOutputChannels(ct))
         || (sourceChannelIndex == midiChannelIndex && ! source->processor->producesMidi()))
        return false;

    const Node* const dest = getNodeForId (destNodeId);

    if (dest == nullptr
         || (destChannelIndex != midiChannelIndex && destChannelIndex >= dest->processor->getTotalNumInputChannels(ct))
         || (destChannelIndex == midiChannelIndex && ! dest->processor->acceptsMidi()))
        return false;

    return getConnectionBetween (ct,
                                 sourceNodeId, sourceChannelIndex,
                                 destNodeId, destChannelIndex) == nullptr;
}

bool AudioProcessorGraph::addConnection (const ChannelType ct,
                                         const uint32 sourceNodeId,
                                         const uint sourceChannelIndex,
                                         const uint32 destNodeId,
                                         const uint destChannelIndex)
{
    if (! canConnect (ct, sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex))
        return false;

    GraphRenderingOps::ConnectionSorter sorter;

    connections.addSorted (sorter, new Connection (ct,
                                                   sourceNodeId, sourceChannelIndex,
                                                   destNodeId, destChannelIndex));

    if (isPrepared)
        needsReorder = true;

    return true;
}
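
/*
   Usage sketch (added for illustration; a minimal example, assuming the graph's
   own audio I/O channel counts have already been configured elsewhere):

       AudioProcessorGraph graph;

       AudioProcessorGraph::Node* const in  = graph.addNode (
           new AudioProcessorGraph::AudioGraphIOProcessor (
               AudioProcessorGraph::AudioGraphIOProcessor::audioInputNode));

       AudioProcessorGraph::Node* const out = graph.addNode (
           new AudioProcessorGraph::AudioGraphIOProcessor (
               AudioProcessorGraph::AudioGraphIOProcessor::audioOutputNode));

       // pass stereo audio straight through
       graph.addConnection (AudioProcessor::ChannelTypeAudio, in->nodeId, 0, out->nodeId, 0);
       graph.addConnection (AudioProcessor::ChannelTypeAudio, in->nodeId, 1, out->nodeId, 1);

       graph.prepareToPlay (48000.0, 512); // builds the rendering sequence

   addConnection() refuses anything canConnect() rejects, so connecting a node to
   itself or to a missing channel simply returns false instead of corrupting the
   connection list.
*/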
void AudioProcessorGraph::removeConnection (const int index)
{
    connections.remove (index);

    if (isPrepared)
        needsReorder = true;
}

bool AudioProcessorGraph::removeConnection (const ChannelType ct,
                                            const uint32 sourceNodeId, const uint sourceChannelIndex,
                                            const uint32 destNodeId, const uint destChannelIndex)
{
    bool doneAnything = false;

    for (int i = connections.size(); --i >= 0;)
    {
        const Connection* const c = connections.getUnchecked(i);

        if (c->channelType == ct
             && c->sourceNodeId == sourceNodeId
             && c->destNodeId == destNodeId
             && c->sourceChannelIndex == sourceChannelIndex
             && c->destChannelIndex == destChannelIndex)
        {
            removeConnection (i);
            doneAnything = true;
        }
    }

    return doneAnything;
}

bool AudioProcessorGraph::disconnectNode (const uint32 nodeId)
{
    bool doneAnything = false;

    for (int i = connections.size(); --i >= 0;)
    {
        const Connection* const c = connections.getUnchecked(i);

        if (c->sourceNodeId == nodeId || c->destNodeId == nodeId)
        {
            removeConnection (i);
            doneAnything = true;
        }
    }

    return doneAnything;
}

bool AudioProcessorGraph::isConnectionLegal (const Connection* const c) const
{
    CARLA_SAFE_ASSERT_RETURN (c != nullptr, false);

    const Node* const source = getNodeForId (c->sourceNodeId);
    const Node* const dest   = getNodeForId (c->destNodeId);

    return source != nullptr
        && dest != nullptr
        && (c->sourceChannelIndex != midiChannelIndex ? (c->sourceChannelIndex < source->processor->getTotalNumOutputChannels(c->channelType))
                                                      : source->processor->producesMidi())
        && (c->destChannelIndex != midiChannelIndex ? (c->destChannelIndex < dest->processor->getTotalNumInputChannels(c->channelType))
                                                    : dest->processor->acceptsMidi());
}

bool AudioProcessorGraph::removeIllegalConnections()
{
    bool doneAnything = false;

    for (int i = connections.size(); --i >= 0;)
    {
        if (! isConnectionLegal (connections.getUnchecked(i)))
        {
            removeConnection (i);
            doneAnything = true;
        }
    }

    return doneAnything;
}

//==============================================================================
static void deleteRenderOpArray (Array<void*>& ops)
{
    for (int i = ops.size(); --i >= 0;)
        delete static_cast<GraphRenderingOps::AudioGraphRenderingOpBase*> (ops.getUnchecked(i));
}

void AudioProcessorGraph::clearRenderingSequence()
{
    Array<void*> oldOps;

    {
        const CarlaRecursiveMutexLocker cml (getCallbackLock());
        renderingOps.swapWith (oldOps);
    }

    deleteRenderOpArray (oldOps);
}

bool AudioProcessorGraph::isAnInputTo (const uint32 possibleInputId,
                                       const uint32 possibleDestinationId,
                                       const int recursionCheck) const
{
    if (recursionCheck > 0)
    {
        for (int i = connections.size(); --i >= 0;)
        {
            const AudioProcessorGraph::Connection* const c = connections.getUnchecked (i);

            if (c->destNodeId == possibleDestinationId
                 && (c->sourceNodeId == possibleInputId
                      || isAnInputTo (possibleInputId, c->sourceNodeId, recursionCheck - 1)))
                return true;
        }
    }

    return false;
}

void AudioProcessorGraph::buildRenderingSequence()
{
    Array<void*> newRenderingOps;
    int numAudioRenderingBuffersNeeded = 2;
    int numCVRenderingBuffersNeeded    = 0;
    int numMidiBuffersNeeded           = 1;

    {
        const CarlaRecursiveMutexLocker cml (reorderMutex);

        Array<Node*> orderedNodes;

        {
            const GraphRenderingOps::ConnectionLookupTable table (connections);

            for (int i = 0; i < nodes.size(); ++i)
            {
                Node* const node = nodes.getUnchecked(i);

                node->prepare (getSampleRate(), getBlockSize(), this);

                int j = 0;
                for (; j < orderedNodes.size(); ++j)
                    if (table.isAnInputTo (node->nodeId, ((Node*) orderedNodes.getUnchecked(j))->nodeId))
                        break;

                orderedNodes.insert (j, node);
            }
        }

        GraphRenderingOps::RenderingOpSequenceCalculator calculator (*this, orderedNodes, newRenderingOps);

        numAudioRenderingBuffersNeeded = calculator.getNumAudioBuffersNeeded();
        numCVRenderingBuffersNeeded    = calculator.getNumCVBuffersNeeded();
        numMidiBuffersNeeded           = calculator.getNumMidiBuffersNeeded();
    }

    {
        // swap over to the new rendering sequence..
        const CarlaRecursiveMutexLocker cml (getCallbackLock());

        audioBuffers->setRenderingBufferSize (numAudioRenderingBuffersNeeded,
                                              numCVRenderingBuffersNeeded,
                                              getBlockSize());

        for (int i = static_cast<int>(midiBuffers.size()); --i >= 0;)
            midiBuffers.getUnchecked(i)->clear();

        while (static_cast<int>(midiBuffers.size()) < numMidiBuffersNeeded)
            midiBuffers.add (new MidiBuffer());

        renderingOps.swapWith (newRenderingOps);
    }

    // delete the old ones..
    deleteRenderOpArray (newRenderingOps);
}
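
/*
   Note on the ordering loop above (added comment): each node is inserted just
   before the first already-placed node that it feeds into, so upstream nodes
   end up earlier in orderedNodes. For connections 1 -> 2 -> 3 with the nodes
   visited in the order 3, 1, 2, the list evolves as [3], [1, 3], [1, 2, 3],
   which is the order the RenderingOpSequenceCalculator then walks when
   emitting ops.
*/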
//==============================================================================
void AudioProcessorGraph::prepareToPlay (double sampleRate, int estimatedSamplesPerBlock)
{
    setRateAndBufferSizeDetails(sampleRate, estimatedSamplesPerBlock);

    audioBuffers->prepareInOutBuffers(jmax(1U, getTotalNumOutputChannels(AudioProcessor::ChannelTypeAudio)), estimatedSamplesPerBlock);

    currentMidiInputBuffer = nullptr;
    currentMidiOutputBuffer.clear();

    clearRenderingSequence();
    buildRenderingSequence();

    isPrepared = true;
}

void AudioProcessorGraph::releaseResources()
{
    isPrepared = false;

    for (int i = 0; i < nodes.size(); ++i)
        nodes.getUnchecked(i)->unprepare();

    audioBuffers->release();
    midiBuffers.clear();

    currentMidiInputBuffer = nullptr;
    currentMidiOutputBuffer.clear();
}

void AudioProcessorGraph::reset()
{
    const CarlaRecursiveMutexLocker cml (getCallbackLock());

    for (int i = 0; i < nodes.size(); ++i)
        nodes.getUnchecked(i)->getProcessor()->reset();
}

void AudioProcessorGraph::setNonRealtime (bool isProcessingNonRealtime) noexcept
{
    const CarlaRecursiveMutexLocker cml (getCallbackLock());

    AudioProcessor::setNonRealtime (isProcessingNonRealtime);

    for (int i = 0; i < nodes.size(); ++i)
        nodes.getUnchecked(i)->getProcessor()->setNonRealtime (isProcessingNonRealtime);
}

void AudioProcessorGraph::processAudio (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    AudioSampleBuffer*& currentAudioInputBuffer  = audioBuffers->currentAudioInputBuffer;
    AudioSampleBuffer&  currentAudioOutputBuffer = audioBuffers->currentAudioOutputBuffer;
    AudioSampleBuffer&  renderingAudioBuffers    = audioBuffers->renderingAudioBuffers;
    AudioSampleBuffer&  renderingCVBuffers       = audioBuffers->renderingCVBuffers;

    const int numSamples = buffer.getNumSamples();

    if (! audioBuffers->currentAudioOutputBuffer.setSizeRT(numSamples))
        return;
    if (! audioBuffers->renderingAudioBuffers.setSizeRT(numSamples))
        return;
    if (! audioBuffers->renderingCVBuffers.setSizeRT(numSamples))
        return;

    currentAudioInputBuffer = &buffer;
    currentAudioOutputBuffer.clear();
    currentMidiInputBuffer = &midiMessages;
    currentMidiOutputBuffer.clear();

    for (int i = 0; i < renderingOps.size(); ++i)
    {
        GraphRenderingOps::AudioGraphRenderingOpBase* const op
            = (GraphRenderingOps::AudioGraphRenderingOpBase*) renderingOps.getUnchecked(i);

        op->perform (renderingAudioBuffers, renderingCVBuffers, midiBuffers, numSamples);
    }

    for (uint32_t i = 0; i < buffer.getNumChannels(); ++i)
        buffer.copyFrom (i, 0, currentAudioOutputBuffer, i, 0, numSamples);

    midiMessages.clear();
    midiMessages.addEvents (currentMidiOutputBuffer, 0, buffer.getNumSamples(), 0);
}

bool AudioProcessorGraph::acceptsMidi() const   { return true; }
bool AudioProcessorGraph::producesMidi() const  { return true; }

void AudioProcessorGraph::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    processAudio (buffer, midiMessages);
}

void AudioProcessorGraph::processBlockWithCV (AudioSampleBuffer& buffer,
                                              const AudioSampleBuffer&,
                                              AudioSampleBuffer&,
                                              MidiBuffer& midiMessages)
{
    processAudio (buffer, midiMessages);
}

void AudioProcessorGraph::reorderNowIfNeeded()
{
    if (needsReorder)
    {
        needsReorder = false;
        buildRenderingSequence();
    }
}

const CarlaRecursiveMutex& AudioProcessorGraph::getReorderMutex() const
{
    return reorderMutex;
}

//==============================================================================
AudioProcessorGraph::AudioGraphIOProcessor::AudioGraphIOProcessor (const IODeviceType deviceType)
    : type (deviceType), graph (nullptr)
{
}

AudioProcessorGraph::AudioGraphIOProcessor::~AudioGraphIOProcessor()
{
}

const String AudioProcessorGraph::AudioGraphIOProcessor::getName() const
{
    switch (type)
    {
    case audioOutputNode:  return "Audio Output";
    case audioInputNode:   return "Audio Input";
    case cvOutputNode:     return "CV Output";
    case cvInputNode:      return "CV Input";
    case midiOutputNode:   return "Midi Output";
    case midiInputNode:    return "Midi Input";
    default:               break;
    }

    return String();
}

void AudioProcessorGraph::AudioGraphIOProcessor::prepareToPlay (double, int)
{
    CARLA_SAFE_ASSERT (graph != nullptr);
}

void AudioProcessorGraph::AudioGraphIOProcessor::releaseResources()
{
}

void AudioProcessorGraph::AudioGraphIOProcessor::processAudio (AudioSampleBuffer& buffer,
                                                               MidiBuffer& midiMessages)
{
    CARLA_SAFE_ASSERT_RETURN(graph != nullptr,);

    AudioSampleBuffer*& currentAudioInputBuffer =
        graph->audioBuffers->currentAudioInputBuffer;

    AudioSampleBuffer& currentAudioOutputBuffer =
        graph->audioBuffers->currentAudioOutputBuffer;

    switch (type)
    {
    case audioOutputNode:
    {
        for (int i = jmin (currentAudioOutputBuffer.getNumChannels(),
                           buffer.getNumChannels()); --i >= 0;)
        {
            currentAudioOutputBuffer.addFrom (i, 0, buffer, i, 0, buffer.getNumSamples());
        }

        break;
    }

    case audioInputNode:
    {
        for (int i = jmin (currentAudioInputBuffer->getNumChannels(),
                           buffer.getNumChannels()); --i >= 0;)
        {
            buffer.copyFrom (i, 0, *currentAudioInputBuffer, i, 0, buffer.getNumSamples());
        }

        break;
    }

    case midiOutputNode:
        graph->currentMidiOutputBuffer.addEvents (midiMessages, 0, buffer.getNumSamples(), 0);
        break;

    case midiInputNode:
        midiMessages.addEvents (*graph->currentMidiInputBuffer, 0, buffer.getNumSamples(), 0);
        break;

    default:
        break;
    }
}

void AudioProcessorGraph::AudioGraphIOProcessor::processBlock (AudioSampleBuffer& buffer,
                                                               MidiBuffer& midiMessages)
{
    processAudio (buffer, midiMessages);
}

bool AudioProcessorGraph::AudioGraphIOProcessor::acceptsMidi() const
{
    return type == midiOutputNode;
}

bool AudioProcessorGraph::AudioGraphIOProcessor::producesMidi() const
{
    return type == midiInputNode;
}

bool AudioProcessorGraph::AudioGraphIOProcessor::isInput() const noexcept
{
    return type == audioInputNode || type == cvInputNode || type == midiInputNode;
}

bool AudioProcessorGraph::AudioGraphIOProcessor::isOutput() const noexcept
{
    return type == audioOutputNode || type == cvOutputNode || type == midiOutputNode;
}

void AudioProcessorGraph::AudioGraphIOProcessor::setParentGraph (AudioProcessorGraph* const newGraph)
{
    graph = newGraph;

    if (graph != nullptr)
    {
        setPlayConfigDetails (type == audioOutputNode
                               ? graph->getTotalNumOutputChannels(AudioProcessor::ChannelTypeAudio)
                               : 0,
                              type == audioInputNode
                               ? graph->getTotalNumInputChannels(AudioProcessor::ChannelTypeAudio)
                               : 0,
                              type == cvOutputNode
                               ? graph->getTotalNumOutputChannels(AudioProcessor::ChannelTypeCV)
                               : 0,
                              type == cvInputNode
                               ? graph->getTotalNumInputChannels(AudioProcessor::ChannelTypeCV)
                               : 0,
                              type == midiOutputNode
                               ? graph->getTotalNumOutputChannels(AudioProcessor::ChannelTypeMIDI)
                               : 0,
                              type == midiInputNode
                               ? graph->getTotalNumInputChannels(AudioProcessor::ChannelTypeMIDI)
                               : 0,
                              getSampleRate(),
                              getBlockSize());
    }
}

}