Audio plugin host https://kx.studio/carla
You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1543 lines
53KB

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2015 - ROLI Ltd.
  5. Permission is granted to use this software under the terms of either:
  6. a) the GPL v2 (or any later version)
  7. b) the Affero GPL v3
  8. Details of these licenses can be found at: www.gnu.org/licenses
  9. JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
  10. WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
  11. A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  12. ------------------------------------------------------------------------------
  13. To release a closed-source product which uses JUCE, commercial licenses are
  14. available: visit www.juce.com for more information.
  15. ==============================================================================
  16. */
  17. #include "juce_AudioProcessorGraph.h"
// Sentinel channel index used to denote a MIDI connection (as opposed to an
// audio channel index, which is always small and non-negative).
const int AudioProcessorGraph::midiChannelIndex = 0x1000;
  19. //==============================================================================
  20. namespace GraphRenderingOps
  21. {
/** Base class for one step of the graph's pre-compiled rendering sequence.
    Each op operates on channels of a shared audio buffer pool and/or a pool
    of shared MIDI buffers.
*/
struct AudioGraphRenderingOpBase
{
    AudioGraphRenderingOpBase() noexcept {}
    virtual ~AudioGraphRenderingOpBase() {}

    /** Executes this op for one block of numSamples frames. */
    virtual void perform (AudioSampleBuffer& sharedBufferChans,
                          const OwnedArray<MidiBuffer>& sharedMidiBuffers,
                          const int numSamples) = 0;

    JUCE_LEAK_DETECTOR (AudioGraphRenderingOpBase)
};
// use CRTP: the virtual call is forwarded to the derived class's perform()
// via a static_cast, so the compiler can inline the child implementation.
template <class Child>
struct AudioGraphRenderingOp : public AudioGraphRenderingOpBase
{
    void perform (AudioSampleBuffer& sharedBufferChans,
                  const OwnedArray<MidiBuffer>& sharedMidiBuffers,
                  const int numSamples) override
    {
        static_cast<Child*> (this)->perform (sharedBufferChans, sharedMidiBuffers, numSamples);
    }
};
  42. //==============================================================================
/** Rendering op that zeroes one channel of the shared buffer pool. */
struct ClearChannelOp : public AudioGraphRenderingOp<ClearChannelOp>
{
    ClearChannelOp (const int channel) noexcept : channelNum (channel) {}

    void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
    {
        sharedBufferChans.clear (channelNum, 0, numSamples);
    }

    const int channelNum;  // index of the pool channel to clear

    JUCE_DECLARE_NON_COPYABLE (ClearChannelOp)
};
  53. //==============================================================================
/** Rendering op that overwrites one pool channel with the contents of another. */
struct CopyChannelOp : public AudioGraphRenderingOp<CopyChannelOp>
{
    CopyChannelOp (const int srcChan, const int dstChan) noexcept
        : srcChannelNum (srcChan), dstChannelNum (dstChan)
    {}

    void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
    {
        sharedBufferChans.copyFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples);
    }

    const int srcChannelNum, dstChannelNum;  // pool channel indices

    JUCE_DECLARE_NON_COPYABLE (CopyChannelOp)
};
  66. //==============================================================================
/** Rendering op that mixes (sums) one pool channel into another. */
struct AddChannelOp : public AudioGraphRenderingOp<AddChannelOp>
{
    AddChannelOp (const int srcChan, const int dstChan) noexcept
        : srcChannelNum (srcChan), dstChannelNum (dstChan)
    {}

    void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
    {
        sharedBufferChans.addFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples);
    }

    const int srcChannelNum, dstChannelNum;  // pool channel indices

    JUCE_DECLARE_NON_COPYABLE (AddChannelOp)
};
  79. //==============================================================================
/** Rendering op that empties one of the shared MIDI buffers. */
struct ClearMidiBufferOp : public AudioGraphRenderingOp<ClearMidiBufferOp>
{
    ClearMidiBufferOp (const int buffer) noexcept : bufferNum (buffer) {}

    void perform (AudioSampleBuffer&, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int)
    {
        sharedMidiBuffers.getUnchecked (bufferNum)->clear();
    }

    const int bufferNum;  // index into the shared MIDI buffer pool

    JUCE_DECLARE_NON_COPYABLE (ClearMidiBufferOp)
};
  90. //==============================================================================
/** Rendering op that replaces one shared MIDI buffer with a copy of another. */
struct CopyMidiBufferOp : public AudioGraphRenderingOp<CopyMidiBufferOp>
{
    CopyMidiBufferOp (const int srcBuffer, const int dstBuffer) noexcept
        : srcBufferNum (srcBuffer), dstBufferNum (dstBuffer)
    {}

    void perform (AudioSampleBuffer&, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int)
    {
        *sharedMidiBuffers.getUnchecked (dstBufferNum) = *sharedMidiBuffers.getUnchecked (srcBufferNum);
    }

    const int srcBufferNum, dstBufferNum;  // indices into the shared MIDI buffer pool

    JUCE_DECLARE_NON_COPYABLE (CopyMidiBufferOp)
};
  103. //==============================================================================
  104. struct AddMidiBufferOp : public AudioGraphRenderingOp<AddMidiBufferOp>
  105. {
  106. AddMidiBufferOp (const int srcBuffer, const int dstBuffer)
  107. : srcBufferNum (srcBuffer), dstBufferNum (dstBuffer)
  108. {}
  109. void perform (AudioSampleBuffer&, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int numSamples)
  110. {
  111. sharedMidiBuffers.getUnchecked (dstBufferNum)
  112. ->addEvents (*sharedMidiBuffers.getUnchecked (srcBufferNum), 0, numSamples, 0);
  113. }
  114. const int srcBufferNum, dstBufferNum;
  115. JUCE_DECLARE_NON_COPYABLE (AddMidiBufferOp)
  116. };
  117. //==============================================================================
/** Rendering op that delays one pool channel by a fixed number of samples,
    used to compensate for latency differences between graph branches.

    Implemented as a circular buffer of (delaySize + 1) floats; the write
    head is kept delaySize samples ahead of the read head, and both indices
    persist across blocks.
*/
struct DelayChannelOp : public AudioGraphRenderingOp<DelayChannelOp>
{
    DelayChannelOp (const int chan, const int delaySize)
        : channel (chan),
          bufferSize (delaySize + 1),
          readIndex (0), writeIndex (delaySize)
    {
        buffer.calloc ((size_t) bufferSize);  // zero-filled, so initial output is silence
    }

    void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
    {
        float* data = sharedBufferChans.getWritePointer (channel, 0);
        HeapBlock<float>& block = buffer;

        for (int i = numSamples; --i >= 0;)
        {
            // Store the incoming sample and emit the one written delaySize samples ago.
            block [writeIndex] = *data;
            *data++ = block [readIndex];

            // Wrap both heads around the circular buffer.
            if (++readIndex >= bufferSize) readIndex = 0;
            if (++writeIndex >= bufferSize) writeIndex = 0;
        }
    }

private:
    HeapBlock<float> buffer;         // the circular delay line
    const int channel, bufferSize;
    int readIndex, writeIndex;       // positions persist between calls to perform()

    JUCE_DECLARE_NON_COPYABLE (DelayChannelOp)
};
  145. //==============================================================================
  146. struct ProcessBufferOp : public AudioGraphRenderingOp<ProcessBufferOp>
  147. {
  148. ProcessBufferOp (const AudioProcessorGraph::Node::Ptr& n,
  149. const Array<int>& audioChannelsUsed,
  150. const int totalNumChans,
  151. const int midiBuffer)
  152. : node (n),
  153. processor (n->getProcessor()),
  154. audioChannelsToUse (audioChannelsUsed),
  155. totalChans (jmax (1, totalNumChans)),
  156. midiBufferToUse (midiBuffer)
  157. {
  158. audioChannels.calloc ((size_t) totalChans);
  159. while (audioChannelsToUse.size() < totalChans)
  160. audioChannelsToUse.add (0);
  161. }
  162. void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int numSamples)
  163. {
  164. HeapBlock<float*>& channels = audioChannels;
  165. for (int i = totalChans; --i >= 0;)
  166. channels[i] = sharedBufferChans.getWritePointer (audioChannelsToUse.getUnchecked (i), 0);
  167. AudioSampleBuffer buffer (channels, totalChans, numSamples);
  168. if (processor->isSuspended())
  169. {
  170. buffer.clear();
  171. }
  172. else
  173. {
  174. const CarlaRecursiveMutexLocker cml (processor->getCallbackLock());
  175. callProcess (buffer, *sharedMidiBuffers.getUnchecked (midiBufferToUse));
  176. }
  177. }
  178. void callProcess (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
  179. {
  180. processor->processBlock (buffer, midiMessages);
  181. }
  182. const AudioProcessorGraph::Node::Ptr node;
  183. AudioProcessor* const processor;
  184. private:
  185. Array<int> audioChannelsToUse;
  186. HeapBlock<float*> audioChannels;
  187. AudioSampleBuffer tempBuffer;
  188. const int totalChans;
  189. const int midiBufferToUse;
  190. JUCE_DECLARE_NON_COPYABLE (ProcessBufferOp)
  191. };
  192. //==============================================================================
/** Used to calculate the correct sequence of rendering ops needed, based on
    the best re-use of shared buffers at each stage.

    The constructor walks the topologically ordered node list, emits rendering
    ops into the given array, and tracks which pool buffer holds which node's
    output so buffers can be recycled as soon as they are no longer needed.
*/
struct RenderingOpSequenceCalculator
{
    RenderingOpSequenceCalculator (AudioProcessorGraph& g,
                                   const Array<AudioProcessorGraph::Node*>& nodes,
                                   Array<void*>& renderingOps)
        : graph (g),
          orderedNodes (nodes),
          totalLatency (0)
    {
        nodeIds.add ((uint32) zeroNodeID); // first buffer is read-only zeros
        channels.add (0);

        midiNodeIds.add ((uint32) zeroNodeID);

        for (int i = 0; i < orderedNodes.size(); ++i)
        {
            createRenderingOpsForNode (*orderedNodes.getUnchecked(i), renderingOps, i);

            // Recycle any buffer whose contents no later node will read.
            markAnyUnusedBuffersAsFree (i);
        }

        graph.setLatencySamples (totalLatency);
    }

    // Number of audio / MIDI pool buffers the finished sequence requires.
    int getNumBuffersNeeded() const noexcept { return nodeIds.size(); }
    int getNumMidiBuffersNeeded() const noexcept { return midiNodeIds.size(); }

private:
    //==============================================================================
    AudioProcessorGraph& graph;
    const Array<AudioProcessorGraph::Node*>& orderedNodes;

    // Parallel arrays: buffer i holds output channel channels[i] of node nodeIds[i].
    Array<int> channels;
    Array<uint32> nodeIds, midiNodeIds;

    // Sentinel "node ids" marking a pool buffer as free or as the shared zero buffer.
    enum { freeNodeID = 0xffffffff, zeroNodeID = 0xfffffffe };

    static bool isNodeBusy (uint32 nodeID) noexcept { return nodeID != freeNodeID && nodeID != zeroNodeID; }

    // Parallel arrays mapping node id -> accumulated latency of that node's output.
    Array<uint32> nodeDelayIDs;
    Array<int> nodeDelays;
    int totalLatency;

    // NOTE(review): if nodeID is unknown, indexOf returns -1 and Array::operator[]
    // yields a default-constructed int (0), so unknown nodes read as zero delay.
    int getNodeDelay (const uint32 nodeID) const { return nodeDelays [nodeDelayIDs.indexOf (nodeID)]; }

    void setNodeDelay (const uint32 nodeID, const int latency)
    {
        const int index = nodeDelayIDs.indexOf (nodeID);

        if (index >= 0)
        {
            nodeDelays.set (index, latency);
        }
        else
        {
            nodeDelayIDs.add (nodeID);
            nodeDelays.add (latency);
        }
    }

    // Largest recorded delay among all nodes feeding the given node.
    int getInputLatencyForNode (const uint32 nodeID) const
    {
        int maxLatency = 0;

        for (int i = graph.getNumConnections(); --i >= 0;)
        {
            const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

            if (c->destNodeId == nodeID)
                maxLatency = jmax (maxLatency, getNodeDelay (c->sourceNodeId));
        }

        return maxLatency;
    }

    //==============================================================================
    // Emits the ops that prepare this node's input channels/MIDI, then the
    // ProcessBufferOp that actually runs it. ourRenderingIndex is the node's
    // position in orderedNodes, used for buffer-lifetime queries.
    void createRenderingOpsForNode (AudioProcessorGraph::Node& node,
                                    Array<void*>& renderingOps,
                                    const int ourRenderingIndex)
    {
        AudioProcessor& processor = *node.getProcessor();
        const int numIns = processor.getTotalNumInputChannels();
        const int numOuts = processor.getTotalNumOutputChannels();
        const int totalChans = jmax (numIns, numOuts);

        Array<int> audioChannelsToUse;
        int midiBufferToUse = -1;

        // Inputs arriving with less delay than this get DelayChannelOps to align them.
        int maxLatency = getInputLatencyForNode (node.nodeId);

        for (int inputChan = 0; inputChan < numIns; ++inputChan)
        {
            // get a list of all the inputs to this node
            Array<uint32> sourceNodes;
            Array<int> sourceOutputChans;

            for (int i = graph.getNumConnections(); --i >= 0;)
            {
                const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

                if (c->destNodeId == node.nodeId && c->destChannelIndex == inputChan)
                {
                    sourceNodes.add (c->sourceNodeId);
                    sourceOutputChans.add (c->sourceChannelIndex);
                }
            }

            int bufIndex = -1;

            if (sourceNodes.size() == 0)
            {
                // unconnected input channel
                if (inputChan >= numOuts)
                {
                    // Input-only channel: safe to read the shared zero buffer.
                    bufIndex = getReadOnlyEmptyBuffer();
                    jassert (bufIndex >= 0);
                }
                else
                {
                    // Channel doubles as an output, so it needs a writable buffer.
                    bufIndex = getFreeBuffer (false);
                    renderingOps.add (new ClearChannelOp (bufIndex));
                }
            }
            else if (sourceNodes.size() == 1)
            {
                // channel with a straightforward single input..
                const uint32 srcNode = sourceNodes.getUnchecked(0);
                const int srcChan = sourceOutputChans.getUnchecked(0);

                bufIndex = getBufferContaining (srcNode, srcChan);

                if (bufIndex < 0)
                {
                    // if not found, this is probably a feedback loop
                    bufIndex = getReadOnlyEmptyBuffer();
                    jassert (bufIndex >= 0);
                }

                if (inputChan < numOuts
                     && isBufferNeededLater (ourRenderingIndex,
                                             inputChan,
                                             srcNode, srcChan))
                {
                    // can't mess up this channel because it's needed later by another node, so we
                    // need to use a copy of it..
                    const int newFreeBuffer = getFreeBuffer (false);

                    renderingOps.add (new CopyChannelOp (bufIndex, newFreeBuffer));

                    bufIndex = newFreeBuffer;
                }

                const int nodeDelay = getNodeDelay (srcNode);

                if (nodeDelay < maxLatency)
                    renderingOps.add (new DelayChannelOp (bufIndex, maxLatency - nodeDelay));
            }
            else
            {
                // channel with a mix of several inputs..

                // try to find a re-usable channel from our inputs..
                int reusableInputIndex = -1;

                for (int i = 0; i < sourceNodes.size(); ++i)
                {
                    const int sourceBufIndex = getBufferContaining (sourceNodes.getUnchecked(i),
                                                                    sourceOutputChans.getUnchecked(i));

                    if (sourceBufIndex >= 0
                        && ! isBufferNeededLater (ourRenderingIndex,
                                                  inputChan,
                                                  sourceNodes.getUnchecked(i),
                                                  sourceOutputChans.getUnchecked(i)))
                    {
                        // we've found one of our input chans that can be re-used..
                        reusableInputIndex = i;
                        bufIndex = sourceBufIndex;

                        const int nodeDelay = getNodeDelay (sourceNodes.getUnchecked (i));
                        if (nodeDelay < maxLatency)
                            renderingOps.add (new DelayChannelOp (sourceBufIndex, maxLatency - nodeDelay));

                        break;
                    }
                }

                if (reusableInputIndex < 0)
                {
                    // can't re-use any of our input chans, so get a new one and copy everything into it..
                    bufIndex = getFreeBuffer (false);
                    jassert (bufIndex != 0);

                    const int srcIndex = getBufferContaining (sourceNodes.getUnchecked (0),
                                                              sourceOutputChans.getUnchecked (0));
                    if (srcIndex < 0)
                    {
                        // if not found, this is probably a feedback loop
                        renderingOps.add (new ClearChannelOp (bufIndex));
                    }
                    else
                    {
                        renderingOps.add (new CopyChannelOp (srcIndex, bufIndex));
                    }

                    reusableInputIndex = 0;

                    const int nodeDelay = getNodeDelay (sourceNodes.getFirst());

                    if (nodeDelay < maxLatency)
                        renderingOps.add (new DelayChannelOp (bufIndex, maxLatency - nodeDelay));
                }

                // Mix every remaining source into the chosen destination buffer.
                for (int j = 0; j < sourceNodes.size(); ++j)
                {
                    if (j != reusableInputIndex)
                    {
                        int srcIndex = getBufferContaining (sourceNodes.getUnchecked(j),
                                                            sourceOutputChans.getUnchecked(j));
                        if (srcIndex >= 0)
                        {
                            const int nodeDelay = getNodeDelay (sourceNodes.getUnchecked (j));

                            if (nodeDelay < maxLatency)
                            {
                                if (! isBufferNeededLater (ourRenderingIndex, inputChan,
                                                           sourceNodes.getUnchecked(j),
                                                           sourceOutputChans.getUnchecked(j)))
                                {
                                    renderingOps.add (new DelayChannelOp (srcIndex, maxLatency - nodeDelay));
                                }
                                else // buffer is reused elsewhere, can't be delayed
                                {
                                    const int bufferToDelay = getFreeBuffer (false);
                                    renderingOps.add (new CopyChannelOp (srcIndex, bufferToDelay));
                                    renderingOps.add (new DelayChannelOp (bufferToDelay, maxLatency - nodeDelay));
                                    srcIndex = bufferToDelay;
                                }
                            }

                            renderingOps.add (new AddChannelOp (srcIndex, bufIndex));
                        }
                    }
                }
            }

            jassert (bufIndex >= 0);
            audioChannelsToUse.add (bufIndex);

            // If the processor also writes this channel, record that the buffer
            // will now hold this node's output.
            if (inputChan < numOuts)
                markBufferAsContaining (bufIndex, node.nodeId, inputChan);
        }

        // Allocate fresh buffers for output-only channels (outputs beyond numIns).
        for (int outputChan = numIns; outputChan < numOuts; ++outputChan)
        {
            const int bufIndex = getFreeBuffer (false);
            jassert (bufIndex != 0);
            audioChannelsToUse.add (bufIndex);

            markBufferAsContaining (bufIndex, node.nodeId, outputChan);
        }

        // Now the same thing for midi..
        Array<uint32> midiSourceNodes;

        for (int i = graph.getNumConnections(); --i >= 0;)
        {
            const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

            if (c->destNodeId == node.nodeId && c->destChannelIndex == AudioProcessorGraph::midiChannelIndex)
                midiSourceNodes.add (c->sourceNodeId);
        }

        if (midiSourceNodes.size() == 0)
        {
            // No midi inputs..
            midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi

            if (processor.acceptsMidi() || processor.producesMidi())
                renderingOps.add (new ClearMidiBufferOp (midiBufferToUse));
        }
        else if (midiSourceNodes.size() == 1)
        {
            // One midi input..
            midiBufferToUse = getBufferContaining (midiSourceNodes.getUnchecked(0),
                                                   AudioProcessorGraph::midiChannelIndex);

            if (midiBufferToUse >= 0)
            {
                if (isBufferNeededLater (ourRenderingIndex,
                                         AudioProcessorGraph::midiChannelIndex,
                                         midiSourceNodes.getUnchecked(0),
                                         AudioProcessorGraph::midiChannelIndex))
                {
                    // can't mess up this channel because it's needed later by another node, so we
                    // need to use a copy of it..
                    const int newFreeBuffer = getFreeBuffer (true);
                    renderingOps.add (new CopyMidiBufferOp (midiBufferToUse, newFreeBuffer));
                    midiBufferToUse = newFreeBuffer;
                }
            }
            else
            {
                // probably a feedback loop, so just use an empty one..
                midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi
            }
        }
        else
        {
            // More than one midi input being mixed..
            int reusableInputIndex = -1;

            for (int i = 0; i < midiSourceNodes.size(); ++i)
            {
                const int sourceBufIndex = getBufferContaining (midiSourceNodes.getUnchecked(i),
                                                                AudioProcessorGraph::midiChannelIndex);

                if (sourceBufIndex >= 0
                     && ! isBufferNeededLater (ourRenderingIndex,
                                               AudioProcessorGraph::midiChannelIndex,
                                               midiSourceNodes.getUnchecked(i),
                                               AudioProcessorGraph::midiChannelIndex))
                {
                    // we've found one of our input buffers that can be re-used..
                    reusableInputIndex = i;
                    midiBufferToUse = sourceBufIndex;
                    break;
                }
            }

            if (reusableInputIndex < 0)
            {
                // can't re-use any of our input buffers, so get a new one and copy everything into it..
                midiBufferToUse = getFreeBuffer (true);
                jassert (midiBufferToUse >= 0);

                const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(0),
                                                          AudioProcessorGraph::midiChannelIndex);
                if (srcIndex >= 0)
                    renderingOps.add (new CopyMidiBufferOp (srcIndex, midiBufferToUse));
                else
                    renderingOps.add (new ClearMidiBufferOp (midiBufferToUse));

                reusableInputIndex = 0;
            }

            // Merge the remaining MIDI sources into the chosen buffer.
            for (int j = 0; j < midiSourceNodes.size(); ++j)
            {
                if (j != reusableInputIndex)
                {
                    const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(j),
                                                              AudioProcessorGraph::midiChannelIndex);
                    if (srcIndex >= 0)
                        renderingOps.add (new AddMidiBufferOp (srcIndex, midiBufferToUse));
                }
            }
        }

        if (processor.producesMidi())
            markBufferAsContaining (midiBufferToUse, node.nodeId,
                                    AudioProcessorGraph::midiChannelIndex);

        // This node's output delay = aligned input delay + its own latency.
        setNodeDelay (node.nodeId, maxLatency + processor.getLatencySamples());

        // A node with no audio outputs (e.g. the graph's output sink) defines
        // the graph's overall reported latency.
        if (numOuts == 0)
            totalLatency = maxLatency;

        renderingOps.add (new ProcessBufferOp (&node, audioChannelsToUse,
                                               totalChans, midiBufferToUse));
    }

    //==============================================================================
    // Returns the index of a free pool buffer, growing the pool if none is free.
    // Index 0 is never handed out: it is the read-only zero buffer.
    int getFreeBuffer (const bool forMidi)
    {
        if (forMidi)
        {
            for (int i = 1; i < midiNodeIds.size(); ++i)
                if (midiNodeIds.getUnchecked(i) == freeNodeID)
                    return i;

            midiNodeIds.add ((uint32) freeNodeID);
            return midiNodeIds.size() - 1;
        }
        else
        {
            for (int i = 1; i < nodeIds.size(); ++i)
                if (nodeIds.getUnchecked(i) == freeNodeID)
                    return i;

            nodeIds.add ((uint32) freeNodeID);
            channels.add (0);
            return nodeIds.size() - 1;
        }
    }

    // Buffer 0 is permanently reserved as silent, read-only input.
    int getReadOnlyEmptyBuffer() const noexcept
    {
        return 0;
    }

    // Finds which pool buffer currently holds the given node's output channel
    // (or its MIDI output when outputChannel == midiChannelIndex); -1 if none.
    int getBufferContaining (const uint32 nodeId, const int outputChannel) const noexcept
    {
        if (outputChannel == AudioProcessorGraph::midiChannelIndex)
        {
            for (int i = midiNodeIds.size(); --i >= 0;)
                if (midiNodeIds.getUnchecked(i) == nodeId)
                    return i;
        }
        else
        {
            for (int i = nodeIds.size(); --i >= 0;)
                if (nodeIds.getUnchecked(i) == nodeId
                     && channels.getUnchecked(i) == outputChannel)
                    return i;
        }

        return -1;
    }

    // After rendering step stepIndex, frees every buffer whose contents no
    // later node will read, so getFreeBuffer() can recycle it.
    void markAnyUnusedBuffersAsFree (const int stepIndex)
    {
        for (int i = 0; i < nodeIds.size(); ++i)
        {
            if (isNodeBusy (nodeIds.getUnchecked(i))
                 && ! isBufferNeededLater (stepIndex, -1,
                                           nodeIds.getUnchecked(i),
                                           channels.getUnchecked(i)))
            {
                nodeIds.set (i, (uint32) freeNodeID);
            }
        }

        for (int i = 0; i < midiNodeIds.size(); ++i)
        {
            if (isNodeBusy (midiNodeIds.getUnchecked(i))
                 && ! isBufferNeededLater (stepIndex, -1,
                                           midiNodeIds.getUnchecked(i),
                                           AudioProcessorGraph::midiChannelIndex))
            {
                midiNodeIds.set (i, (uint32) freeNodeID);
            }
        }
    }

    // True if any node at or after stepIndexToSearchFrom still reads the given
    // (nodeId, outputChanIndex) output. inputChannelOfIndexToIgnore excludes
    // the input channel currently being wired up, but only for the first step
    // examined (it is reset to -1 afterwards).
    bool isBufferNeededLater (int stepIndexToSearchFrom,
                              int inputChannelOfIndexToIgnore,
                              const uint32 nodeId,
                              const int outputChanIndex) const
    {
        while (stepIndexToSearchFrom < orderedNodes.size())
        {
            const AudioProcessorGraph::Node* const node = (const AudioProcessorGraph::Node*) orderedNodes.getUnchecked (stepIndexToSearchFrom);

            if (outputChanIndex == AudioProcessorGraph::midiChannelIndex)
            {
                if (inputChannelOfIndexToIgnore != AudioProcessorGraph::midiChannelIndex
                     && graph.getConnectionBetween (nodeId, AudioProcessorGraph::midiChannelIndex,
                                                    node->nodeId, AudioProcessorGraph::midiChannelIndex) != nullptr)
                    return true;
            }
            else
            {
                for (int i = 0; i < node->getProcessor()->getTotalNumInputChannels(); ++i)
                    if (i != inputChannelOfIndexToIgnore
                         && graph.getConnectionBetween (nodeId, outputChanIndex,
                                                        node->nodeId, i) != nullptr)
                        return true;
            }

            // The "ignore" channel only applies to the current node's step.
            inputChannelOfIndexToIgnore = -1;
            ++stepIndexToSearchFrom;
        }

        return false;
    }

    // Records that pool buffer bufferNum now holds (nodeId, outputIndex).
    void markBufferAsContaining (int bufferNum, uint32 nodeId, int outputIndex)
    {
        if (outputIndex == AudioProcessorGraph::midiChannelIndex)
        {
            jassert (bufferNum > 0 && bufferNum < midiNodeIds.size());

            midiNodeIds.set (bufferNum, nodeId);
        }
        else
        {
            jassert (bufferNum >= 0 && bufferNum < nodeIds.size());

            nodeIds.set (bufferNum, nodeId);
            channels.set (bufferNum, outputIndex);
        }
    }

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (RenderingOpSequenceCalculator)
};
  610. //==============================================================================
// Holds a fast lookup table for checking which nodes are inputs to others.
// Built once from the connection list; entries are kept sorted by destNodeId
// so findEntry() can binary-search them.
class ConnectionLookupTable
{
public:
    explicit ConnectionLookupTable (const OwnedArray<AudioProcessorGraph::Connection>& connections)
    {
        for (int i = 0; i < connections.size(); ++i)
        {
            const AudioProcessorGraph::Connection* const c = connections.getUnchecked(i);

            int index;
            Entry* entry = findEntry (c->destNodeId, index);

            if (entry == nullptr)
            {
                // First connection into this destination: create its entry at
                // the sorted position findEntry() reported.
                entry = new Entry (c->destNodeId);
                entries.insert (index, entry);
            }

            entry->srcNodes.add (c->sourceNodeId);
        }
    }

    /** True if possibleInputId feeds possibleDestinationId, directly or
        through any chain of intermediate nodes. */
    bool isAnInputTo (const uint32 possibleInputId,
                      const uint32 possibleDestinationId) const noexcept
    {
        return isAnInputToRecursive (possibleInputId, possibleDestinationId, entries.size());
    }

private:
    //==============================================================================
    // One destination node and the set of nodes that connect into it.
    struct Entry
    {
        explicit Entry (const uint32 destNodeId_) noexcept : destNodeId (destNodeId_) {}

        const uint32 destNodeId;
        SortedSet<uint32> srcNodes;

        JUCE_DECLARE_NON_COPYABLE (Entry)
    };

    OwnedArray<Entry> entries;  // sorted by destNodeId

    // Depth-first search for possibleInputId among the (transitive) sources of
    // possibleDestinationId. recursionCheck caps the depth at the number of
    // entries so a cyclic graph cannot recurse forever.
    bool isAnInputToRecursive (const uint32 possibleInputId,
                               const uint32 possibleDestinationId,
                               int recursionCheck) const noexcept
    {
        int index;

        if (const Entry* const entry = findEntry (possibleDestinationId, index))
        {
            const SortedSet<uint32>& srcNodes = entry->srcNodes;

            if (srcNodes.contains (possibleInputId))
                return true;

            if (--recursionCheck >= 0)
            {
                for (int i = 0; i < srcNodes.size(); ++i)
                    if (isAnInputToRecursive (possibleInputId, srcNodes.getUnchecked(i), recursionCheck))
                        return true;
            }
        }

        return false;
    }

    // Binary search over the sorted entries. Returns the matching Entry, or
    // nullptr if absent; in either case insertIndex is set to the position
    // where an entry for destNodeId belongs (used by the constructor).
    Entry* findEntry (const uint32 destNodeId, int& insertIndex) const noexcept
    {
        Entry* result = nullptr;
        int start = 0;
        int end = entries.size();

        for (;;)
        {
            if (start >= end)
            {
                break;
            }
            else if (destNodeId == entries.getUnchecked (start)->destNodeId)
            {
                result = entries.getUnchecked (start);
                break;
            }
            else
            {
                const int halfway = (start + end) / 2;

                if (halfway == start)
                {
                    // Range narrowed to one element that didn't match: the new
                    // entry goes before or after it depending on ordering.
                    if (destNodeId >= entries.getUnchecked (halfway)->destNodeId)
                        ++start;

                    break;
                }
                else if (destNodeId >= entries.getUnchecked (halfway)->destNodeId)
                    start = halfway;
                else
                    end = halfway;
            }
        }

        insertIndex = start;
        return result;
    }

    JUCE_DECLARE_NON_COPYABLE (ConnectionLookupTable)
};
  700. //==============================================================================
  701. struct ConnectionSorter
  702. {
  703. static int compareElements (const AudioProcessorGraph::Connection* const first,
  704. const AudioProcessorGraph::Connection* const second) noexcept
  705. {
  706. if (first->sourceNodeId < second->sourceNodeId) return -1;
  707. if (first->sourceNodeId > second->sourceNodeId) return 1;
  708. if (first->destNodeId < second->destNodeId) return -1;
  709. if (first->destNodeId > second->destNodeId) return 1;
  710. if (first->sourceChannelIndex < second->sourceChannelIndex) return -1;
  711. if (first->sourceChannelIndex > second->sourceChannelIndex) return 1;
  712. if (first->destChannelIndex < second->destChannelIndex) return -1;
  713. if (first->destChannelIndex > second->destChannelIndex) return 1;
  714. return 0;
  715. }
  716. };
  717. }
  718. //==============================================================================
// Represents one graph edge: (source node, source channel) -> (dest node,
// dest channel). The midiChannelIndex constant is used as the channel value
// for MIDI connections.
AudioProcessorGraph::Connection::Connection (const uint32 sourceID, const int sourceChannel,
                                             const uint32 destID, const int destChannel) noexcept
    : sourceNodeId (sourceID), sourceChannelIndex (sourceChannel),
      destNodeId (destID), destChannelIndex (destChannel)
{
}
  725. //==============================================================================
// Wraps a processor instance as a graph node; the node takes ownership of
// the processor (held in the `processor` smart pointer member).
AudioProcessorGraph::Node::Node (const uint32 nodeID, AudioProcessor* const p) noexcept
    : nodeId (nodeID), processor (p), isPrepared (false)
{
    jassert (processor != nullptr);  // a node must always wrap a real processor
}
  731. void AudioProcessorGraph::Node::prepare (const double newSampleRate, const int newBlockSize,
  732. AudioProcessorGraph* const graph)
  733. {
  734. if (! isPrepared)
  735. {
  736. isPrepared = true;
  737. setParentGraph (graph);
  738. processor->setRateAndBufferSizeDetails (newSampleRate, newBlockSize);
  739. processor->prepareToPlay (newSampleRate, newBlockSize);
  740. }
  741. }
  742. void AudioProcessorGraph::Node::unprepare()
  743. {
  744. if (isPrepared)
  745. {
  746. isPrepared = false;
  747. processor->releaseResources();
  748. }
  749. }
  750. void AudioProcessorGraph::Node::setParentGraph (AudioProcessorGraph* const graph) const
  751. {
  752. if (AudioProcessorGraph::AudioGraphIOProcessor* const ioProc
  753. = dynamic_cast<AudioProcessorGraph::AudioGraphIOProcessor*> (processor.get()))
  754. ioProc->setParentGraph (graph);
  755. }
  756. //==============================================================================
  757. struct AudioProcessorGraph::AudioProcessorGraphBufferHelpers
  758. {
  759. AudioProcessorGraphBufferHelpers()
  760. {
  761. currentAudioInputBuffer = nullptr;
  762. }
  763. void setRenderingBufferSize (int newNumChannels, int newNumSamples)
  764. {
  765. renderingBuffers.setSize (newNumChannels, newNumSamples);
  766. renderingBuffers.clear();
  767. }
  768. void release()
  769. {
  770. renderingBuffers.setSize (1, 1);
  771. currentAudioInputBuffer = nullptr;
  772. currentAudioOutputBuffer.setSize (1, 1);
  773. }
  774. void prepareInOutBuffers(int newNumChannels, int newNumSamples)
  775. {
  776. currentAudioInputBuffer = nullptr;
  777. currentAudioOutputBuffer.setSize (newNumChannels, newNumSamples);
  778. }
  779. AudioSampleBuffer renderingBuffers;
  780. AudioSampleBuffer* currentAudioInputBuffer;
  781. AudioSampleBuffer currentAudioOutputBuffer;
  782. };
  783. //==============================================================================
// Creates an empty graph: no nodes, no connections, rendering not yet prepared.
AudioProcessorGraph::AudioProcessorGraph()
    : lastNodeId (0), audioBuffers (new AudioProcessorGraphBufferHelpers),
      currentMidiInputBuffer (nullptr), isPrepared (false), needsReorder (false)
{
}
AudioProcessorGraph::~AudioProcessorGraph()
{
    // Drop the compiled rendering sequence first, then all nodes/connections.
    clearRenderingSequence();
    clear();
}
// Fixed display name reported for the graph as a whole.
const String AudioProcessorGraph::getName() const
{
    return "Audio Graph";
}
  798. //==============================================================================
// Removes every node and connection; the rendering sequence is rebuilt
// lazily via the needsReorder flag.
void AudioProcessorGraph::clear()
{
    nodes.clear();
    connections.clear();
    needsReorder = true;
}
  805. AudioProcessorGraph::Node* AudioProcessorGraph::getNodeForId (const uint32 nodeId) const
  806. {
  807. for (int i = nodes.size(); --i >= 0;)
  808. if (nodes.getUnchecked(i)->nodeId == nodeId)
  809. return nodes.getUnchecked(i);
  810. return nullptr;
  811. }
/** Adds a processor to the graph and returns its new node (or nullptr on
    failure). Passing nodeId == 0 auto-assigns the next free id; a non-zero
    id is used as given. The graph takes ownership of the processor.
*/
AudioProcessorGraph::Node* AudioProcessorGraph::addNode (AudioProcessor* const newProcessor, uint32 nodeId)
{
    // Refuse null processors and adding the graph to itself.
    if (newProcessor == nullptr || newProcessor == this)
    {
        jassertfalse;
        return nullptr;
    }

    for (int i = nodes.size(); --i >= 0;)
    {
        if (nodes.getUnchecked(i)->getProcessor() == newProcessor)
        {
            jassertfalse; // Cannot add the same object to the graph twice!
            return nullptr;
        }
    }

    if (nodeId == 0)
    {
        // 0 means "auto-assign the next id in sequence".
        nodeId = ++lastNodeId;
    }
    else
    {
        // you can't add a node with an id that already exists in the graph..
        jassert (getNodeForId (nodeId) == nullptr);
        removeNode (nodeId);

        // Keep lastNodeId ahead of explicitly-chosen ids so auto-assigned
        // ids never collide with them.
        if (nodeId > lastNodeId)
            lastNodeId = nodeId;
    }

    newProcessor->setPlayHead (getPlayHead());

    Node* const n = new Node (nodeId, newProcessor);
    nodes.add (n);

    if (isPrepared)
        needsReorder = true;  // rendering sequence must be recompiled

    n->setParentGraph (this);
    return n;
}
  847. bool AudioProcessorGraph::removeNode (const uint32 nodeId)
  848. {
  849. disconnectNode (nodeId);
  850. for (int i = nodes.size(); --i >= 0;)
  851. {
  852. if (nodes.getUnchecked(i)->nodeId == nodeId)
  853. {
  854. nodes.remove (i);
  855. if (isPrepared)
  856. needsReorder = true;
  857. return true;
  858. }
  859. }
  860. return false;
  861. }
  862. bool AudioProcessorGraph::removeNode (Node* node)
  863. {
  864. if (node != nullptr)
  865. return removeNode (node->nodeId);
  866. jassertfalse;
  867. return false;
  868. }
  869. //==============================================================================
// Finds the connection that exactly matches all four endpoints, or nullptr.
// A stack Connection is used as the binary-search key; a miss gives -1,
// and Array::operator[] is relied on to return a null default for an
// out-of-range index (NOTE(review): confirmed by the Array implementation).
const AudioProcessorGraph::Connection* AudioProcessorGraph::getConnectionBetween (const uint32 sourceNodeId,
                                                                                  const int sourceChannelIndex,
                                                                                  const uint32 destNodeId,
                                                                                  const int destChannelIndex) const
{
    const Connection c (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex);
    GraphRenderingOps::ConnectionSorter sorter;

    return connections [connections.indexOfSorted (sorter, &c)];
}
  879. bool AudioProcessorGraph::isConnected (const uint32 possibleSourceNodeId,
  880. const uint32 possibleDestNodeId) const
  881. {
  882. for (int i = connections.size(); --i >= 0;)
  883. {
  884. const Connection* const c = connections.getUnchecked(i);
  885. if (c->sourceNodeId == possibleSourceNodeId
  886. && c->destNodeId == possibleDestNodeId)
  887. {
  888. return true;
  889. }
  890. }
  891. return false;
  892. }
// Checks whether a new connection would be legal: channel indices must be
// non-negative, the two nodes must differ, midi must pair with midi and
// audio with audio, both nodes must exist with the channel in range, and an
// identical connection must not already be present.
bool AudioProcessorGraph::canConnect (const uint32 sourceNodeId,
                                      const int sourceChannelIndex,
                                      const uint32 destNodeId,
                                      const int destChannelIndex) const
{
    // midiChannelIndex is a sentinel channel number: both ends must agree
    // on whether this is a midi or an audio connection.
    if (sourceChannelIndex < 0
         || destChannelIndex < 0
         || sourceNodeId == destNodeId
         || (destChannelIndex == midiChannelIndex) != (sourceChannelIndex == midiChannelIndex))
        return false;

    const Node* const source = getNodeForId (sourceNodeId);

    // Source must exist, and must either have that output channel (audio)
    // or actually produce midi (midi connection).
    if (source == nullptr
         || (sourceChannelIndex != midiChannelIndex && sourceChannelIndex >= source->processor->getTotalNumOutputChannels())
         || (sourceChannelIndex == midiChannelIndex && ! source->processor->producesMidi()))
        return false;

    const Node* const dest = getNodeForId (destNodeId);

    // Mirror-image checks for the destination side.
    if (dest == nullptr
         || (destChannelIndex != midiChannelIndex && destChannelIndex >= dest->processor->getTotalNumInputChannels())
         || (destChannelIndex == midiChannelIndex && ! dest->processor->acceptsMidi()))
        return false;

    // Finally, reject exact duplicates of an existing connection.
    return getConnectionBetween (sourceNodeId, sourceChannelIndex,
                                 destNodeId, destChannelIndex) == nullptr;
}
// Adds a connection after validating it with canConnect(); returns false if
// it would be illegal or already exists. Connections are kept sorted so
// lookups can binary-search.
bool AudioProcessorGraph::addConnection (const uint32 sourceNodeId,
                                         const int sourceChannelIndex,
                                         const uint32 destNodeId,
                                         const int destChannelIndex)
{
    if (! canConnect (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex))
        return false;

    GraphRenderingOps::ConnectionSorter sorter;
    connections.addSorted (sorter, new Connection (sourceNodeId, sourceChannelIndex,
                                                   destNodeId, destChannelIndex));

    if (isPrepared)
        needsReorder = true;

    return true;
}
// Removes the connection at the given index in the sorted connection list
// and flags the rendering sequence for rebuild if we're currently prepared.
void AudioProcessorGraph::removeConnection (const int index)
{
    connections.remove (index);

    if (isPrepared)
        needsReorder = true;
}
  936. bool AudioProcessorGraph::removeConnection (const uint32 sourceNodeId, const int sourceChannelIndex,
  937. const uint32 destNodeId, const int destChannelIndex)
  938. {
  939. bool doneAnything = false;
  940. for (int i = connections.size(); --i >= 0;)
  941. {
  942. const Connection* const c = connections.getUnchecked(i);
  943. if (c->sourceNodeId == sourceNodeId
  944. && c->destNodeId == destNodeId
  945. && c->sourceChannelIndex == sourceChannelIndex
  946. && c->destChannelIndex == destChannelIndex)
  947. {
  948. removeConnection (i);
  949. doneAnything = true;
  950. }
  951. }
  952. return doneAnything;
  953. }
  954. bool AudioProcessorGraph::disconnectNode (const uint32 nodeId)
  955. {
  956. bool doneAnything = false;
  957. for (int i = connections.size(); --i >= 0;)
  958. {
  959. const Connection* const c = connections.getUnchecked(i);
  960. if (c->sourceNodeId == nodeId || c->destNodeId == nodeId)
  961. {
  962. removeConnection (i);
  963. doneAnything = true;
  964. }
  965. }
  966. return doneAnything;
  967. }
  968. bool AudioProcessorGraph::isConnectionLegal (const Connection* const c) const
  969. {
  970. jassert (c != nullptr);
  971. const Node* const source = getNodeForId (c->sourceNodeId);
  972. const Node* const dest = getNodeForId (c->destNodeId);
  973. return source != nullptr
  974. && dest != nullptr
  975. && (c->sourceChannelIndex != midiChannelIndex ? isPositiveAndBelow (c->sourceChannelIndex, source->processor->getTotalNumOutputChannels())
  976. : source->processor->producesMidi())
  977. && (c->destChannelIndex != midiChannelIndex ? isPositiveAndBelow (c->destChannelIndex, dest->processor->getTotalNumInputChannels())
  978. : dest->processor->acceptsMidi());
  979. }
  980. bool AudioProcessorGraph::removeIllegalConnections()
  981. {
  982. bool doneAnything = false;
  983. for (int i = connections.size(); --i >= 0;)
  984. {
  985. if (! isConnectionLegal (connections.getUnchecked(i)))
  986. {
  987. removeConnection (i);
  988. doneAnything = true;
  989. }
  990. }
  991. return doneAnything;
  992. }
  993. //==============================================================================
  994. static void deleteRenderOpArray (Array<void*>& ops)
  995. {
  996. for (int i = ops.size(); --i >= 0;)
  997. delete static_cast<GraphRenderingOps::AudioGraphRenderingOpBase*> (ops.getUnchecked(i));
  998. }
// Detaches the current rendering sequence under the callback lock, then
// deletes the ops after releasing it, so the audio thread is only blocked
// for the cheap swap rather than for the deletions.
void AudioProcessorGraph::clearRenderingSequence()
{
    Array<void*> oldOps;

    {
        const CarlaRecursiveMutexLocker cml (getCallbackLock());
        renderingOps.swapWith (oldOps);
    }

    deleteRenderOpArray (oldOps);
}
  1008. bool AudioProcessorGraph::isAnInputTo (const uint32 possibleInputId,
  1009. const uint32 possibleDestinationId,
  1010. const int recursionCheck) const
  1011. {
  1012. if (recursionCheck > 0)
  1013. {
  1014. for (int i = connections.size(); --i >= 0;)
  1015. {
  1016. const AudioProcessorGraph::Connection* const c = connections.getUnchecked (i);
  1017. if (c->destNodeId == possibleDestinationId
  1018. && (c->sourceNodeId == possibleInputId
  1019. || isAnInputTo (possibleInputId, c->sourceNodeId, recursionCheck - 1)))
  1020. return true;
  1021. }
  1022. }
  1023. return false;
  1024. }
// Rebuilds the list of rendering operations from the current node/connection
// state, then swaps it in atomically under the callback lock.
void AudioProcessorGraph::buildRenderingSequence()
{
    Array<void*> newRenderingOps;
    int numRenderingBuffersNeeded = 2;
    int numMidiBuffersNeeded = 1;

    {
        const CarlaRecursiveMutexLocker cml (reorderMutex);

        // Topologically sort the nodes: each node is inserted just before
        // the first already-ordered node that it feeds into.
        Array<Node*> orderedNodes;

        {
            const GraphRenderingOps::ConnectionLookupTable table (connections);

            for (int i = 0; i < nodes.size(); ++i)
            {
                Node* const node = nodes.getUnchecked(i);

                node->prepare (getSampleRate(), getBlockSize(), this);

                int j = 0;
                for (; j < orderedNodes.size(); ++j)
                    if (table.isAnInputTo (node->nodeId, ((Node*) orderedNodes.getUnchecked(j))->nodeId))
                        break;

                orderedNodes.insert (j, node);
            }
        }

        // Turn the ordered node list into concrete render ops, and compute
        // how many intermediate audio/midi buffers the sequence requires.
        GraphRenderingOps::RenderingOpSequenceCalculator calculator (*this, orderedNodes, newRenderingOps);

        numRenderingBuffersNeeded = calculator.getNumBuffersNeeded();
        numMidiBuffersNeeded = calculator.getNumMidiBuffersNeeded();
    }

    {
        // swap over to the new rendering sequence..
        const CarlaRecursiveMutexLocker cml (getCallbackLock());

        audioBuffers->setRenderingBufferSize (numRenderingBuffersNeeded, getBlockSize());

        // Reuse existing midi buffers, clearing them; grow the pool if the
        // new sequence needs more.
        for (int i = midiBuffers.size(); --i >= 0;)
            midiBuffers.getUnchecked(i)->clear();

        while (midiBuffers.size() < numMidiBuffersNeeded)
            midiBuffers.add (new MidiBuffer());

        renderingOps.swapWith (newRenderingOps);
    }

    // delete the old ones..  (after the swap, newRenderingOps holds the
    // previous sequence, and we're outside the callback lock)
    deleteRenderOpArray (newRenderingOps);
}
  1063. //==============================================================================
// Prepares the graph for playback. The explicit sample-rate argument is
// unused here: node preparation inside buildRenderingSequence() queries
// getSampleRate()/getBlockSize() instead.
void AudioProcessorGraph::prepareToPlay (double /*sampleRate*/, int estimatedSamplesPerBlock)
{
    audioBuffers->prepareInOutBuffers (jmax (1, getTotalNumOutputChannels()), estimatedSamplesPerBlock);

    currentMidiInputBuffer = nullptr;
    currentMidiOutputBuffer.clear();

    clearRenderingSequence();
    buildRenderingSequence();

    isPrepared = true;
}
// Releases playback resources: unprepares every node, shrinks the shared
// buffers, and drops the midi buffer pool.
void AudioProcessorGraph::releaseResources()
{
    isPrepared = false;

    for (int i = 0; i < nodes.size(); ++i)
        nodes.getUnchecked(i)->unprepare();

    audioBuffers->release();
    midiBuffers.clear();

    currentMidiInputBuffer = nullptr;
    currentMidiOutputBuffer.clear();
}
// Resets every contained processor, holding the callback lock so the audio
// thread cannot run concurrently.
void AudioProcessorGraph::reset()
{
    const CarlaRecursiveMutexLocker cml (getCallbackLock());

    for (int i = 0; i < nodes.size(); ++i)
        nodes.getUnchecked(i)->getProcessor()->reset();
}
// Propagates the realtime/offline flag to the graph itself and to every
// contained processor, under the callback lock.
void AudioProcessorGraph::setNonRealtime (bool isProcessingNonRealtime) noexcept
{
    const CarlaRecursiveMutexLocker cml (getCallbackLock());

    AudioProcessor::setNonRealtime (isProcessingNonRealtime);

    for (int i = 0; i < nodes.size(); ++i)
        nodes.getUnchecked(i)->getProcessor()->setNonRealtime (isProcessingNonRealtime);
}
// Installs the play head on the graph and forwards it to every contained
// processor, under the callback lock.
void AudioProcessorGraph::setPlayHead (AudioPlayHead* audioPlayHead)
{
    const CarlaRecursiveMutexLocker cml (getCallbackLock());

    AudioProcessor::setPlayHead (audioPlayHead);

    for (int i = 0; i < nodes.size(); ++i)
        nodes.getUnchecked(i)->getProcessor()->setPlayHead (audioPlayHead);
}
// Renders one block: publishes the caller's buffers where the IO processor
// nodes can find them, executes the prepared render-op sequence, then copies
// the accumulated output back into the caller's audio and midi buffers.
void AudioProcessorGraph::processAudio (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    AudioSampleBuffer& renderingBuffers = audioBuffers->renderingBuffers;
    AudioSampleBuffer*& currentAudioInputBuffer = audioBuffers->currentAudioInputBuffer;
    AudioSampleBuffer& currentAudioOutputBuffer = audioBuffers->currentAudioOutputBuffer;

    const int numSamples = buffer.getNumSamples();

    // Make the caller's buffers visible to the AudioGraphIOProcessor nodes.
    currentAudioInputBuffer = &buffer;
    currentAudioOutputBuffer.setSize (jmax (1, buffer.getNumChannels()), numSamples);
    currentAudioOutputBuffer.clear();
    currentMidiInputBuffer = &midiMessages;
    currentMidiOutputBuffer.clear();

    // The ops are stored type-erased as void*; run them in sequence order.
    for (int i = 0; i < renderingOps.size(); ++i)
    {
        GraphRenderingOps::AudioGraphRenderingOpBase* const op
            = (GraphRenderingOps::AudioGraphRenderingOpBase*) renderingOps.getUnchecked(i);

        op->perform (renderingBuffers, midiBuffers, numSamples);
    }

    // Replace the caller's in-place buffers with the rendered results.
    for (int i = 0; i < buffer.getNumChannels(); ++i)
        buffer.copyFrom (i, 0, currentAudioOutputBuffer, i, 0, numSamples);

    midiMessages.clear();
    midiMessages.addEvents (currentMidiOutputBuffer, 0, buffer.getNumSamples(), 0);
}
// The graph as a whole always exposes midi in/out, routed via its IO nodes.
bool AudioProcessorGraph::acceptsMidi() const   { return true; }
bool AudioProcessorGraph::producesMidi() const  { return true; }
// Standard AudioProcessor entry point; all work is in processAudio().
void AudioProcessorGraph::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    processAudio (buffer, midiMessages);
}
  1131. void AudioProcessorGraph::reorderNowIfNeeded()
  1132. {
  1133. if (needsReorder)
  1134. {
  1135. needsReorder = false;
  1136. buildRenderingSequence();
  1137. }
  1138. }
  1139. //==============================================================================
// Creates an IO endpoint node of the given type; it remains detached until
// setParentGraph() attaches it to a graph.
AudioProcessorGraph::AudioGraphIOProcessor::AudioGraphIOProcessor (const IODeviceType deviceType)
    : type (deviceType), graph (nullptr)
{
}
AudioProcessorGraph::AudioGraphIOProcessor::~AudioGraphIOProcessor()
{
    // Nothing to release: the parent graph owns all the shared buffers.
}
  1147. const String AudioProcessorGraph::AudioGraphIOProcessor::getName() const
  1148. {
  1149. switch (type)
  1150. {
  1151. case audioOutputNode: return "Audio Output";
  1152. case audioInputNode: return "Audio Input";
  1153. case midiOutputNode: return "Midi Output";
  1154. case midiInputNode: return "Midi Input";
  1155. default: break;
  1156. }
  1157. return String();
  1158. }
#if 0
// Compiled out in this Carla copy of the file (PluginDescription unused here).
void AudioProcessorGraph::AudioGraphIOProcessor::fillInPluginDescription (PluginDescription& d) const
{
    d.name = getName();
    d.uid = d.name.hashCode();
    d.category = "I/O devices";
    d.pluginFormatName = "Internal";
    d.manufacturerName = "ROLI Ltd.";
    d.version = "1.0";
    d.isInstrument = false;

    // For the audio IO node types, the reported channel counts are taken
    // from the parent graph's totals rather than this node's own config.
    d.numInputChannels = getTotalNumInputChannels();
    if (type == audioOutputNode && graph != nullptr)
        d.numInputChannels = graph->getTotalNumInputChannels();

    d.numOutputChannels = getTotalNumOutputChannels();
    if (type == audioInputNode && graph != nullptr)
        d.numOutputChannels = graph->getTotalNumOutputChannels();
}
#endif
void AudioProcessorGraph::AudioGraphIOProcessor::prepareToPlay (double, int)
{
    // The graph supplies all buffers, so there is nothing to allocate —
    // just verify that this node has been attached to a graph.
    jassert (graph != nullptr);
}
void AudioProcessorGraph::AudioGraphIOProcessor::releaseResources()
{
    // Nothing to do: this node owns no resources of its own.
}
  1184. void AudioProcessorGraph::AudioGraphIOProcessor::processAudio (AudioSampleBuffer& buffer,
  1185. MidiBuffer& midiMessages)
  1186. {
  1187. AudioSampleBuffer*& currentAudioInputBuffer =
  1188. graph->audioBuffers->currentAudioInputBuffer;
  1189. AudioSampleBuffer& currentAudioOutputBuffer =
  1190. graph->audioBuffers->currentAudioOutputBuffer;
  1191. jassert (graph != nullptr);
  1192. switch (type)
  1193. {
  1194. case audioOutputNode:
  1195. {
  1196. for (int i = jmin (currentAudioOutputBuffer.getNumChannels(),
  1197. buffer.getNumChannels()); --i >= 0;)
  1198. {
  1199. currentAudioOutputBuffer.addFrom (i, 0, buffer, i, 0, buffer.getNumSamples());
  1200. }
  1201. break;
  1202. }
  1203. case audioInputNode:
  1204. {
  1205. for (int i = jmin (currentAudioInputBuffer->getNumChannels(),
  1206. buffer.getNumChannels()); --i >= 0;)
  1207. {
  1208. buffer.copyFrom (i, 0, *currentAudioInputBuffer, i, 0, buffer.getNumSamples());
  1209. }
  1210. break;
  1211. }
  1212. case midiOutputNode:
  1213. graph->currentMidiOutputBuffer.addEvents (midiMessages, 0, buffer.getNumSamples(), 0);
  1214. break;
  1215. case midiInputNode:
  1216. midiMessages.addEvents (*graph->currentMidiInputBuffer, 0, buffer.getNumSamples(), 0);
  1217. break;
  1218. default:
  1219. break;
  1220. }
  1221. }
// Standard AudioProcessor entry point; forwards to the shared implementation.
void AudioProcessorGraph::AudioGraphIOProcessor::processBlock (AudioSampleBuffer& buffer,
                                                               MidiBuffer& midiMessages)
{
    processAudio (buffer, midiMessages);
}
bool AudioProcessorGraph::AudioGraphIOProcessor::acceptsMidi() const
{
    // The midi *output* node is the one that consumes midi from the graph.
    return type == midiOutputNode;
}
bool AudioProcessorGraph::AudioGraphIOProcessor::producesMidi() const
{
    // The midi *input* node is the one that injects midi into the graph.
    return type == midiInputNode;
}
// Classify the node: input nodes feed data into the graph; output nodes take it out.
bool AudioProcessorGraph::AudioGraphIOProcessor::isInput() const noexcept   { return type == audioInputNode || type == midiInputNode; }
bool AudioProcessorGraph::AudioGraphIOProcessor::isOutput() const noexcept  { return type == audioOutputNode || type == midiOutputNode; }
  1237. void AudioProcessorGraph::AudioGraphIOProcessor::setParentGraph (AudioProcessorGraph* const newGraph)
  1238. {
  1239. graph = newGraph;
  1240. if (graph != nullptr)
  1241. {
  1242. setPlayConfigDetails (type == audioOutputNode ? graph->getTotalNumOutputChannels() : 0,
  1243. type == audioInputNode ? graph->getTotalNumInputChannels() : 0,
  1244. getSampleRate(),
  1245. getBlockSize());
  1246. }
  1247. }