Audio plugin host https://kx.studio/carla
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

1547 lines
53KB

  1. /*
  2. ==============================================================================
  3. This file is part of the Water library.
  4. Copyright (c) 2015 ROLI Ltd.
  5. Copyright (C) 2017 Filipe Coelho <falktx@falktx.com>
  6. Permission is granted to use this software under the terms of either:
  7. a) the GPL v2 (or any later version)
  8. b) the Affero GPL v3
  9. Details of these licenses can be found at: www.gnu.org/licenses
  10. JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
  11. WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
  12. A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  13. ------------------------------------------------------------------------------
  14. To release a closed-source product which uses JUCE, commercial licenses are
  15. available: visit www.juce.com for more information.
  16. ==============================================================================
  17. */
  18. #include "AudioProcessorGraph.h"
  19. #include "../containers/SortedSet.h"
  20. namespace water {
// Sentinel channel index (0x1000) used in Connection records to denote a
// node's MIDI port rather than an audio channel.
const int AudioProcessorGraph::midiChannelIndex = 0x1000;
  22. //==============================================================================
  23. namespace GraphRenderingOps
  24. {
// Abstract base for one step of the graph's precompiled rendering sequence.
struct AudioGraphRenderingOpBase
{
    AudioGraphRenderingOpBase() noexcept {}
    virtual ~AudioGraphRenderingOpBase() {}

    // Executes this step for one block: operates on the pool of shared audio
    // channels and the pool of shared MIDI buffers, over numSamples frames.
    virtual void perform (AudioSampleBuffer& sharedBufferChans,
                          const OwnedArray<MidiBuffer>& sharedMidiBuffers,
                          const int numSamples) = 0;
};
// use CRTP
template <class Child>
struct AudioGraphRenderingOp : public AudioGraphRenderingOpBase
{
    // Forwards the virtual call to the derived class's own perform() via a
    // static downcast, so each concrete op supplies its implementation without
    // re-declaring the override itself.
    void perform (AudioSampleBuffer& sharedBufferChans,
                  const OwnedArray<MidiBuffer>& sharedMidiBuffers,
                  const int numSamples) override
    {
        static_cast<Child*> (this)->perform (sharedBufferChans, sharedMidiBuffers, numSamples);
    }
};
  44. //==============================================================================
  45. struct ClearChannelOp : public AudioGraphRenderingOp<ClearChannelOp>
  46. {
  47. ClearChannelOp (const int channel) noexcept : channelNum (channel) {}
  48. void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
  49. {
  50. sharedBufferChans.clear (channelNum, 0, numSamples);
  51. }
  52. const int channelNum;
  53. JUCE_DECLARE_NON_COPYABLE (ClearChannelOp)
  54. };
  55. //==============================================================================
  56. struct CopyChannelOp : public AudioGraphRenderingOp<CopyChannelOp>
  57. {
  58. CopyChannelOp (const int srcChan, const int dstChan) noexcept
  59. : srcChannelNum (srcChan), dstChannelNum (dstChan)
  60. {}
  61. void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
  62. {
  63. sharedBufferChans.copyFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples);
  64. }
  65. const int srcChannelNum, dstChannelNum;
  66. JUCE_DECLARE_NON_COPYABLE (CopyChannelOp)
  67. };
  68. //==============================================================================
  69. struct AddChannelOp : public AudioGraphRenderingOp<AddChannelOp>
  70. {
  71. AddChannelOp (const int srcChan, const int dstChan) noexcept
  72. : srcChannelNum (srcChan), dstChannelNum (dstChan)
  73. {}
  74. void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
  75. {
  76. sharedBufferChans.addFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples);
  77. }
  78. const int srcChannelNum, dstChannelNum;
  79. JUCE_DECLARE_NON_COPYABLE (AddChannelOp)
  80. };
  81. //==============================================================================
  82. struct ClearMidiBufferOp : public AudioGraphRenderingOp<ClearMidiBufferOp>
  83. {
  84. ClearMidiBufferOp (const int buffer) noexcept : bufferNum (buffer) {}
  85. void perform (AudioSampleBuffer&, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int)
  86. {
  87. sharedMidiBuffers.getUnchecked (bufferNum)->clear();
  88. }
  89. const int bufferNum;
  90. JUCE_DECLARE_NON_COPYABLE (ClearMidiBufferOp)
  91. };
  92. //==============================================================================
  93. struct CopyMidiBufferOp : public AudioGraphRenderingOp<CopyMidiBufferOp>
  94. {
  95. CopyMidiBufferOp (const int srcBuffer, const int dstBuffer) noexcept
  96. : srcBufferNum (srcBuffer), dstBufferNum (dstBuffer)
  97. {}
  98. void perform (AudioSampleBuffer&, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int)
  99. {
  100. *sharedMidiBuffers.getUnchecked (dstBufferNum) = *sharedMidiBuffers.getUnchecked (srcBufferNum);
  101. }
  102. const int srcBufferNum, dstBufferNum;
  103. JUCE_DECLARE_NON_COPYABLE (CopyMidiBufferOp)
  104. };
  105. //==============================================================================
  106. struct AddMidiBufferOp : public AudioGraphRenderingOp<AddMidiBufferOp>
  107. {
  108. AddMidiBufferOp (const int srcBuffer, const int dstBuffer)
  109. : srcBufferNum (srcBuffer), dstBufferNum (dstBuffer)
  110. {}
  111. void perform (AudioSampleBuffer&, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int numSamples)
  112. {
  113. sharedMidiBuffers.getUnchecked (dstBufferNum)
  114. ->addEvents (*sharedMidiBuffers.getUnchecked (srcBufferNum), 0, numSamples, 0);
  115. }
  116. const int srcBufferNum, dstBufferNum;
  117. JUCE_DECLARE_NON_COPYABLE (AddMidiBufferOp)
  118. };
  119. //==============================================================================
  120. struct DelayChannelOp : public AudioGraphRenderingOp<DelayChannelOp>
  121. {
  122. DelayChannelOp (const int chan, const int delaySize)
  123. : channel (chan),
  124. bufferSize (delaySize + 1),
  125. readIndex (0), writeIndex (delaySize)
  126. {
  127. buffer.calloc ((size_t) bufferSize);
  128. }
  129. void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
  130. {
  131. float* data = sharedBufferChans.getWritePointer (channel, 0);
  132. HeapBlock<float>& block = buffer;
  133. for (int i = numSamples; --i >= 0;)
  134. {
  135. block [writeIndex] = *data;
  136. *data++ = block [readIndex];
  137. if (++readIndex >= bufferSize) readIndex = 0;
  138. if (++writeIndex >= bufferSize) writeIndex = 0;
  139. }
  140. }
  141. private:
  142. HeapBlock<float> buffer;
  143. const int channel, bufferSize;
  144. int readIndex, writeIndex;
  145. JUCE_DECLARE_NON_COPYABLE (DelayChannelOp)
  146. };
  147. //==============================================================================
  148. struct ProcessBufferOp : public AudioGraphRenderingOp<ProcessBufferOp>
  149. {
  150. ProcessBufferOp (const AudioProcessorGraph::Node::Ptr& n,
  151. const Array<int>& audioChannelsUsed,
  152. const int totalNumChans,
  153. const int midiBuffer)
  154. : node (n),
  155. processor (n->getProcessor()),
  156. audioChannelsToUse (audioChannelsUsed),
  157. totalChans (jmax (1, totalNumChans)),
  158. midiBufferToUse (midiBuffer)
  159. {
  160. audioChannels.calloc ((size_t) totalChans);
  161. while (audioChannelsToUse.size() < totalChans)
  162. audioChannelsToUse.add (0);
  163. }
  164. void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int numSamples)
  165. {
  166. HeapBlock<float*>& channels = audioChannels;
  167. for (int i = totalChans; --i >= 0;)
  168. channels[i] = sharedBufferChans.getWritePointer (audioChannelsToUse.getUnchecked (i), 0);
  169. AudioSampleBuffer buffer (channels, totalChans, numSamples);
  170. if (processor->isSuspended())
  171. {
  172. buffer.clear();
  173. }
  174. else
  175. {
  176. const CarlaRecursiveMutexLocker cml (processor->getCallbackLock());
  177. callProcess (buffer, *sharedMidiBuffers.getUnchecked (midiBufferToUse));
  178. }
  179. }
  180. void callProcess (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
  181. {
  182. processor->processBlock (buffer, midiMessages);
  183. }
  184. const AudioProcessorGraph::Node::Ptr node;
  185. AudioProcessor* const processor;
  186. private:
  187. Array<int> audioChannelsToUse;
  188. HeapBlock<float*> audioChannels;
  189. AudioSampleBuffer tempBuffer;
  190. const int totalChans;
  191. const int midiBufferToUse;
  192. JUCE_DECLARE_NON_COPYABLE (ProcessBufferOp)
  193. };
  194. //==============================================================================
/** Used to calculate the correct sequence of rendering ops needed, based on
    the best re-use of shared buffers at each stage.
*/
struct RenderingOpSequenceCalculator
{
    // Walks the caller-supplied node order (assumed upstream-before-downstream
    // — TODO confirm against the caller that builds `nodes`), emitting ops into
    // `renderingOps` and assigning shared audio/MIDI buffers as it goes. Also
    // accumulates the graph's overall latency and pushes it to the graph.
    RenderingOpSequenceCalculator (AudioProcessorGraph& g,
                                   const Array<AudioProcessorGraph::Node*>& nodes,
                                   Array<void*>& renderingOps)
        : graph (g),
          orderedNodes (nodes),
          totalLatency (0)
    {
        nodeIds.add ((uint32) zeroNodeID); // first buffer is read-only zeros
        channels.add (0);

        midiNodeIds.add ((uint32) zeroNodeID);

        for (int i = 0; i < orderedNodes.size(); ++i)
        {
            createRenderingOpsForNode (*orderedNodes.getUnchecked(i), renderingOps, i);
            markAnyUnusedBuffersAsFree (i); // recycle buffers nobody reads any more
        }

        graph.setLatencySamples (totalLatency);
    }

    // Number of shared audio channels the compiled sequence requires.
    int getNumBuffersNeeded() const noexcept      { return nodeIds.size(); }

    // Number of shared MIDI buffers the compiled sequence requires.
    int getNumMidiBuffersNeeded() const noexcept  { return midiNodeIds.size(); }

private:
    //==============================================================================
    AudioProcessorGraph& graph;
    const Array<AudioProcessorGraph::Node*>& orderedNodes;

    // Parallel arrays: nodeIds[i]/channels[i] record which node's output channel
    // currently occupies shared audio buffer i; midiNodeIds does the same for MIDI.
    Array<int> channels;
    Array<uint32> nodeIds, midiNodeIds;

    // Sentinel "owners": freeNodeID marks a reusable buffer; zeroNodeID marks the
    // read-only silent buffer at index 0.
    enum { freeNodeID = 0xffffffff, zeroNodeID = 0xfffffffe };

    static bool isNodeBusy (uint32 nodeID) noexcept { return nodeID != freeNodeID && nodeID != zeroNodeID; }

    // Parallel arrays mapping node id -> accumulated latency at that node's output.
    Array<uint32> nodeDelayIDs;
    Array<int> nodeDelays;
    int totalLatency;

    // Latency at a node's output; an unknown id indexes at -1, which the Array
    // maps to its default value (0).
    int getNodeDelay (const uint32 nodeID) const  { return nodeDelays [nodeDelayIDs.indexOf (nodeID)]; }

    // Records (or updates) the output latency for a node.
    void setNodeDelay (const uint32 nodeID, const int latency)
    {
        const int index = nodeDelayIDs.indexOf (nodeID);

        if (index >= 0)
        {
            nodeDelays.set (index, latency);
        }
        else
        {
            nodeDelayIDs.add (nodeID);
            nodeDelays.add (latency);
        }
    }

    // Worst-case latency across everything feeding into the given node.
    int getInputLatencyForNode (const uint32 nodeID) const
    {
        int maxLatency = 0;

        for (int i = graph.getNumConnections(); --i >= 0;)
        {
            const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

            if (c->destNodeId == nodeID)
                maxLatency = jmax (maxLatency, getNodeDelay (c->sourceNodeId));
        }

        return maxLatency;
    }

    //==============================================================================
    // Emits the ops needed to gather this node's inputs (clearing, copying,
    // mixing, and delay-compensating as required), then the ProcessBufferOp itself.
    void createRenderingOpsForNode (AudioProcessorGraph::Node& node,
                                    Array<void*>& renderingOps,
                                    const int ourRenderingIndex)
    {
        AudioProcessor& processor = *node.getProcessor();
        const int numIns = processor.getTotalNumInputChannels();
        const int numOuts = processor.getTotalNumOutputChannels();
        const int totalChans = jmax (numIns, numOuts);

        Array<int> audioChannelsToUse;
        int midiBufferToUse = -1;

        int maxLatency = getInputLatencyForNode (node.nodeId);

        for (int inputChan = 0; inputChan < numIns; ++inputChan)
        {
            // get a list of all the inputs to this node
            Array<uint32> sourceNodes;
            Array<int> sourceOutputChans;

            for (int i = graph.getNumConnections(); --i >= 0;)
            {
                const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

                if (c->destNodeId == node.nodeId && c->destChannelIndex == inputChan)
                {
                    sourceNodes.add (c->sourceNodeId);
                    sourceOutputChans.add (c->sourceChannelIndex);
                }
            }

            int bufIndex = -1;

            if (sourceNodes.size() == 0)
            {
                // unconnected input channel
                if (inputChan >= numOuts)
                {
                    // This channel is input-only, so the silent buffer can be read directly.
                    bufIndex = getReadOnlyEmptyBuffer();
                    jassert (bufIndex >= 0);
                }
                else
                {
                    // The processor will also write here, so it needs its own cleared buffer.
                    bufIndex = getFreeBuffer (false);
                    renderingOps.add (new ClearChannelOp (bufIndex));
                }
            }
            else if (sourceNodes.size() == 1)
            {
                // channel with a straightforward single input..
                const uint32 srcNode = sourceNodes.getUnchecked(0);
                const int srcChan = sourceOutputChans.getUnchecked(0);

                bufIndex = getBufferContaining (srcNode, srcChan);

                if (bufIndex < 0)
                {
                    // if not found, this is probably a feedback loop
                    bufIndex = getReadOnlyEmptyBuffer();
                    jassert (bufIndex >= 0);
                }

                if (inputChan < numOuts
                     && isBufferNeededLater (ourRenderingIndex,
                                             inputChan,
                                             srcNode, srcChan))
                {
                    // can't mess up this channel because it's needed later by another node, so we
                    // need to use a copy of it..
                    const int newFreeBuffer = getFreeBuffer (false);

                    renderingOps.add (new CopyChannelOp (bufIndex, newFreeBuffer));

                    bufIndex = newFreeBuffer;
                }

                // Compensate for this source arriving earlier than the slowest input path.
                const int nodeDelay = getNodeDelay (srcNode);

                if (nodeDelay < maxLatency)
                    renderingOps.add (new DelayChannelOp (bufIndex, maxLatency - nodeDelay));
            }
            else
            {
                // channel with a mix of several inputs..

                // try to find a re-usable channel from our inputs..
                int reusableInputIndex = -1;

                for (int i = 0; i < sourceNodes.size(); ++i)
                {
                    const int sourceBufIndex = getBufferContaining (sourceNodes.getUnchecked(i),
                                                                    sourceOutputChans.getUnchecked(i));

                    if (sourceBufIndex >= 0
                        && ! isBufferNeededLater (ourRenderingIndex,
                                                  inputChan,
                                                  sourceNodes.getUnchecked(i),
                                                  sourceOutputChans.getUnchecked(i)))
                    {
                        // we've found one of our input chans that can be re-used..
                        reusableInputIndex = i;
                        bufIndex = sourceBufIndex;

                        const int nodeDelay = getNodeDelay (sourceNodes.getUnchecked (i));
                        if (nodeDelay < maxLatency)
                            renderingOps.add (new DelayChannelOp (sourceBufIndex, maxLatency - nodeDelay));

                        break;
                    }
                }

                if (reusableInputIndex < 0)
                {
                    // can't re-use any of our input chans, so get a new one and copy everything into it..
                    bufIndex = getFreeBuffer (false);
                    jassert (bufIndex != 0); // must never hand out the silent buffer for writing

                    const int srcIndex = getBufferContaining (sourceNodes.getUnchecked (0),
                                                              sourceOutputChans.getUnchecked (0));
                    if (srcIndex < 0)
                    {
                        // if not found, this is probably a feedback loop
                        renderingOps.add (new ClearChannelOp (bufIndex));
                    }
                    else
                    {
                        renderingOps.add (new CopyChannelOp (srcIndex, bufIndex));
                    }

                    reusableInputIndex = 0;

                    const int nodeDelay = getNodeDelay (sourceNodes.getFirst());

                    if (nodeDelay < maxLatency)
                        renderingOps.add (new DelayChannelOp (bufIndex, maxLatency - nodeDelay));
                }

                // Mix the remaining sources into the chosen buffer.
                for (int j = 0; j < sourceNodes.size(); ++j)
                {
                    if (j != reusableInputIndex)
                    {
                        int srcIndex = getBufferContaining (sourceNodes.getUnchecked(j),
                                                            sourceOutputChans.getUnchecked(j));
                        if (srcIndex >= 0)
                        {
                            const int nodeDelay = getNodeDelay (sourceNodes.getUnchecked (j));

                            if (nodeDelay < maxLatency)
                            {
                                if (! isBufferNeededLater (ourRenderingIndex, inputChan,
                                                           sourceNodes.getUnchecked(j),
                                                           sourceOutputChans.getUnchecked(j)))
                                {
                                    renderingOps.add (new DelayChannelOp (srcIndex, maxLatency - nodeDelay));
                                }
                                else // buffer is reused elsewhere, can't be delayed
                                {
                                    const int bufferToDelay = getFreeBuffer (false);
                                    renderingOps.add (new CopyChannelOp (srcIndex, bufferToDelay));
                                    renderingOps.add (new DelayChannelOp (bufferToDelay, maxLatency - nodeDelay));
                                    srcIndex = bufferToDelay;
                                }
                            }

                            renderingOps.add (new AddChannelOp (srcIndex, bufIndex));
                        }
                    }
                }
            }

            jassert (bufIndex >= 0);
            audioChannelsToUse.add (bufIndex);

            // Channels the processor also writes now hold this node's output.
            if (inputChan < numOuts)
                markBufferAsContaining (bufIndex, node.nodeId, inputChan);
        }

        // Output-only channels (beyond the input count) get fresh buffers.
        for (int outputChan = numIns; outputChan < numOuts; ++outputChan)
        {
            const int bufIndex = getFreeBuffer (false);
            jassert (bufIndex != 0);
            audioChannelsToUse.add (bufIndex);

            markBufferAsContaining (bufIndex, node.nodeId, outputChan);
        }

        // Now the same thing for midi..
        Array<uint32> midiSourceNodes;

        for (int i = graph.getNumConnections(); --i >= 0;)
        {
            const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

            if (c->destNodeId == node.nodeId && c->destChannelIndex == AudioProcessorGraph::midiChannelIndex)
                midiSourceNodes.add (c->sourceNodeId);
        }

        if (midiSourceNodes.size() == 0)
        {
            // No midi inputs..
            midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi

            if (processor.acceptsMidi() || processor.producesMidi())
                renderingOps.add (new ClearMidiBufferOp (midiBufferToUse));
        }
        else if (midiSourceNodes.size() == 1)
        {
            // One midi input..
            midiBufferToUse = getBufferContaining (midiSourceNodes.getUnchecked(0),
                                                   AudioProcessorGraph::midiChannelIndex);

            if (midiBufferToUse >= 0)
            {
                if (isBufferNeededLater (ourRenderingIndex,
                                         AudioProcessorGraph::midiChannelIndex,
                                         midiSourceNodes.getUnchecked(0),
                                         AudioProcessorGraph::midiChannelIndex))
                {
                    // can't mess up this channel because it's needed later by another node, so we
                    // need to use a copy of it..
                    const int newFreeBuffer = getFreeBuffer (true);
                    renderingOps.add (new CopyMidiBufferOp (midiBufferToUse, newFreeBuffer));
                    midiBufferToUse = newFreeBuffer;
                }
            }
            else
            {
                // probably a feedback loop, so just use an empty one..
                midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi
            }
        }
        else
        {
            // More than one midi input being mixed..
            int reusableInputIndex = -1;

            for (int i = 0; i < midiSourceNodes.size(); ++i)
            {
                const int sourceBufIndex = getBufferContaining (midiSourceNodes.getUnchecked(i),
                                                                AudioProcessorGraph::midiChannelIndex);

                if (sourceBufIndex >= 0
                     && ! isBufferNeededLater (ourRenderingIndex,
                                               AudioProcessorGraph::midiChannelIndex,
                                               midiSourceNodes.getUnchecked(i),
                                               AudioProcessorGraph::midiChannelIndex))
                {
                    // we've found one of our input buffers that can be re-used..
                    reusableInputIndex = i;
                    midiBufferToUse = sourceBufIndex;
                    break;
                }
            }

            if (reusableInputIndex < 0)
            {
                // can't re-use any of our input buffers, so get a new one and copy everything into it..
                midiBufferToUse = getFreeBuffer (true);
                jassert (midiBufferToUse >= 0);

                const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(0),
                                                          AudioProcessorGraph::midiChannelIndex);
                if (srcIndex >= 0)
                    renderingOps.add (new CopyMidiBufferOp (srcIndex, midiBufferToUse));
                else
                    renderingOps.add (new ClearMidiBufferOp (midiBufferToUse));

                reusableInputIndex = 0;
            }

            // Merge the remaining midi sources into the chosen buffer.
            for (int j = 0; j < midiSourceNodes.size(); ++j)
            {
                if (j != reusableInputIndex)
                {
                    const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(j),
                                                              AudioProcessorGraph::midiChannelIndex);
                    if (srcIndex >= 0)
                        renderingOps.add (new AddMidiBufferOp (srcIndex, midiBufferToUse));
                }
            }
        }

        if (processor.producesMidi())
            markBufferAsContaining (midiBufferToUse, node.nodeId,
                                    AudioProcessorGraph::midiChannelIndex);

        // This node's output latency is its slowest input path plus its own.
        setNodeDelay (node.nodeId, maxLatency + processor.getLatencySamples());

        // Nodes with no audio outputs set the graph's overall reported latency.
        if (numOuts == 0)
            totalLatency = maxLatency;

        renderingOps.add (new ProcessBufferOp (&node, audioChannelsToUse,
                                               totalChans, midiBufferToUse));
    }

    //==============================================================================
    // Finds (or appends) a free shared buffer; index 0 is never handed out
    // because it's the read-only silent buffer.
    int getFreeBuffer (const bool forMidi)
    {
        if (forMidi)
        {
            for (int i = 1; i < midiNodeIds.size(); ++i)
                if (midiNodeIds.getUnchecked(i) == freeNodeID)
                    return i;

            midiNodeIds.add ((uint32) freeNodeID);
            return midiNodeIds.size() - 1;
        }
        else
        {
            for (int i = 1; i < nodeIds.size(); ++i)
                if (nodeIds.getUnchecked(i) == freeNodeID)
                    return i;

            nodeIds.add ((uint32) freeNodeID);
            channels.add (0);
            return nodeIds.size() - 1;
        }
    }

    // Shared buffer 0 is permanently silent and must never be written to.
    int getReadOnlyEmptyBuffer() const noexcept
    {
        return 0;
    }

    // Returns the shared buffer index currently holding the given node's output
    // channel (or its MIDI output when outputChannel == midiChannelIndex), or -1.
    int getBufferContaining (const uint32 nodeId, const int outputChannel) const noexcept
    {
        if (outputChannel == AudioProcessorGraph::midiChannelIndex)
        {
            for (int i = midiNodeIds.size(); --i >= 0;)
                if (midiNodeIds.getUnchecked(i) == nodeId)
                    return i;
        }
        else
        {
            for (int i = nodeIds.size(); --i >= 0;)
                if (nodeIds.getUnchecked(i) == nodeId
                     && channels.getUnchecked(i) == outputChannel)
                    return i;
        }

        return -1;
    }

    // After rendering step `stepIndex`, releases any buffers whose contents
    // no later node will read again.
    void markAnyUnusedBuffersAsFree (const int stepIndex)
    {
        for (int i = 0; i < nodeIds.size(); ++i)
        {
            if (isNodeBusy (nodeIds.getUnchecked(i))
                 && ! isBufferNeededLater (stepIndex, -1,
                                           nodeIds.getUnchecked(i),
                                           channels.getUnchecked(i)))
            {
                nodeIds.set (i, (uint32) freeNodeID);
            }
        }

        for (int i = 0; i < midiNodeIds.size(); ++i)
        {
            if (isNodeBusy (midiNodeIds.getUnchecked(i))
                 && ! isBufferNeededLater (stepIndex, -1,
                                           midiNodeIds.getUnchecked(i),
                                           AudioProcessorGraph::midiChannelIndex))
            {
                midiNodeIds.set (i, (uint32) freeNodeID);
            }
        }
    }

    // True if any node at or after stepIndexToSearchFrom still consumes the
    // source (nodeId, outputChanIndex). inputChannelOfIndexToIgnore excludes
    // the input currently being wired up — on the first step only.
    bool isBufferNeededLater (int stepIndexToSearchFrom,
                              int inputChannelOfIndexToIgnore,
                              const uint32 nodeId,
                              const int outputChanIndex) const
    {
        while (stepIndexToSearchFrom < orderedNodes.size())
        {
            const AudioProcessorGraph::Node* const node = (const AudioProcessorGraph::Node*) orderedNodes.getUnchecked (stepIndexToSearchFrom);

            if (outputChanIndex == AudioProcessorGraph::midiChannelIndex)
            {
                if (inputChannelOfIndexToIgnore != AudioProcessorGraph::midiChannelIndex
                     && graph.getConnectionBetween (nodeId, AudioProcessorGraph::midiChannelIndex,
                                                    node->nodeId, AudioProcessorGraph::midiChannelIndex) != nullptr)
                    return true;
            }
            else
            {
                for (int i = 0; i < node->getProcessor()->getTotalNumInputChannels(); ++i)
                    if (i != inputChannelOfIndexToIgnore
                         && graph.getConnectionBetween (nodeId, outputChanIndex,
                                                        node->nodeId, i) != nullptr)
                        return true;
            }

            inputChannelOfIndexToIgnore = -1; // only skip the channel on the current step
            ++stepIndexToSearchFrom;
        }

        return false;
    }

    // Records that shared buffer `bufferNum` now holds the given node's output.
    void markBufferAsContaining (int bufferNum, uint32 nodeId, int outputIndex)
    {
        if (outputIndex == AudioProcessorGraph::midiChannelIndex)
        {
            jassert (bufferNum > 0 && bufferNum < midiNodeIds.size());

            midiNodeIds.set (bufferNum, nodeId);
        }
        else
        {
            jassert (bufferNum >= 0 && bufferNum < nodeIds.size());

            nodeIds.set (bufferNum, nodeId);
            channels.set (bufferNum, outputIndex);
        }
    }

    JUCE_DECLARE_NON_COPYABLE (RenderingOpSequenceCalculator)
};
  612. //==============================================================================
// Holds a fast lookup table for checking which nodes are inputs to others.
class ConnectionLookupTable
{
public:
    // Builds one Entry per destination node, each holding the sorted set of
    // node ids that feed it directly.
    explicit ConnectionLookupTable (const OwnedArray<AudioProcessorGraph::Connection>& connections)
    {
        for (int i = 0; i < connections.size(); ++i)
        {
            const AudioProcessorGraph::Connection* const c = connections.getUnchecked(i);

            int index;
            Entry* entry = findEntry (c->destNodeId, index);

            if (entry == nullptr)
            {
                // No entry yet for this destination: insert one, keeping the
                // array sorted (findEntry reported the insertion point).
                entry = new Entry (c->destNodeId);
                entries.insert (index, entry);
            }

            entry->srcNodes.add (c->sourceNodeId);
        }
    }

    // True if possibleInputId feeds possibleDestinationId, directly or through
    // any chain of intermediate nodes.
    bool isAnInputTo (const uint32 possibleInputId,
                      const uint32 possibleDestinationId) const noexcept
    {
        return isAnInputToRecursive (possibleInputId, possibleDestinationId, entries.size());
    }

private:
    //==============================================================================
    // One destination node plus the set of nodes connecting directly into it.
    struct Entry
    {
        explicit Entry (const uint32 destNodeId_) noexcept : destNodeId (destNodeId_) {}

        const uint32 destNodeId;
        SortedSet<uint32> srcNodes;

        JUCE_DECLARE_NON_COPYABLE (Entry)
    };

    OwnedArray<Entry> entries; // kept sorted by destNodeId for findEntry's search

    // Depth-first walk upstream from possibleDestinationId. recursionCheck
    // (starting at entries.size()) bounds the depth so a cyclic graph cannot
    // recurse forever.
    bool isAnInputToRecursive (const uint32 possibleInputId,
                               const uint32 possibleDestinationId,
                               int recursionCheck) const noexcept
    {
        int index;

        if (const Entry* const entry = findEntry (possibleDestinationId, index))
        {
            const SortedSet<uint32>& srcNodes = entry->srcNodes;

            // Direct connection?
            if (srcNodes.contains (possibleInputId))
                return true;

            // Otherwise recurse into each direct source, if depth remains.
            if (--recursionCheck >= 0)
            {
                for (int i = 0; i < srcNodes.size(); ++i)
                    if (isAnInputToRecursive (possibleInputId, srcNodes.getUnchecked(i), recursionCheck))
                        return true;
            }
        }

        return false;
    }

    // Binary search over the sorted `entries` array. Returns the matching
    // Entry (or nullptr if absent) and always sets insertIndex to the position
    // where a new Entry for destNodeId should be inserted to keep it sorted.
    Entry* findEntry (const uint32 destNodeId, int& insertIndex) const noexcept
    {
        Entry* result = nullptr;
        int start = 0;
        int end = entries.size();

        for (;;)
        {
            if (start >= end)
            {
                break; // empty range: not found
            }
            else if (destNodeId == entries.getUnchecked (start)->destNodeId)
            {
                result = entries.getUnchecked (start);
                break;
            }
            else
            {
                const int halfway = (start + end) / 2;

                if (halfway == start)
                {
                    // Range narrowed to one element that didn't match: the
                    // insertion point is either side of it depending on order.
                    if (destNodeId >= entries.getUnchecked (halfway)->destNodeId)
                        ++start;

                    break;
                }
                else if (destNodeId >= entries.getUnchecked (halfway)->destNodeId)
                    start = halfway;
                else
                    end = halfway;
            }
        }

        insertIndex = start;
        return result;
    }

    JUCE_DECLARE_NON_COPYABLE (ConnectionLookupTable)
};
  702. //==============================================================================
  703. struct ConnectionSorter
  704. {
  705. static int compareElements (const AudioProcessorGraph::Connection* const first,
  706. const AudioProcessorGraph::Connection* const second) noexcept
  707. {
  708. if (first->sourceNodeId < second->sourceNodeId) return -1;
  709. if (first->sourceNodeId > second->sourceNodeId) return 1;
  710. if (first->destNodeId < second->destNodeId) return -1;
  711. if (first->destNodeId > second->destNodeId) return 1;
  712. if (first->sourceChannelIndex < second->sourceChannelIndex) return -1;
  713. if (first->sourceChannelIndex > second->sourceChannelIndex) return 1;
  714. if (first->destChannelIndex < second->destChannelIndex) return -1;
  715. if (first->destChannelIndex > second->destChannelIndex) return 1;
  716. return 0;
  717. }
  718. };
  719. }
  720. //==============================================================================
// Records one edge of the graph: (source node, source channel) ->
// (destination node, destination channel). midiChannelIndex as a channel
// value denotes a node's MIDI port.
AudioProcessorGraph::Connection::Connection (const uint32 sourceID, const int sourceChannel,
                                             const uint32 destID, const int destChannel) noexcept
    : sourceNodeId (sourceID), sourceChannelIndex (sourceChannel),
      destNodeId (destID), destChannelIndex (destChannel)
{
}
  727. //==============================================================================
// Wraps one AudioProcessor as a graph node; the processor pointer is stored
// in the `processor` member and must not be null.
AudioProcessorGraph::Node::Node (const uint32 nodeID, AudioProcessor* const p) noexcept
    : nodeId (nodeID), processor (p), isPrepared (false)
{
    jassert (processor != nullptr);
}
  733. void AudioProcessorGraph::Node::prepare (const double newSampleRate, const int newBlockSize,
  734. AudioProcessorGraph* const graph)
  735. {
  736. if (! isPrepared)
  737. {
  738. isPrepared = true;
  739. setParentGraph (graph);
  740. processor->setRateAndBufferSizeDetails (newSampleRate, newBlockSize);
  741. processor->prepareToPlay (newSampleRate, newBlockSize);
  742. }
  743. }
  744. void AudioProcessorGraph::Node::unprepare()
  745. {
  746. if (isPrepared)
  747. {
  748. isPrepared = false;
  749. processor->releaseResources();
  750. }
  751. }
  752. void AudioProcessorGraph::Node::setParentGraph (AudioProcessorGraph* const graph) const
  753. {
  754. if (AudioProcessorGraph::AudioGraphIOProcessor* const ioProc
  755. = dynamic_cast<AudioProcessorGraph::AudioGraphIOProcessor*> (processor.get()))
  756. ioProc->setParentGraph (graph);
  757. }
  758. //==============================================================================
  759. struct AudioProcessorGraph::AudioProcessorGraphBufferHelpers
  760. {
  761. AudioProcessorGraphBufferHelpers()
  762. {
  763. currentAudioInputBuffer = nullptr;
  764. }
  765. void setRenderingBufferSize (int newNumChannels, int newNumSamples)
  766. {
  767. renderingBuffers.setSize (newNumChannels, newNumSamples);
  768. renderingBuffers.clear();
  769. }
  770. void release()
  771. {
  772. renderingBuffers.setSize (1, 1);
  773. currentAudioInputBuffer = nullptr;
  774. currentAudioOutputBuffer.setSize (1, 1);
  775. }
  776. void prepareInOutBuffers(int newNumChannels, int newNumSamples)
  777. {
  778. currentAudioInputBuffer = nullptr;
  779. currentAudioOutputBuffer.setSize (newNumChannels, newNumSamples);
  780. }
  781. AudioSampleBuffer renderingBuffers;
  782. AudioSampleBuffer* currentAudioInputBuffer;
  783. AudioSampleBuffer currentAudioOutputBuffer;
  784. };
  785. //==============================================================================
// Starts with an empty graph: no nodes, ids allocated from 1 upwards,
// and helper buffers created up front.
AudioProcessorGraph::AudioProcessorGraph()
    : lastNodeId (0), audioBuffers (new AudioProcessorGraphBufferHelpers),
      currentMidiInputBuffer (nullptr), isPrepared (false), needsReorder (false)
{
}
AudioProcessorGraph::~AudioProcessorGraph()
{
    // Drop the compiled rendering ops first, then the nodes/connections they
    // were built from.
    clearRenderingSequence();
    clear();
}
// Display name reported for the graph when it is used as a processor itself.
const String AudioProcessorGraph::getName() const
{
    return "Audio Graph";
}
  800. //==============================================================================
// Removes every node and connection and flags the rendering sequence as stale.
void AudioProcessorGraph::clear()
{
    nodes.clear();
    connections.clear();
    needsReorder = true;
}
  807. AudioProcessorGraph::Node* AudioProcessorGraph::getNodeForId (const uint32 nodeId) const
  808. {
  809. for (int i = nodes.size(); --i >= 0;)
  810. if (nodes.getUnchecked(i)->nodeId == nodeId)
  811. return nodes.getUnchecked(i);
  812. return nullptr;
  813. }
// Adds a processor to the graph, wrapping it in a new Node.
//
// newProcessor must be non-null and must not be the graph itself; the same
// processor instance may only be added once. Passing nodeId == 0 assigns the
// next free id automatically. Returns the new node, or nullptr on misuse.
AudioProcessorGraph::Node* AudioProcessorGraph::addNode (AudioProcessor* const newProcessor, uint32 nodeId)
{
    if (newProcessor == nullptr || newProcessor == this)
    {
        jassertfalse;
        return nullptr;
    }

    // Reject a processor object that is already wrapped by some node.
    for (int i = nodes.size(); --i >= 0;)
    {
        if (nodes.getUnchecked(i)->getProcessor() == newProcessor)
        {
            jassertfalse; // Cannot add the same object to the graph twice!
            return nullptr;
        }
    }

    if (nodeId == 0)
    {
        nodeId = ++lastNodeId;
    }
    else
    {
        // you can't add a node with an id that already exists in the graph..
        jassert (getNodeForId (nodeId) == nullptr);
        removeNode (nodeId); // defensive: in release builds, evict any clash

        // Keep the id counter ahead of explicitly chosen ids.
        if (nodeId > lastNodeId)
            lastNodeId = nodeId;
    }

    // newProcessor->setPlayHead (getPlayHead());

    Node* const n = new Node (nodeId, newProcessor);
    nodes.add (n);

    // A prepared graph must rebuild its rendering sequence to include the node.
    if (isPrepared)
        needsReorder = true;

    n->setParentGraph (this);
    return n;
}
  849. bool AudioProcessorGraph::removeNode (const uint32 nodeId)
  850. {
  851. disconnectNode (nodeId);
  852. for (int i = nodes.size(); --i >= 0;)
  853. {
  854. if (nodes.getUnchecked(i)->nodeId == nodeId)
  855. {
  856. nodes.remove (i);
  857. if (isPrepared)
  858. needsReorder = true;
  859. return true;
  860. }
  861. }
  862. return false;
  863. }
  864. bool AudioProcessorGraph::removeNode (Node* node)
  865. {
  866. if (node != nullptr)
  867. return removeNode (node->nodeId);
  868. jassertfalse;
  869. return false;
  870. }
  871. //==============================================================================
//==============================================================================
// Finds an existing connection matching all four endpoint values.
// indexOfSorted() returns -1 when no match exists, and Array::operator[]
// yields the default value (nullptr for a pointer element) for an
// out-of-range index — so this returns nullptr for "no such connection".
const AudioProcessorGraph::Connection* AudioProcessorGraph::getConnectionBetween (const uint32 sourceNodeId,
                                                                                  const int sourceChannelIndex,
                                                                                  const uint32 destNodeId,
                                                                                  const int destChannelIndex) const
{
    const Connection c (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex);
    GraphRenderingOps::ConnectionSorter sorter;

    return connections [connections.indexOfSorted (sorter, &c)];
}
  881. bool AudioProcessorGraph::isConnected (const uint32 possibleSourceNodeId,
  882. const uint32 possibleDestNodeId) const
  883. {
  884. for (int i = connections.size(); --i >= 0;)
  885. {
  886. const Connection* const c = connections.getUnchecked(i);
  887. if (c->sourceNodeId == possibleSourceNodeId
  888. && c->destNodeId == possibleDestNodeId)
  889. {
  890. return true;
  891. }
  892. }
  893. return false;
  894. }
// Returns true if a connection from (sourceNodeId, sourceChannelIndex) to
// (destNodeId, destChannelIndex) would be valid and does not already exist.
// midiChannelIndex is the sentinel channel number used for MIDI connections;
// audio and MIDI endpoints may not be mixed within a single connection.
bool AudioProcessorGraph::canConnect (const uint32 sourceNodeId,
                                      const int sourceChannelIndex,
                                      const uint32 destNodeId,
                                      const int destChannelIndex) const
{
    // Reject negative channels, self-connections, and audio<->MIDI mixes.
    if (sourceChannelIndex < 0
         || destChannelIndex < 0
         || sourceNodeId == destNodeId
         || (destChannelIndex == midiChannelIndex) != (sourceChannelIndex == midiChannelIndex))
        return false;

    // Source node must exist and offer that output channel (or produce MIDI
    // when a MIDI connection is requested).
    const Node* const source = getNodeForId (sourceNodeId);

    if (source == nullptr
         || (sourceChannelIndex != midiChannelIndex && sourceChannelIndex >= source->processor->getTotalNumOutputChannels())
         || (sourceChannelIndex == midiChannelIndex && ! source->processor->producesMidi()))
        return false;

    // Same checks on the destination side, for inputs / MIDI acceptance.
    const Node* const dest = getNodeForId (destNodeId);

    if (dest == nullptr
         || (destChannelIndex != midiChannelIndex && destChannelIndex >= dest->processor->getTotalNumInputChannels())
         || (destChannelIndex == midiChannelIndex && ! dest->processor->acceptsMidi()))
        return false;

    // Finally, duplicate connections are not allowed.
    return getConnectionBetween (sourceNodeId, sourceChannelIndex,
                                 destNodeId, destChannelIndex) == nullptr;
}
// Creates a new connection after validating it via canConnect(). The list is
// kept sorted (ConnectionSorter) so getConnectionBetween() can binary-search.
// Returns false if the connection was invalid or already present.
bool AudioProcessorGraph::addConnection (const uint32 sourceNodeId,
                                         const int sourceChannelIndex,
                                         const uint32 destNodeId,
                                         const int destChannelIndex)
{
    if (! canConnect (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex))
        return false;

    GraphRenderingOps::ConnectionSorter sorter;
    connections.addSorted (sorter, new Connection (sourceNodeId, sourceChannelIndex,
                                                   destNodeId, destChannelIndex));

    // A prepared graph needs its rendering sequence rebuilt.
    if (isPrepared)
        needsReorder = true;

    return true;
}
// Removes the connection at the given index in the sorted connection list,
// flagging the rendering sequence for a rebuild if the graph is prepared.
void AudioProcessorGraph::removeConnection (const int index)
{
    connections.remove (index);

    if (isPrepared)
        needsReorder = true;
}
  938. bool AudioProcessorGraph::removeConnection (const uint32 sourceNodeId, const int sourceChannelIndex,
  939. const uint32 destNodeId, const int destChannelIndex)
  940. {
  941. bool doneAnything = false;
  942. for (int i = connections.size(); --i >= 0;)
  943. {
  944. const Connection* const c = connections.getUnchecked(i);
  945. if (c->sourceNodeId == sourceNodeId
  946. && c->destNodeId == destNodeId
  947. && c->sourceChannelIndex == sourceChannelIndex
  948. && c->destChannelIndex == destChannelIndex)
  949. {
  950. removeConnection (i);
  951. doneAnything = true;
  952. }
  953. }
  954. return doneAnything;
  955. }
  956. bool AudioProcessorGraph::disconnectNode (const uint32 nodeId)
  957. {
  958. bool doneAnything = false;
  959. for (int i = connections.size(); --i >= 0;)
  960. {
  961. const Connection* const c = connections.getUnchecked(i);
  962. if (c->sourceNodeId == nodeId || c->destNodeId == nodeId)
  963. {
  964. removeConnection (i);
  965. doneAnything = true;
  966. }
  967. }
  968. return doneAnything;
  969. }
// A connection remains legal while both of its endpoint nodes still exist
// and the referenced channel is still valid: a real channel index for audio,
// or the produces/accepts-MIDI capability for the midiChannelIndex sentinel.
bool AudioProcessorGraph::isConnectionLegal (const Connection* const c) const
{
    jassert (c != nullptr);

    const Node* const source = getNodeForId (c->sourceNodeId);
    const Node* const dest = getNodeForId (c->destNodeId);

    return source != nullptr
        && dest != nullptr
        && (c->sourceChannelIndex != midiChannelIndex ? isPositiveAndBelow (c->sourceChannelIndex, source->processor->getTotalNumOutputChannels())
                                                      : source->processor->producesMidi())
        && (c->destChannelIndex != midiChannelIndex ? isPositiveAndBelow (c->destChannelIndex, dest->processor->getTotalNumInputChannels())
                                                    : dest->processor->acceptsMidi());
}
  982. bool AudioProcessorGraph::removeIllegalConnections()
  983. {
  984. bool doneAnything = false;
  985. for (int i = connections.size(); --i >= 0;)
  986. {
  987. if (! isConnectionLegal (connections.getUnchecked(i)))
  988. {
  989. removeConnection (i);
  990. doneAnything = true;
  991. }
  992. }
  993. return doneAnything;
  994. }
  995. //==============================================================================
  996. static void deleteRenderOpArray (Array<void*>& ops)
  997. {
  998. for (int i = ops.size(); --i >= 0;)
  999. delete static_cast<GraphRenderingOps::AudioGraphRenderingOpBase*> (ops.getUnchecked(i));
  1000. }
// Discards the current rendering sequence. The ops are swapped out under the
// callback lock but deleted only after releasing it, so the audio callback
// is blocked for as short a time as possible.
void AudioProcessorGraph::clearRenderingSequence()
{
    Array<void*> oldOps;

    {
        const CarlaRecursiveMutexLocker cml (getCallbackLock());
        renderingOps.swapWith (oldOps);
    }

    deleteRenderOpArray (oldOps);
}
  1010. bool AudioProcessorGraph::isAnInputTo (const uint32 possibleInputId,
  1011. const uint32 possibleDestinationId,
  1012. const int recursionCheck) const
  1013. {
  1014. if (recursionCheck > 0)
  1015. {
  1016. for (int i = connections.size(); --i >= 0;)
  1017. {
  1018. const AudioProcessorGraph::Connection* const c = connections.getUnchecked (i);
  1019. if (c->destNodeId == possibleDestinationId
  1020. && (c->sourceNodeId == possibleInputId
  1021. || isAnInputTo (possibleInputId, c->sourceNodeId, recursionCheck - 1)))
  1022. return true;
  1023. }
  1024. }
  1025. return false;
  1026. }
// Recomputes the list of rendering operations from the current node and
// connection topology, then atomically swaps it in under the callback lock.
void AudioProcessorGraph::buildRenderingSequence()
{
    Array<void*> newRenderingOps;
    int numRenderingBuffersNeeded = 2;
    int numMidiBuffersNeeded = 1;

    {
        const CarlaRecursiveMutexLocker cml (reorderMutex);

        // Order the nodes so producers come before consumers: each node is
        // inserted just before the first already-ordered node it feeds into
        // (directly or transitively, per the connection lookup table).
        Array<Node*> orderedNodes;

        {
            const GraphRenderingOps::ConnectionLookupTable table (connections);

            for (int i = 0; i < nodes.size(); ++i)
            {
                Node* const node = nodes.getUnchecked(i);

                node->prepare (getSampleRate(), getBlockSize(), this);

                int j = 0;
                for (; j < orderedNodes.size(); ++j)
                    if (table.isAnInputTo (node->nodeId, ((Node*) orderedNodes.getUnchecked(j))->nodeId))
                        break;

                orderedNodes.insert (j, node);
            }
        }

        // Convert the ordered nodes into concrete rendering ops, and learn
        // how many scratch audio/MIDI buffers the sequence will need.
        GraphRenderingOps::RenderingOpSequenceCalculator calculator (*this, orderedNodes, newRenderingOps);

        numRenderingBuffersNeeded = calculator.getNumBuffersNeeded();
        numMidiBuffersNeeded = calculator.getNumMidiBuffersNeeded();
    }

    {
        // swap over to the new rendering sequence..
        const CarlaRecursiveMutexLocker cml (getCallbackLock());

        audioBuffers->setRenderingBufferSize (numRenderingBuffersNeeded, getBlockSize());

        for (int i = midiBuffers.size(); --i >= 0;)
            midiBuffers.getUnchecked(i)->clear();

        while (midiBuffers.size() < numMidiBuffersNeeded)
            midiBuffers.add (new MidiBuffer());

        renderingOps.swapWith (newRenderingOps);
    }

    // delete the old ones.. (newRenderingOps now holds the previous sequence,
    // and deletion happens outside the callback lock)
    deleteRenderOpArray (newRenderingOps);
}
  1065. //==============================================================================
//==============================================================================
// AudioProcessor implementation: sizes the I/O buffers and (re)builds the
// rendering sequence before playback. The sampleRate argument is unused here;
// buildRenderingSequence() reads getSampleRate() when preparing the nodes.
void AudioProcessorGraph::prepareToPlay (double /*sampleRate*/, int estimatedSamplesPerBlock)
{
    audioBuffers->prepareInOutBuffers (jmax (1, getTotalNumOutputChannels()), estimatedSamplesPerBlock);

    currentMidiInputBuffer = nullptr;
    currentMidiOutputBuffer.clear();

    clearRenderingSequence();
    buildRenderingSequence();

    isPrepared = true;
}
// Unprepares every node and shrinks/clears all shared buffers. The rendering
// op list itself is left in place; isPrepared gates further reordering.
void AudioProcessorGraph::releaseResources()
{
    isPrepared = false;

    for (int i = 0; i < nodes.size(); ++i)
        nodes.getUnchecked(i)->unprepare();

    audioBuffers->release();
    midiBuffers.clear();

    currentMidiInputBuffer = nullptr;
    currentMidiOutputBuffer.clear();
}
// Resets every node's processor, holding the callback lock so the audio
// thread cannot run the graph mid-reset.
void AudioProcessorGraph::reset()
{
    const CarlaRecursiveMutexLocker cml (getCallbackLock());

    for (int i = 0; i < nodes.size(); ++i)
        nodes.getUnchecked(i)->getProcessor()->reset();
}
// Propagates the realtime/offline mode to the graph itself and to every
// contained processor, under the callback lock.
void AudioProcessorGraph::setNonRealtime (bool isProcessingNonRealtime) noexcept
{
    const CarlaRecursiveMutexLocker cml (getCallbackLock());

    AudioProcessor::setNonRealtime (isProcessingNonRealtime);

    for (int i = 0; i < nodes.size(); ++i)
        nodes.getUnchecked(i)->getProcessor()->setNonRealtime (isProcessingNonRealtime);
}
  1098. // void AudioProcessorGraph::setPlayHead (AudioPlayHead* audioPlayHead)
  1099. // {
  1100. // const CarlaRecursiveMutexLocker cml (getCallbackLock());
  1101. //
  1102. // AudioProcessor::setPlayHead (audioPlayHead);
  1103. //
  1104. // for (int i = 0; i < nodes.size(); ++i)
  1105. // nodes.getUnchecked(i)->getProcessor()->setPlayHead (audioPlayHead);
  1106. // }
// Renders one block: binds the caller's buffers as the graph's I/O, runs
// every pre-built rendering op in order, then copies the accumulated output
// back into the caller's audio and MIDI buffers.
void AudioProcessorGraph::processAudio (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    AudioSampleBuffer& renderingBuffers = audioBuffers->renderingBuffers;
    AudioSampleBuffer*& currentAudioInputBuffer = audioBuffers->currentAudioInputBuffer;
    AudioSampleBuffer& currentAudioOutputBuffer = audioBuffers->currentAudioOutputBuffer;

    const int numSamples = buffer.getNumSamples();

    // The caller's buffer doubles as the graph's audio input; the output is
    // accumulated separately (always at least one channel) and starts silent.
    currentAudioInputBuffer = &buffer;
    currentAudioOutputBuffer.setSize (jmax (1, buffer.getNumChannels()), numSamples);
    currentAudioOutputBuffer.clear();

    currentMidiInputBuffer = &midiMessages;
    currentMidiOutputBuffer.clear();

    // Execute the rendering sequence; ops are stored type-erased as void*.
    for (int i = 0; i < renderingOps.size(); ++i)
    {
        GraphRenderingOps::AudioGraphRenderingOpBase* const op
            = (GraphRenderingOps::AudioGraphRenderingOpBase*) renderingOps.getUnchecked(i);

        op->perform (renderingBuffers, midiBuffers, numSamples);
    }

    // Hand the results back through the in/out parameters.
    for (int i = 0; i < buffer.getNumChannels(); ++i)
        buffer.copyFrom (i, 0, currentAudioOutputBuffer, i, 0, numSamples);

    midiMessages.clear();
    midiMessages.addEvents (currentMidiOutputBuffer, 0, buffer.getNumSamples(), 0);
}
// The graph as a whole always accepts incoming MIDI.
bool AudioProcessorGraph::acceptsMidi() const { return true; }
// The graph as a whole always reports that it can produce MIDI.
bool AudioProcessorGraph::producesMidi() const { return true; }
// AudioProcessor entry point; all the real work happens in processAudio().
void AudioProcessorGraph::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    processAudio (buffer, midiMessages);
}
  1135. void AudioProcessorGraph::reorderNowIfNeeded()
  1136. {
  1137. if (needsReorder)
  1138. {
  1139. needsReorder = false;
  1140. buildRenderingSequence();
  1141. }
  1142. }
  1143. //==============================================================================
//==============================================================================
// Creates an I/O endpoint node of the given type; it stays inert until
// setParentGraph() attaches it to a graph.
AudioProcessorGraph::AudioGraphIOProcessor::AudioGraphIOProcessor (const IODeviceType deviceType)
    : type (deviceType), graph (nullptr)
{
}
// Nothing to clean up: the graph pointer is non-owning.
AudioProcessorGraph::AudioGraphIOProcessor::~AudioGraphIOProcessor()
{
}
  1151. const String AudioProcessorGraph::AudioGraphIOProcessor::getName() const
  1152. {
  1153. switch (type)
  1154. {
  1155. case audioOutputNode: return "Audio Output";
  1156. case audioInputNode: return "Audio Input";
  1157. case midiOutputNode: return "Midi Output";
  1158. case midiInputNode: return "Midi Input";
  1159. default: break;
  1160. }
  1161. return String();
  1162. }
#if 0
// Compiled out in this build — kept for reference only. Fills in a plugin
// description, mirroring the owning graph's channel counts for I/O nodes.
void AudioProcessorGraph::AudioGraphIOProcessor::fillInPluginDescription (PluginDescription& d) const
{
    d.name = getName();
    d.uid = d.name.hashCode();
    d.category = "I/O devices";
    d.pluginFormatName = "Internal";
    d.manufacturerName = "ROLI Ltd.";
    d.version = "1.0";
    d.isInstrument = false;

    d.numInputChannels = getTotalNumInputChannels();

    if (type == audioOutputNode && graph != nullptr)
        d.numInputChannels = graph->getTotalNumInputChannels();

    d.numOutputChannels = getTotalNumOutputChannels();

    if (type == audioInputNode && graph != nullptr)
        d.numOutputChannels = graph->getTotalNumOutputChannels();
}
#endif
// The I/O processor has no resources of its own to prepare; it only checks
// that it has already been attached to a graph.
void AudioProcessorGraph::AudioGraphIOProcessor::prepareToPlay (double, int)
{
    jassert (graph != nullptr);
}
// Nothing to release: all buffers are owned by the parent graph.
void AudioProcessorGraph::AudioGraphIOProcessor::releaseResources()
{
}
  1188. void AudioProcessorGraph::AudioGraphIOProcessor::processAudio (AudioSampleBuffer& buffer,
  1189. MidiBuffer& midiMessages)
  1190. {
  1191. AudioSampleBuffer*& currentAudioInputBuffer =
  1192. graph->audioBuffers->currentAudioInputBuffer;
  1193. AudioSampleBuffer& currentAudioOutputBuffer =
  1194. graph->audioBuffers->currentAudioOutputBuffer;
  1195. jassert (graph != nullptr);
  1196. switch (type)
  1197. {
  1198. case audioOutputNode:
  1199. {
  1200. for (int i = jmin (currentAudioOutputBuffer.getNumChannels(),
  1201. buffer.getNumChannels()); --i >= 0;)
  1202. {
  1203. currentAudioOutputBuffer.addFrom (i, 0, buffer, i, 0, buffer.getNumSamples());
  1204. }
  1205. break;
  1206. }
  1207. case audioInputNode:
  1208. {
  1209. for (int i = jmin (currentAudioInputBuffer->getNumChannels(),
  1210. buffer.getNumChannels()); --i >= 0;)
  1211. {
  1212. buffer.copyFrom (i, 0, *currentAudioInputBuffer, i, 0, buffer.getNumSamples());
  1213. }
  1214. break;
  1215. }
  1216. case midiOutputNode:
  1217. graph->currentMidiOutputBuffer.addEvents (midiMessages, 0, buffer.getNumSamples(), 0);
  1218. break;
  1219. case midiInputNode:
  1220. midiMessages.addEvents (*graph->currentMidiInputBuffer, 0, buffer.getNumSamples(), 0);
  1221. break;
  1222. default:
  1223. break;
  1224. }
  1225. }
// AudioProcessor entry point; delegates to processAudio().
void AudioProcessorGraph::AudioGraphIOProcessor::processBlock (AudioSampleBuffer& buffer,
                                                               MidiBuffer& midiMessages)
{
    processAudio (buffer, midiMessages);
}
// A MIDI *output* node accepts MIDI: it consumes events from the graph in
// order to carry them out of it.
bool AudioProcessorGraph::AudioGraphIOProcessor::acceptsMidi() const
{
    return type == midiOutputNode;
}
// A MIDI *input* node produces MIDI: it emits the graph's incoming events
// into the graph.
bool AudioProcessorGraph::AudioGraphIOProcessor::producesMidi() const
{
    return type == midiInputNode;
}
// Input nodes feed data *into* the graph; output nodes carry it back out.
bool AudioProcessorGraph::AudioGraphIOProcessor::isInput() const noexcept { return type == audioInputNode || type == midiInputNode; }
bool AudioProcessorGraph::AudioGraphIOProcessor::isOutput() const noexcept { return type == audioOutputNode || type == midiOutputNode; }
// Attaches this I/O node to a graph and mirrors the graph's channel layout:
// an audio-output node exposes the graph's outputs as its *inputs*, an
// audio-input node exposes the graph's inputs as its *outputs*, and MIDI
// nodes expose no audio channels at all.
void AudioProcessorGraph::AudioGraphIOProcessor::setParentGraph (AudioProcessorGraph* const newGraph)
{
    graph = newGraph;

    if (graph != nullptr)
    {
        setPlayConfigDetails (type == audioOutputNode ? graph->getTotalNumOutputChannels() : 0,
                              type == audioInputNode ? graph->getTotalNumInputChannels() : 0,
                              getSampleRate(),
                              getBlockSize());
    }
}
  1252. }