Audio plugin host https://kx.studio/carla
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

1546 lines
53KB

  1. /*
  2. ==============================================================================
  3. This file is part of the JUCE library.
  4. Copyright (c) 2015 - ROLI Ltd.
  5. Permission is granted to use this software under the terms of either:
  6. a) the GPL v2 (or any later version)
  7. b) the Affero GPL v3
  8. Details of these licenses can be found at: www.gnu.org/licenses
  9. JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
  10. WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
  11. A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  12. ------------------------------------------------------------------------------
  13. To release a closed-source product which uses JUCE, commercial licenses are
  14. available: visit www.juce.com for more information.
  15. ==============================================================================
  16. */
  17. #include "AudioProcessorGraph.h"
  18. #include "../containers/SortedSet.h"
  19. namespace water {
// Special pseudo channel index used in Connection to denote a node's midi port
// (kept well above any realistic audio channel count).
const int AudioProcessorGraph::midiChannelIndex = 0x1000;
  21. //==============================================================================
  22. namespace GraphRenderingOps
  23. {
/** Abstract base for one step of the graph's precompiled rendering sequence. */
struct AudioGraphRenderingOpBase
{
    AudioGraphRenderingOpBase() noexcept {}
    virtual ~AudioGraphRenderingOpBase() {}

    /** Executes this op for one block, operating on the shared audio channels
        and shared midi buffers. */
    virtual void perform (AudioSampleBuffer& sharedBufferChans,
                          const OwnedArray<MidiBuffer>& sharedMidiBuffers,
                          const int numSamples) = 0;
};
// use CRTP: the virtual perform() simply forwards to the derived class's
// own (non-virtual) perform(), so each concrete op is written without
// virtual-call syntax of its own.
template <class Child>
struct AudioGraphRenderingOp : public AudioGraphRenderingOpBase
{
    void perform (AudioSampleBuffer& sharedBufferChans,
                  const OwnedArray<MidiBuffer>& sharedMidiBuffers,
                  const int numSamples) override
    {
        static_cast<Child*> (this)->perform (sharedBufferChans, sharedMidiBuffers, numSamples);
    }
};
  43. //==============================================================================
/** Rendering op: silences one channel of the shared audio buffer. */
struct ClearChannelOp : public AudioGraphRenderingOp<ClearChannelOp>
{
    ClearChannelOp (const int channel) noexcept : channelNum (channel) {}

    void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
    {
        sharedBufferChans.clear (channelNum, 0, numSamples);
    }

    const int channelNum;

    JUCE_DECLARE_NON_COPYABLE (ClearChannelOp)
};
  54. //==============================================================================
/** Rendering op: overwrites one shared audio channel with another. */
struct CopyChannelOp : public AudioGraphRenderingOp<CopyChannelOp>
{
    CopyChannelOp (const int srcChan, const int dstChan) noexcept
        : srcChannelNum (srcChan), dstChannelNum (dstChan)
    {}

    void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
    {
        sharedBufferChans.copyFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples);
    }

    const int srcChannelNum, dstChannelNum;

    JUCE_DECLARE_NON_COPYABLE (CopyChannelOp)
};
  67. //==============================================================================
/** Rendering op: mixes (sums) one shared audio channel into another. */
struct AddChannelOp : public AudioGraphRenderingOp<AddChannelOp>
{
    AddChannelOp (const int srcChan, const int dstChan) noexcept
        : srcChannelNum (srcChan), dstChannelNum (dstChan)
    {}

    void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
    {
        sharedBufferChans.addFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples);
    }

    const int srcChannelNum, dstChannelNum;

    JUCE_DECLARE_NON_COPYABLE (AddChannelOp)
};
  80. //==============================================================================
/** Rendering op: empties one of the shared midi buffers. */
struct ClearMidiBufferOp : public AudioGraphRenderingOp<ClearMidiBufferOp>
{
    ClearMidiBufferOp (const int buffer) noexcept : bufferNum (buffer) {}

    void perform (AudioSampleBuffer&, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int)
    {
        sharedMidiBuffers.getUnchecked (bufferNum)->clear();
    }

    const int bufferNum;

    JUCE_DECLARE_NON_COPYABLE (ClearMidiBufferOp)
};
  91. //==============================================================================
/** Rendering op: replaces one shared midi buffer's contents with another's. */
struct CopyMidiBufferOp : public AudioGraphRenderingOp<CopyMidiBufferOp>
{
    CopyMidiBufferOp (const int srcBuffer, const int dstBuffer) noexcept
        : srcBufferNum (srcBuffer), dstBufferNum (dstBuffer)
    {}

    void perform (AudioSampleBuffer&, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int)
    {
        *sharedMidiBuffers.getUnchecked (dstBufferNum) = *sharedMidiBuffers.getUnchecked (srcBufferNum);
    }

    const int srcBufferNum, dstBufferNum;

    JUCE_DECLARE_NON_COPYABLE (CopyMidiBufferOp)
};
  104. //==============================================================================
  105. struct AddMidiBufferOp : public AudioGraphRenderingOp<AddMidiBufferOp>
  106. {
  107. AddMidiBufferOp (const int srcBuffer, const int dstBuffer)
  108. : srcBufferNum (srcBuffer), dstBufferNum (dstBuffer)
  109. {}
  110. void perform (AudioSampleBuffer&, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int numSamples)
  111. {
  112. sharedMidiBuffers.getUnchecked (dstBufferNum)
  113. ->addEvents (*sharedMidiBuffers.getUnchecked (srcBufferNum), 0, numSamples, 0);
  114. }
  115. const int srcBufferNum, dstBufferNum;
  116. JUCE_DECLARE_NON_COPYABLE (AddMidiBufferOp)
  117. };
  118. //==============================================================================
  119. struct DelayChannelOp : public AudioGraphRenderingOp<DelayChannelOp>
  120. {
  121. DelayChannelOp (const int chan, const int delaySize)
  122. : channel (chan),
  123. bufferSize (delaySize + 1),
  124. readIndex (0), writeIndex (delaySize)
  125. {
  126. buffer.calloc ((size_t) bufferSize);
  127. }
  128. void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
  129. {
  130. float* data = sharedBufferChans.getWritePointer (channel, 0);
  131. HeapBlock<float>& block = buffer;
  132. for (int i = numSamples; --i >= 0;)
  133. {
  134. block [writeIndex] = *data;
  135. *data++ = block [readIndex];
  136. if (++readIndex >= bufferSize) readIndex = 0;
  137. if (++writeIndex >= bufferSize) writeIndex = 0;
  138. }
  139. }
  140. private:
  141. HeapBlock<float> buffer;
  142. const int channel, bufferSize;
  143. int readIndex, writeIndex;
  144. JUCE_DECLARE_NON_COPYABLE (DelayChannelOp)
  145. };
  146. //==============================================================================
  147. struct ProcessBufferOp : public AudioGraphRenderingOp<ProcessBufferOp>
  148. {
  149. ProcessBufferOp (const AudioProcessorGraph::Node::Ptr& n,
  150. const Array<int>& audioChannelsUsed,
  151. const int totalNumChans,
  152. const int midiBuffer)
  153. : node (n),
  154. processor (n->getProcessor()),
  155. audioChannelsToUse (audioChannelsUsed),
  156. totalChans (jmax (1, totalNumChans)),
  157. midiBufferToUse (midiBuffer)
  158. {
  159. audioChannels.calloc ((size_t) totalChans);
  160. while (audioChannelsToUse.size() < totalChans)
  161. audioChannelsToUse.add (0);
  162. }
  163. void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int numSamples)
  164. {
  165. HeapBlock<float*>& channels = audioChannels;
  166. for (int i = totalChans; --i >= 0;)
  167. channels[i] = sharedBufferChans.getWritePointer (audioChannelsToUse.getUnchecked (i), 0);
  168. AudioSampleBuffer buffer (channels, totalChans, numSamples);
  169. if (processor->isSuspended())
  170. {
  171. buffer.clear();
  172. }
  173. else
  174. {
  175. const CarlaRecursiveMutexLocker cml (processor->getCallbackLock());
  176. callProcess (buffer, *sharedMidiBuffers.getUnchecked (midiBufferToUse));
  177. }
  178. }
  179. void callProcess (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
  180. {
  181. processor->processBlock (buffer, midiMessages);
  182. }
  183. const AudioProcessorGraph::Node::Ptr node;
  184. AudioProcessor* const processor;
  185. private:
  186. Array<int> audioChannelsToUse;
  187. HeapBlock<float*> audioChannels;
  188. AudioSampleBuffer tempBuffer;
  189. const int totalChans;
  190. const int midiBufferToUse;
  191. JUCE_DECLARE_NON_COPYABLE (ProcessBufferOp)
  192. };
  193. //==============================================================================
/** Used to calculate the correct sequence of rendering ops needed, based on
    the best re-use of shared buffers at each stage.
*/
struct RenderingOpSequenceCalculator
{
    /** Walks the (already topologically ordered) node list, appending the ops
        needed for each node to `renderingOps` and assigning shared buffer
        indices along the way.  On completion, the graph's overall latency has
        been pushed to the graph via setLatencySamples().
    */
    RenderingOpSequenceCalculator (AudioProcessorGraph& g,
                                   const Array<AudioProcessorGraph::Node*>& nodes,
                                   Array<void*>& renderingOps)
        : graph (g),
          orderedNodes (nodes),
          totalLatency (0)
    {
        nodeIds.add ((uint32) zeroNodeID); // first buffer is read-only zeros
        channels.add (0);

        midiNodeIds.add ((uint32) zeroNodeID);

        for (int i = 0; i < orderedNodes.size(); ++i)
        {
            createRenderingOpsForNode (*orderedNodes.getUnchecked(i), renderingOps, i);
            markAnyUnusedBuffersAsFree (i);
        }

        graph.setLatencySamples (totalLatency);
    }

    /** Number of shared audio channels the compiled sequence needs. */
    int getNumBuffersNeeded() const noexcept        { return nodeIds.size(); }

    /** Number of shared midi buffers the compiled sequence needs. */
    int getNumMidiBuffersNeeded() const noexcept    { return midiNodeIds.size(); }

private:
    //==============================================================================
    AudioProcessorGraph& graph;
    const Array<AudioProcessorGraph::Node*>& orderedNodes;

    // Parallel arrays describing what each shared audio buffer currently holds:
    // nodeIds[i] is the owning node (or a marker below), channels[i] its output channel.
    Array<int> channels;
    Array<uint32> nodeIds, midiNodeIds;

    // Marker ids: freeNodeID = buffer unused, zeroNodeID = the read-only silent buffer.
    enum { freeNodeID = 0xffffffff, zeroNodeID = 0xfffffffe };

    // A buffer is "busy" when it holds a real node's output (not free, not the zero buffer).
    static bool isNodeBusy (uint32 nodeID) noexcept { return nodeID != freeNodeID && nodeID != zeroNodeID; }

    // Parallel arrays mapping node id -> accumulated output latency in samples.
    Array<uint32> nodeDelayIDs;
    Array<int> nodeDelays;
    int totalLatency;

    // Latency at the given node's output; unregistered nodes fall through to
    // Array::operator[] with index -1, which presumably yields a default 0 — matches JUCE.
    int getNodeDelay (const uint32 nodeID) const      { return nodeDelays [nodeDelayIDs.indexOf (nodeID)]; }

    // Records (or updates) the accumulated latency at a node's output.
    void setNodeDelay (const uint32 nodeID, const int latency)
    {
        const int index = nodeDelayIDs.indexOf (nodeID);

        if (index >= 0)
        {
            nodeDelays.set (index, latency);
        }
        else
        {
            nodeDelayIDs.add (nodeID);
            nodeDelays.add (latency);
        }
    }

    // Highest accumulated latency among all nodes feeding any input of nodeID.
    int getInputLatencyForNode (const uint32 nodeID) const
    {
        int maxLatency = 0;

        for (int i = graph.getNumConnections(); --i >= 0;)
        {
            const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

            if (c->destNodeId == nodeID)
                maxLatency = jmax (maxLatency, getNodeDelay (c->sourceNodeId));
        }

        return maxLatency;
    }

    //==============================================================================
    // Emits the ops that gather this node's audio/midi inputs, align their
    // latencies with delay ops, and finally run the node's processor.
    void createRenderingOpsForNode (AudioProcessorGraph::Node& node,
                                    Array<void*>& renderingOps,
                                    const int ourRenderingIndex)
    {
        AudioProcessor& processor = *node.getProcessor();
        const int numIns = processor.getTotalNumInputChannels();
        const int numOuts = processor.getTotalNumOutputChannels();
        const int totalChans = jmax (numIns, numOuts);

        Array<int> audioChannelsToUse;
        int midiBufferToUse = -1;

        int maxLatency = getInputLatencyForNode (node.nodeId);

        for (int inputChan = 0; inputChan < numIns; ++inputChan)
        {
            // get a list of all the inputs to this node
            Array<uint32> sourceNodes;
            Array<int> sourceOutputChans;

            for (int i = graph.getNumConnections(); --i >= 0;)
            {
                const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

                if (c->destNodeId == node.nodeId && c->destChannelIndex == inputChan)
                {
                    sourceNodes.add (c->sourceNodeId);
                    sourceOutputChans.add (c->sourceChannelIndex);
                }
            }

            int bufIndex = -1;

            if (sourceNodes.size() == 0)
            {
                // unconnected input channel
                if (inputChan >= numOuts)
                {
                    // input-only channel: safe to read from the shared zero buffer
                    bufIndex = getReadOnlyEmptyBuffer();
                    jassert (bufIndex >= 0);
                }
                else
                {
                    // channel doubles as an output, so it needs a writable cleared buffer
                    bufIndex = getFreeBuffer (false);
                    renderingOps.add (new ClearChannelOp (bufIndex));
                }
            }
            else if (sourceNodes.size() == 1)
            {
                // channel with a straightforward single input..
                const uint32 srcNode = sourceNodes.getUnchecked(0);
                const int srcChan = sourceOutputChans.getUnchecked(0);

                bufIndex = getBufferContaining (srcNode, srcChan);

                if (bufIndex < 0)
                {
                    // if not found, this is probably a feedback loop
                    bufIndex = getReadOnlyEmptyBuffer();
                    jassert (bufIndex >= 0);
                }

                if (inputChan < numOuts
                     && isBufferNeededLater (ourRenderingIndex,
                                             inputChan,
                                             srcNode, srcChan))
                {
                    // can't mess up this channel because it's needed later by another node, so we
                    // need to use a copy of it..
                    const int newFreeBuffer = getFreeBuffer (false);

                    renderingOps.add (new CopyChannelOp (bufIndex, newFreeBuffer));

                    bufIndex = newFreeBuffer;
                }

                // pad this input with delay so all inputs arrive with equal latency
                const int nodeDelay = getNodeDelay (srcNode);

                if (nodeDelay < maxLatency)
                    renderingOps.add (new DelayChannelOp (bufIndex, maxLatency - nodeDelay));
            }
            else
            {
                // channel with a mix of several inputs..

                // try to find a re-usable channel from our inputs..
                int reusableInputIndex = -1;

                for (int i = 0; i < sourceNodes.size(); ++i)
                {
                    const int sourceBufIndex = getBufferContaining (sourceNodes.getUnchecked(i),
                                                                    sourceOutputChans.getUnchecked(i));

                    if (sourceBufIndex >= 0
                        && ! isBufferNeededLater (ourRenderingIndex,
                                                  inputChan,
                                                  sourceNodes.getUnchecked(i),
                                                  sourceOutputChans.getUnchecked(i)))
                    {
                        // we've found one of our input chans that can be re-used..
                        reusableInputIndex = i;
                        bufIndex = sourceBufIndex;

                        const int nodeDelay = getNodeDelay (sourceNodes.getUnchecked (i));
                        if (nodeDelay < maxLatency)
                            renderingOps.add (new DelayChannelOp (sourceBufIndex, maxLatency - nodeDelay));

                        break;
                    }
                }

                if (reusableInputIndex < 0)
                {
                    // can't re-use any of our input chans, so get a new one and copy everything into it..
                    bufIndex = getFreeBuffer (false);
                    jassert (bufIndex != 0);

                    const int srcIndex = getBufferContaining (sourceNodes.getUnchecked (0),
                                                              sourceOutputChans.getUnchecked (0));
                    if (srcIndex < 0)
                    {
                        // if not found, this is probably a feedback loop
                        renderingOps.add (new ClearChannelOp (bufIndex));
                    }
                    else
                    {
                        renderingOps.add (new CopyChannelOp (srcIndex, bufIndex));
                    }

                    reusableInputIndex = 0;

                    const int nodeDelay = getNodeDelay (sourceNodes.getFirst());

                    if (nodeDelay < maxLatency)
                        renderingOps.add (new DelayChannelOp (bufIndex, maxLatency - nodeDelay));
                }

                // mix the remaining sources into the chosen buffer
                for (int j = 0; j < sourceNodes.size(); ++j)
                {
                    if (j != reusableInputIndex)
                    {
                        int srcIndex = getBufferContaining (sourceNodes.getUnchecked(j),
                                                            sourceOutputChans.getUnchecked(j));
                        if (srcIndex >= 0)
                        {
                            const int nodeDelay = getNodeDelay (sourceNodes.getUnchecked (j));

                            if (nodeDelay < maxLatency)
                            {
                                if (! isBufferNeededLater (ourRenderingIndex, inputChan,
                                                           sourceNodes.getUnchecked(j),
                                                           sourceOutputChans.getUnchecked(j)))
                                {
                                    renderingOps.add (new DelayChannelOp (srcIndex, maxLatency - nodeDelay));
                                }
                                else // buffer is reused elsewhere, can't be delayed
                                {
                                    const int bufferToDelay = getFreeBuffer (false);
                                    renderingOps.add (new CopyChannelOp (srcIndex, bufferToDelay));
                                    renderingOps.add (new DelayChannelOp (bufferToDelay, maxLatency - nodeDelay));
                                    srcIndex = bufferToDelay;
                                }
                            }

                            renderingOps.add (new AddChannelOp (srcIndex, bufIndex));
                        }
                    }
                }
            }

            jassert (bufIndex >= 0);
            audioChannelsToUse.add (bufIndex);

            if (inputChan < numOuts)
                markBufferAsContaining (bufIndex, node.nodeId, inputChan);
        }

        // allocate fresh buffers for outputs beyond the input channel count
        for (int outputChan = numIns; outputChan < numOuts; ++outputChan)
        {
            const int bufIndex = getFreeBuffer (false);
            jassert (bufIndex != 0);
            audioChannelsToUse.add (bufIndex);

            markBufferAsContaining (bufIndex, node.nodeId, outputChan);
        }

        // Now the same thing for midi..
        Array<uint32> midiSourceNodes;

        for (int i = graph.getNumConnections(); --i >= 0;)
        {
            const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

            if (c->destNodeId == node.nodeId && c->destChannelIndex == AudioProcessorGraph::midiChannelIndex)
                midiSourceNodes.add (c->sourceNodeId);
        }

        if (midiSourceNodes.size() == 0)
        {
            // No midi inputs..
            midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi

            if (processor.acceptsMidi() || processor.producesMidi())
                renderingOps.add (new ClearMidiBufferOp (midiBufferToUse));
        }
        else if (midiSourceNodes.size() == 1)
        {
            // One midi input..
            midiBufferToUse = getBufferContaining (midiSourceNodes.getUnchecked(0),
                                                   AudioProcessorGraph::midiChannelIndex);

            if (midiBufferToUse >= 0)
            {
                if (isBufferNeededLater (ourRenderingIndex,
                                         AudioProcessorGraph::midiChannelIndex,
                                         midiSourceNodes.getUnchecked(0),
                                         AudioProcessorGraph::midiChannelIndex))
                {
                    // can't mess up this channel because it's needed later by another node, so we
                    // need to use a copy of it..
                    const int newFreeBuffer = getFreeBuffer (true);
                    renderingOps.add (new CopyMidiBufferOp (midiBufferToUse, newFreeBuffer));
                    midiBufferToUse = newFreeBuffer;
                }
            }
            else
            {
                // probably a feedback loop, so just use an empty one..
                midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi
            }
        }
        else
        {
            // More than one midi input being mixed..
            int reusableInputIndex = -1;

            for (int i = 0; i < midiSourceNodes.size(); ++i)
            {
                const int sourceBufIndex = getBufferContaining (midiSourceNodes.getUnchecked(i),
                                                                AudioProcessorGraph::midiChannelIndex);

                if (sourceBufIndex >= 0
                     && ! isBufferNeededLater (ourRenderingIndex,
                                               AudioProcessorGraph::midiChannelIndex,
                                               midiSourceNodes.getUnchecked(i),
                                               AudioProcessorGraph::midiChannelIndex))
                {
                    // we've found one of our input buffers that can be re-used..
                    reusableInputIndex = i;
                    midiBufferToUse = sourceBufIndex;
                    break;
                }
            }

            if (reusableInputIndex < 0)
            {
                // can't re-use any of our input buffers, so get a new one and copy everything into it..
                midiBufferToUse = getFreeBuffer (true);
                jassert (midiBufferToUse >= 0);

                const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(0),
                                                          AudioProcessorGraph::midiChannelIndex);
                if (srcIndex >= 0)
                    renderingOps.add (new CopyMidiBufferOp (srcIndex, midiBufferToUse));
                else
                    renderingOps.add (new ClearMidiBufferOp (midiBufferToUse));

                reusableInputIndex = 0;
            }

            for (int j = 0; j < midiSourceNodes.size(); ++j)
            {
                if (j != reusableInputIndex)
                {
                    const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(j),
                                                              AudioProcessorGraph::midiChannelIndex);
                    if (srcIndex >= 0)
                        renderingOps.add (new AddMidiBufferOp (srcIndex, midiBufferToUse));
                }
            }
        }

        if (processor.producesMidi())
            markBufferAsContaining (midiBufferToUse, node.nodeId,
                                    AudioProcessorGraph::midiChannelIndex);

        // this node's output latency = worst input latency + its own latency
        setNodeDelay (node.nodeId, maxLatency + processor.getLatencySamples());

        // sink nodes (no outputs) define the graph's overall latency
        if (numOuts == 0)
            totalLatency = maxLatency;

        renderingOps.add (new ProcessBufferOp (&node, audioChannelsToUse,
                                               totalChans, midiBufferToUse));
    }

    //==============================================================================
    // Returns the index of a free buffer (audio or midi), growing the pool if
    // none is free.  Index 0 (the zero buffer) is never handed out.
    int getFreeBuffer (const bool forMidi)
    {
        if (forMidi)
        {
            for (int i = 1; i < midiNodeIds.size(); ++i)
                if (midiNodeIds.getUnchecked(i) == freeNodeID)
                    return i;

            midiNodeIds.add ((uint32) freeNodeID);
            return midiNodeIds.size() - 1;
        }
        else
        {
            for (int i = 1; i < nodeIds.size(); ++i)
                if (nodeIds.getUnchecked(i) == freeNodeID)
                    return i;

            nodeIds.add ((uint32) freeNodeID);
            channels.add (0);
            return nodeIds.size() - 1;
        }
    }

    // Buffer 0 is reserved as the shared, read-only, always-silent channel.
    int getReadOnlyEmptyBuffer() const noexcept
    {
        return 0;
    }

    // Finds which shared buffer currently holds the given node output
    // (audio channel, or midi when outputChannel is the midi pseudo-index);
    // returns -1 if it isn't held anywhere.
    int getBufferContaining (const uint32 nodeId, const int outputChannel) const noexcept
    {
        if (outputChannel == AudioProcessorGraph::midiChannelIndex)
        {
            for (int i = midiNodeIds.size(); --i >= 0;)
                if (midiNodeIds.getUnchecked(i) == nodeId)
                    return i;
        }
        else
        {
            for (int i = nodeIds.size(); --i >= 0;)
                if (nodeIds.getUnchecked(i) == nodeId
                     && channels.getUnchecked(i) == outputChannel)
                    return i;
        }

        return -1;
    }

    // After rendering step `stepIndex`, releases any buffer whose contents no
    // later node will read, so getFreeBuffer() can recycle it.
    void markAnyUnusedBuffersAsFree (const int stepIndex)
    {
        for (int i = 0; i < nodeIds.size(); ++i)
        {
            if (isNodeBusy (nodeIds.getUnchecked(i))
                 && ! isBufferNeededLater (stepIndex, -1,
                                           nodeIds.getUnchecked(i),
                                           channels.getUnchecked(i)))
            {
                nodeIds.set (i, (uint32) freeNodeID);
            }
        }

        for (int i = 0; i < midiNodeIds.size(); ++i)
        {
            if (isNodeBusy (midiNodeIds.getUnchecked(i))
                 && ! isBufferNeededLater (stepIndex, -1,
                                           midiNodeIds.getUnchecked(i),
                                           AudioProcessorGraph::midiChannelIndex))
            {
                midiNodeIds.set (i, (uint32) freeNodeID);
            }
        }
    }

    // True if any node rendered at or after stepIndexToSearchFrom still reads
    // the given node output.  inputChannelOfIndexToIgnore excludes one input of
    // the first step checked (the channel currently being wired up); it is
    // reset to -1 for subsequent steps.
    bool isBufferNeededLater (int stepIndexToSearchFrom,
                              int inputChannelOfIndexToIgnore,
                              const uint32 nodeId,
                              const int outputChanIndex) const
    {
        while (stepIndexToSearchFrom < orderedNodes.size())
        {
            const AudioProcessorGraph::Node* const node = (const AudioProcessorGraph::Node*) orderedNodes.getUnchecked (stepIndexToSearchFrom);

            if (outputChanIndex == AudioProcessorGraph::midiChannelIndex)
            {
                if (inputChannelOfIndexToIgnore != AudioProcessorGraph::midiChannelIndex
                     && graph.getConnectionBetween (nodeId, AudioProcessorGraph::midiChannelIndex,
                                                    node->nodeId, AudioProcessorGraph::midiChannelIndex) != nullptr)
                    return true;
            }
            else
            {
                for (int i = 0; i < node->getProcessor()->getTotalNumInputChannels(); ++i)
                    if (i != inputChannelOfIndexToIgnore
                         && graph.getConnectionBetween (nodeId, outputChanIndex,
                                                        node->nodeId, i) != nullptr)
                        return true;
            }

            inputChannelOfIndexToIgnore = -1;
            ++stepIndexToSearchFrom;
        }

        return false;
    }

    // Records that the given shared buffer now holds nodeId's given output.
    void markBufferAsContaining (int bufferNum, uint32 nodeId, int outputIndex)
    {
        if (outputIndex == AudioProcessorGraph::midiChannelIndex)
        {
            jassert (bufferNum > 0 && bufferNum < midiNodeIds.size());

            midiNodeIds.set (bufferNum, nodeId);
        }
        else
        {
            jassert (bufferNum >= 0 && bufferNum < nodeIds.size());

            nodeIds.set (bufferNum, nodeId);
            channels.set (bufferNum, outputIndex);
        }
    }

    JUCE_DECLARE_NON_COPYABLE (RenderingOpSequenceCalculator)
};
  611. //==============================================================================
// Holds a fast lookup table for checking which nodes are inputs to others.
class ConnectionLookupTable
{
public:
    /** Builds the table from the graph's connection list: one Entry per
        destination node, each holding the sorted set of its direct sources. */
    explicit ConnectionLookupTable (const OwnedArray<AudioProcessorGraph::Connection>& connections)
    {
        for (int i = 0; i < connections.size(); ++i)
        {
            const AudioProcessorGraph::Connection* const c = connections.getUnchecked(i);

            int index;
            Entry* entry = findEntry (c->destNodeId, index);

            if (entry == nullptr)
            {
                // no entry yet for this destination — insert one, keeping
                // the array sorted by destNodeId (findEntry supplies the slot)
                entry = new Entry (c->destNodeId);
                entries.insert (index, entry);
            }

            entry->srcNodes.add (c->sourceNodeId);
        }
    }

    /** True if possibleInputId feeds possibleDestinationId, directly or
        through any chain of intermediate nodes. */
    bool isAnInputTo (const uint32 possibleInputId,
                      const uint32 possibleDestinationId) const noexcept
    {
        return isAnInputToRecursive (possibleInputId, possibleDestinationId, entries.size());
    }

private:
    //==============================================================================
    struct Entry
    {
        explicit Entry (const uint32 destNodeId_) noexcept : destNodeId (destNodeId_) {}

        const uint32 destNodeId;
        SortedSet<uint32> srcNodes;     // direct inputs into destNodeId

        JUCE_DECLARE_NON_COPYABLE (Entry)
    };

    OwnedArray<Entry> entries;          // kept sorted by destNodeId for binary search

    // Depth-first search for an input chain; recursionCheck bounds the depth
    // (starting at entries.size()) so a cyclic graph cannot recurse forever.
    bool isAnInputToRecursive (const uint32 possibleInputId,
                               const uint32 possibleDestinationId,
                               int recursionCheck) const noexcept
    {
        int index;

        if (const Entry* const entry = findEntry (possibleDestinationId, index))
        {
            const SortedSet<uint32>& srcNodes = entry->srcNodes;

            if (srcNodes.contains (possibleInputId))
                return true;

            if (--recursionCheck >= 0)
            {
                for (int i = 0; i < srcNodes.size(); ++i)
                    if (isAnInputToRecursive (possibleInputId, srcNodes.getUnchecked(i), recursionCheck))
                        return true;
            }
        }

        return false;
    }

    // Binary search over the sorted entries array.  Returns the matching Entry
    // or nullptr; in either case insertIndex is set to the position where an
    // entry for destNodeId belongs (used by the constructor when inserting).
    Entry* findEntry (const uint32 destNodeId, int& insertIndex) const noexcept
    {
        Entry* result = nullptr;
        int start = 0;
        int end = entries.size();

        for (;;)
        {
            if (start >= end)
            {
                break;
            }
            else if (destNodeId == entries.getUnchecked (start)->destNodeId)
            {
                result = entries.getUnchecked (start);
                break;
            }
            else
            {
                const int halfway = (start + end) / 2;

                if (halfway == start)
                {
                    // interval narrowed to one element that didn't match:
                    // decide whether the insert point is before or after it
                    if (destNodeId >= entries.getUnchecked (halfway)->destNodeId)
                        ++start;

                    break;
                }
                else if (destNodeId >= entries.getUnchecked (halfway)->destNodeId)
                    start = halfway;
                else
                    end = halfway;
            }
        }

        insertIndex = start;
        return result;
    }

    JUCE_DECLARE_NON_COPYABLE (ConnectionLookupTable)
};
  701. //==============================================================================
  702. struct ConnectionSorter
  703. {
  704. static int compareElements (const AudioProcessorGraph::Connection* const first,
  705. const AudioProcessorGraph::Connection* const second) noexcept
  706. {
  707. if (first->sourceNodeId < second->sourceNodeId) return -1;
  708. if (first->sourceNodeId > second->sourceNodeId) return 1;
  709. if (first->destNodeId < second->destNodeId) return -1;
  710. if (first->destNodeId > second->destNodeId) return 1;
  711. if (first->sourceChannelIndex < second->sourceChannelIndex) return -1;
  712. if (first->sourceChannelIndex > second->sourceChannelIndex) return 1;
  713. if (first->destChannelIndex < second->destChannelIndex) return -1;
  714. if (first->destChannelIndex > second->destChannelIndex) return 1;
  715. return 0;
  716. }
  717. };
  718. }
  719. //==============================================================================
/** Creates a connection record linking one node's output channel (or midi
    port, via midiChannelIndex) to another node's input. */
AudioProcessorGraph::Connection::Connection (const uint32 sourceID, const int sourceChannel,
                                             const uint32 destID, const int destChannel) noexcept
    : sourceNodeId (sourceID), sourceChannelIndex (sourceChannel),
      destNodeId (destID), destChannelIndex (destChannel)
{
}
  726. //==============================================================================
/** Wraps a processor in a graph node; the node takes ownership of the
    processor via its `processor` smart pointer member. */
AudioProcessorGraph::Node::Node (const uint32 nodeID, AudioProcessor* const p) noexcept
    : nodeId (nodeID), processor (p), isPrepared (false)
{
    jassert (processor != nullptr); // a node must always wrap a real processor
}
  732. void AudioProcessorGraph::Node::prepare (const double newSampleRate, const int newBlockSize,
  733. AudioProcessorGraph* const graph)
  734. {
  735. if (! isPrepared)
  736. {
  737. isPrepared = true;
  738. setParentGraph (graph);
  739. processor->setRateAndBufferSizeDetails (newSampleRate, newBlockSize);
  740. processor->prepareToPlay (newSampleRate, newBlockSize);
  741. }
  742. }
  743. void AudioProcessorGraph::Node::unprepare()
  744. {
  745. if (isPrepared)
  746. {
  747. isPrepared = false;
  748. processor->releaseResources();
  749. }
  750. }
/** Tells the wrapped processor which graph owns it — but only the built-in
    AudioGraphIOProcessor type needs this; other processors are unaffected. */
void AudioProcessorGraph::Node::setParentGraph (AudioProcessorGraph* const graph) const
{
    if (AudioProcessorGraph::AudioGraphIOProcessor* const ioProc
            = dynamic_cast<AudioProcessorGraph::AudioGraphIOProcessor*> (processor.get()))
        ioProc->setParentGraph (graph);
}
  757. //==============================================================================
  758. struct AudioProcessorGraph::AudioProcessorGraphBufferHelpers
  759. {
  760. AudioProcessorGraphBufferHelpers()
  761. {
  762. currentAudioInputBuffer = nullptr;
  763. }
  764. void setRenderingBufferSize (int newNumChannels, int newNumSamples)
  765. {
  766. renderingBuffers.setSize (newNumChannels, newNumSamples);
  767. renderingBuffers.clear();
  768. }
  769. void release()
  770. {
  771. renderingBuffers.setSize (1, 1);
  772. currentAudioInputBuffer = nullptr;
  773. currentAudioOutputBuffer.setSize (1, 1);
  774. }
  775. void prepareInOutBuffers(int newNumChannels, int newNumSamples)
  776. {
  777. currentAudioInputBuffer = nullptr;
  778. currentAudioOutputBuffer.setSize (newNumChannels, newNumSamples);
  779. }
  780. AudioSampleBuffer renderingBuffers;
  781. AudioSampleBuffer* currentAudioInputBuffer;
  782. AudioSampleBuffer currentAudioOutputBuffer;
  783. };
  784. //==============================================================================
/** Creates an empty graph with no nodes or connections. */
AudioProcessorGraph::AudioProcessorGraph()
    : lastNodeId (0), audioBuffers (new AudioProcessorGraphBufferHelpers),
      currentMidiInputBuffer (nullptr), isPrepared (false), needsReorder (false)
{
}
AudioProcessorGraph::~AudioProcessorGraph()
{
    // drop the compiled op sequence first, then all nodes and connections
    clearRenderingSequence();
    clear();
}
/** Fixed display name reported for the graph processor itself. */
const String AudioProcessorGraph::getName() const
{
    return "Audio Graph";
}
  799. //==============================================================================
/** Removes every node and connection, and flags the compiled rendering
    sequence as stale so it will be rebuilt. */
void AudioProcessorGraph::clear()
{
    nodes.clear();
    connections.clear();
    needsReorder = true;
}
  806. AudioProcessorGraph::Node* AudioProcessorGraph::getNodeForId (const uint32 nodeId) const
  807. {
  808. for (int i = nodes.size(); --i >= 0;)
  809. if (nodes.getUnchecked(i)->nodeId == nodeId)
  810. return nodes.getUnchecked(i);
  811. return nullptr;
  812. }
// Adds a processor to the graph, wrapped in a new Node.
//
// Passing nodeId == 0 lets the graph allocate the next free id; a non-zero
// id must not already be in use (asserted below). Returns the new Node, or
// nullptr when the processor is null, is the graph itself, or has already
// been added.
AudioProcessorGraph::Node* AudioProcessorGraph::addNode (AudioProcessor* const newProcessor, uint32 nodeId)
{
    if (newProcessor == nullptr || newProcessor == this)
    {
        jassertfalse;
        return nullptr;
    }

    for (int i = nodes.size(); --i >= 0;)
    {
        if (nodes.getUnchecked(i)->getProcessor() == newProcessor)
        {
            jassertfalse; // Cannot add the same object to the graph twice!
            return nullptr;
        }
    }

    if (nodeId == 0)
    {
        nodeId = ++lastNodeId;
    }
    else
    {
        // you can't add a node with an id that already exists in the graph..
        jassert (getNodeForId (nodeId) == nullptr);

        // Defensive: in release builds (where the assert compiles out),
        // evict any node already holding this id.
        removeNode (nodeId);

        // Keep lastNodeId ahead of any explicitly-chosen id so future
        // auto-allocated ids never collide.
        if (nodeId > lastNodeId)
            lastNodeId = nodeId;
    }

    // newProcessor->setPlayHead (getPlayHead());

    Node* const n = new Node (nodeId, newProcessor);
    nodes.add (n);

    // A live graph must recompile its rendering sequence to include the node.
    if (isPrepared)
        needsReorder = true;

    n->setParentGraph (this);
    return n;
}
  848. bool AudioProcessorGraph::removeNode (const uint32 nodeId)
  849. {
  850. disconnectNode (nodeId);
  851. for (int i = nodes.size(); --i >= 0;)
  852. {
  853. if (nodes.getUnchecked(i)->nodeId == nodeId)
  854. {
  855. nodes.remove (i);
  856. if (isPrepared)
  857. needsReorder = true;
  858. return true;
  859. }
  860. }
  861. return false;
  862. }
  863. bool AudioProcessorGraph::removeNode (Node* node)
  864. {
  865. if (node != nullptr)
  866. return removeNode (node->nodeId);
  867. jassertfalse;
  868. return false;
  869. }
  870. //==============================================================================
// Returns the connection exactly matching the four endpoint values, or
// nullptr when the graph contains no such connection.
const AudioProcessorGraph::Connection* AudioProcessorGraph::getConnectionBetween (const uint32 sourceNodeId,
                                                                                  const int sourceChannelIndex,
                                                                                  const uint32 destNodeId,
                                                                                  const int destChannelIndex) const
{
    // Binary-search the sorted connection list using a temporary key.
    const Connection c (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex);
    GraphRenderingOps::ConnectionSorter sorter;

    // indexOfSorted() yields -1 when not found; this relies on the array's
    // range-checked operator[] returning nullptr for that index.
    return connections [connections.indexOfSorted (sorter, &c)];
}
  880. bool AudioProcessorGraph::isConnected (const uint32 possibleSourceNodeId,
  881. const uint32 possibleDestNodeId) const
  882. {
  883. for (int i = connections.size(); --i >= 0;)
  884. {
  885. const Connection* const c = connections.getUnchecked(i);
  886. if (c->sourceNodeId == possibleSourceNodeId
  887. && c->destNodeId == possibleDestNodeId)
  888. {
  889. return true;
  890. }
  891. }
  892. return false;
  893. }
// Checks whether a new connection between the given endpoints would be valid.
//
// Rules enforced, in order:
//  - channel indices must be non-negative, and a node cannot feed itself;
//  - midi may only connect to midi (midiChannelIndex on both ends, or neither);
//  - the source node must exist, with the channel inside its output count
//    (or, for midi, the source processor must produce midi);
//  - the destination node must exist, with the channel inside its input
//    count (or, for midi, the destination processor must accept midi);
//  - an identical connection must not already exist.
bool AudioProcessorGraph::canConnect (const uint32 sourceNodeId,
                                      const int sourceChannelIndex,
                                      const uint32 destNodeId,
                                      const int destChannelIndex) const
{
    if (sourceChannelIndex < 0
         || destChannelIndex < 0
         || sourceNodeId == destNodeId
         || (destChannelIndex == midiChannelIndex) != (sourceChannelIndex == midiChannelIndex))
        return false;

    const Node* const source = getNodeForId (sourceNodeId);

    if (source == nullptr
         || (sourceChannelIndex != midiChannelIndex && sourceChannelIndex >= source->processor->getTotalNumOutputChannels())
         || (sourceChannelIndex == midiChannelIndex && ! source->processor->producesMidi()))
        return false;

    const Node* const dest = getNodeForId (destNodeId);

    if (dest == nullptr
         || (destChannelIndex != midiChannelIndex && destChannelIndex >= dest->processor->getTotalNumInputChannels())
         || (destChannelIndex == midiChannelIndex && ! dest->processor->acceptsMidi()))
        return false;

    return getConnectionBetween (sourceNodeId, sourceChannelIndex,
                                 destNodeId, destChannelIndex) == nullptr;
}
  917. bool AudioProcessorGraph::addConnection (const uint32 sourceNodeId,
  918. const int sourceChannelIndex,
  919. const uint32 destNodeId,
  920. const int destChannelIndex)
  921. {
  922. if (! canConnect (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex))
  923. return false;
  924. GraphRenderingOps::ConnectionSorter sorter;
  925. connections.addSorted (sorter, new Connection (sourceNodeId, sourceChannelIndex,
  926. destNodeId, destChannelIndex));
  927. if (isPrepared)
  928. needsReorder = true;
  929. return true;
  930. }
// Removes the connection at the given index in the connection list and, if
// the graph is currently prepared, flags the rendering sequence for rebuild.
void AudioProcessorGraph::removeConnection (const int index)
{
    connections.remove (index);

    if (isPrepared)
        needsReorder = true;
}
  937. bool AudioProcessorGraph::removeConnection (const uint32 sourceNodeId, const int sourceChannelIndex,
  938. const uint32 destNodeId, const int destChannelIndex)
  939. {
  940. bool doneAnything = false;
  941. for (int i = connections.size(); --i >= 0;)
  942. {
  943. const Connection* const c = connections.getUnchecked(i);
  944. if (c->sourceNodeId == sourceNodeId
  945. && c->destNodeId == destNodeId
  946. && c->sourceChannelIndex == sourceChannelIndex
  947. && c->destChannelIndex == destChannelIndex)
  948. {
  949. removeConnection (i);
  950. doneAnything = true;
  951. }
  952. }
  953. return doneAnything;
  954. }
  955. bool AudioProcessorGraph::disconnectNode (const uint32 nodeId)
  956. {
  957. bool doneAnything = false;
  958. for (int i = connections.size(); --i >= 0;)
  959. {
  960. const Connection* const c = connections.getUnchecked(i);
  961. if (c->sourceNodeId == nodeId || c->destNodeId == nodeId)
  962. {
  963. removeConnection (i);
  964. doneAnything = true;
  965. }
  966. }
  967. return doneAnything;
  968. }
// Returns true when the connection's endpoints are still valid: both nodes
// must exist, and each end's channel must be in range for its processor
// (or, for midiChannelIndex, the processors must produce/accept midi).
bool AudioProcessorGraph::isConnectionLegal (const Connection* const c) const
{
    jassert (c != nullptr);

    const Node* const source = getNodeForId (c->sourceNodeId);
    const Node* const dest = getNodeForId (c->destNodeId);

    return source != nullptr
        && dest != nullptr
        && (c->sourceChannelIndex != midiChannelIndex ? isPositiveAndBelow (c->sourceChannelIndex, source->processor->getTotalNumOutputChannels())
                                                      : source->processor->producesMidi())
        && (c->destChannelIndex != midiChannelIndex ? isPositiveAndBelow (c->destChannelIndex, dest->processor->getTotalNumInputChannels())
                                                    : dest->processor->acceptsMidi());
}
  981. bool AudioProcessorGraph::removeIllegalConnections()
  982. {
  983. bool doneAnything = false;
  984. for (int i = connections.size(); --i >= 0;)
  985. {
  986. if (! isConnectionLegal (connections.getUnchecked(i)))
  987. {
  988. removeConnection (i);
  989. doneAnything = true;
  990. }
  991. }
  992. return doneAnything;
  993. }
  994. //==============================================================================
  995. static void deleteRenderOpArray (Array<void*>& ops)
  996. {
  997. for (int i = ops.size(); --i >= 0;)
  998. delete static_cast<GraphRenderingOps::AudioGraphRenderingOpBase*> (ops.getUnchecked(i));
  999. }
// Throws away the compiled rendering sequence. The swap happens under the
// callback lock so the audio thread never sees a half-cleared array, while
// the actual deletion is done outside the lock to keep the critical
// section short.
void AudioProcessorGraph::clearRenderingSequence()
{
    Array<void*> oldOps;

    {
        const CarlaRecursiveMutexLocker cml (getCallbackLock());
        renderingOps.swapWith (oldOps);
    }

    deleteRenderOpArray (oldOps);
}
  1009. bool AudioProcessorGraph::isAnInputTo (const uint32 possibleInputId,
  1010. const uint32 possibleDestinationId,
  1011. const int recursionCheck) const
  1012. {
  1013. if (recursionCheck > 0)
  1014. {
  1015. for (int i = connections.size(); --i >= 0;)
  1016. {
  1017. const AudioProcessorGraph::Connection* const c = connections.getUnchecked (i);
  1018. if (c->destNodeId == possibleDestinationId
  1019. && (c->sourceNodeId == possibleInputId
  1020. || isAnInputTo (possibleInputId, c->sourceNodeId, recursionCheck - 1)))
  1021. return true;
  1022. }
  1023. }
  1024. return false;
  1025. }
// Recompiles the flat list of rendering operations from the current node
// and connection state.
//
// Nodes are ordered feeders-before-consumers using the connection lookup
// table's isAnInputTo query, then the RenderingOpSequenceCalculator turns
// the ordered nodes into ops and reports how many audio/midi scratch
// buffers the sequence needs. The new sequence is swapped in under the
// callback lock; the previous one is deleted afterwards, outside the lock.
void AudioProcessorGraph::buildRenderingSequence()
{
    Array<void*> newRenderingOps;
    int numRenderingBuffersNeeded = 2;
    int numMidiBuffersNeeded = 1;

    {
        const CarlaRecursiveMutexLocker cml (reorderMutex);

        Array<Node*> orderedNodes;

        {
            const GraphRenderingOps::ConnectionLookupTable table (connections);

            for (int i = 0; i < nodes.size(); ++i)
            {
                Node* const node = nodes.getUnchecked(i);

                node->prepare (getSampleRate(), getBlockSize(), this);

                // Insert this node before the first already-ordered node
                // that it feeds into.
                int j = 0;
                for (; j < orderedNodes.size(); ++j)
                    if (table.isAnInputTo (node->nodeId, ((Node*) orderedNodes.getUnchecked(j))->nodeId))
                        break;

                orderedNodes.insert (j, node);
            }
        }

        GraphRenderingOps::RenderingOpSequenceCalculator calculator (*this, orderedNodes, newRenderingOps);

        numRenderingBuffersNeeded = calculator.getNumBuffersNeeded();
        numMidiBuffersNeeded = calculator.getNumMidiBuffersNeeded();
    }

    {
        // swap over to the new rendering sequence..
        const CarlaRecursiveMutexLocker cml (getCallbackLock());

        audioBuffers->setRenderingBufferSize (numRenderingBuffersNeeded, getBlockSize());

        // Reuse existing midi buffers (cleared), adding more if the new
        // sequence needs them.
        for (int i = midiBuffers.size(); --i >= 0;)
            midiBuffers.getUnchecked(i)->clear();

        while (midiBuffers.size() < numMidiBuffersNeeded)
            midiBuffers.add (new MidiBuffer());

        renderingOps.swapWith (newRenderingOps);
    }

    // delete the old ones..
    deleteRenderOpArray (newRenderingOps);
}
  1064. //==============================================================================
// Prepares the whole graph for playback. The sampleRate argument is
// ignored here: nodes are prepared with getSampleRate()/getBlockSize()
// inside buildRenderingSequence().
void AudioProcessorGraph::prepareToPlay (double /*sampleRate*/, int estimatedSamplesPerBlock)
{
    audioBuffers->prepareInOutBuffers (jmax (1, getTotalNumOutputChannels()), estimatedSamplesPerBlock);

    currentMidiInputBuffer = nullptr;
    currentMidiOutputBuffer.clear();

    // Rebuild the rendering sequence from scratch.
    clearRenderingSequence();
    buildRenderingSequence();

    isPrepared = true;
}
// Counterpart of prepareToPlay(): unprepares every node and shrinks the
// shared buffers. isPrepared is cleared first, before any other state is
// touched.
void AudioProcessorGraph::releaseResources()
{
    isPrepared = false;

    for (int i = 0; i < nodes.size(); ++i)
        nodes.getUnchecked(i)->unprepare();

    audioBuffers->release();
    midiBuffers.clear();

    currentMidiInputBuffer = nullptr;
    currentMidiOutputBuffer.clear();
}
  1084. void AudioProcessorGraph::reset()
  1085. {
  1086. const CarlaRecursiveMutexLocker cml (getCallbackLock());
  1087. for (int i = 0; i < nodes.size(); ++i)
  1088. nodes.getUnchecked(i)->getProcessor()->reset();
  1089. }
  1090. void AudioProcessorGraph::setNonRealtime (bool isProcessingNonRealtime) noexcept
  1091. {
  1092. const CarlaRecursiveMutexLocker cml (getCallbackLock());
  1093. AudioProcessor::setNonRealtime (isProcessingNonRealtime);
  1094. for (int i = 0; i < nodes.size(); ++i)
  1095. nodes.getUnchecked(i)->getProcessor()->setNonRealtime (isProcessingNonRealtime);
  1096. }
  1097. // void AudioProcessorGraph::setPlayHead (AudioPlayHead* audioPlayHead)
  1098. // {
  1099. // const CarlaRecursiveMutexLocker cml (getCallbackLock());
  1100. //
  1101. // AudioProcessor::setPlayHead (audioPlayHead);
  1102. //
  1103. // for (int i = 0; i < nodes.size(); ++i)
  1104. // nodes.getUnchecked(i)->getProcessor()->setPlayHead (audioPlayHead);
  1105. // }
// Runs one block of the graph.
//
// Binds the caller's buffers as the graph's shared input/midi-input,
// clears the output accumulators, executes the pre-compiled rendering op
// sequence, then copies the accumulated audio and midi output back into
// the caller's buffers.
void AudioProcessorGraph::processAudio (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    AudioSampleBuffer& renderingBuffers = audioBuffers->renderingBuffers;
    AudioSampleBuffer*& currentAudioInputBuffer = audioBuffers->currentAudioInputBuffer;
    AudioSampleBuffer& currentAudioOutputBuffer = audioBuffers->currentAudioOutputBuffer;

    const int numSamples = buffer.getNumSamples();

    // The caller's buffer doubles as the graph's input; output is
    // accumulated separately and copied back at the end.
    currentAudioInputBuffer = &buffer;
    currentAudioOutputBuffer.setSize (jmax (1, buffer.getNumChannels()), numSamples);
    currentAudioOutputBuffer.clear();

    currentMidiInputBuffer = &midiMessages;
    currentMidiOutputBuffer.clear();

    // Execute the sequence compiled by buildRenderingSequence().
    for (int i = 0; i < renderingOps.size(); ++i)
    {
        GraphRenderingOps::AudioGraphRenderingOpBase* const op
            = (GraphRenderingOps::AudioGraphRenderingOpBase*) renderingOps.getUnchecked(i);

        op->perform (renderingBuffers, midiBuffers, numSamples);
    }

    for (int i = 0; i < buffer.getNumChannels(); ++i)
        buffer.copyFrom (i, 0, currentAudioOutputBuffer, i, 0, numSamples);

    // Replace the incoming midi with the graph's accumulated midi output.
    midiMessages.clear();
    midiMessages.addEvents (currentMidiOutputBuffer, 0, buffer.getNumSamples(), 0);
}
// The graph as a whole always reports that it can receive midi.
bool AudioProcessorGraph::acceptsMidi() const { return true; }
// The graph as a whole always reports that it can emit midi.
bool AudioProcessorGraph::producesMidi() const { return true; }
// AudioProcessor entry point; all of the work happens in processAudio().
void AudioProcessorGraph::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    processAudio (buffer, midiMessages);
}
  1134. void AudioProcessorGraph::reorderNowIfNeeded()
  1135. {
  1136. if (needsReorder)
  1137. {
  1138. needsReorder = false;
  1139. buildRenderingSequence();
  1140. }
  1141. }
  1142. //==============================================================================
// An I/O processor starts unattached; setParentGraph() later wires it to a
// graph and sets its channel configuration.
AudioProcessorGraph::AudioGraphIOProcessor::AudioGraphIOProcessor (const IODeviceType deviceType)
    : type (deviceType), graph (nullptr)
{
}
// Nothing to clean up: 'graph' is a non-owning back-pointer.
AudioProcessorGraph::AudioGraphIOProcessor::~AudioGraphIOProcessor()
{
}
  1150. const String AudioProcessorGraph::AudioGraphIOProcessor::getName() const
  1151. {
  1152. switch (type)
  1153. {
  1154. case audioOutputNode: return "Audio Output";
  1155. case audioInputNode: return "Audio Input";
  1156. case midiOutputNode: return "Midi Output";
  1157. case midiInputNode: return "Midi Input";
  1158. default: break;
  1159. }
  1160. return String();
  1161. }
#if 0
// NOTE(review): compiled out via #if 0 — PluginDescription is presumably
// not available in this stripped-down build; confirm before re-enabling.
// Fills in a plugin description, reporting the parent graph's channel
// counts for the audio input/output node types.
void AudioProcessorGraph::AudioGraphIOProcessor::fillInPluginDescription (PluginDescription& d) const
{
    d.name = getName();
    d.uid = d.name.hashCode();
    d.category = "I/O devices";
    d.pluginFormatName = "Internal";
    d.manufacturerName = "ROLI Ltd.";
    d.version = "1.0";
    d.isInstrument = false;

    d.numInputChannels = getTotalNumInputChannels();

    if (type == audioOutputNode && graph != nullptr)
        d.numInputChannels = graph->getTotalNumInputChannels();

    d.numOutputChannels = getTotalNumOutputChannels();

    if (type == audioInputNode && graph != nullptr)
        d.numOutputChannels = graph->getTotalNumOutputChannels();
}
#endif
// No per-node resources to allocate; just checks that this node has been
// attached to a parent graph before playback starts.
void AudioProcessorGraph::AudioGraphIOProcessor::prepareToPlay (double, int)
{
    jassert (graph != nullptr);
}
// Nothing to release — this node only borrows the parent graph's buffers.
void AudioProcessorGraph::AudioGraphIOProcessor::releaseResources()
{
}
  1187. void AudioProcessorGraph::AudioGraphIOProcessor::processAudio (AudioSampleBuffer& buffer,
  1188. MidiBuffer& midiMessages)
  1189. {
  1190. AudioSampleBuffer*& currentAudioInputBuffer =
  1191. graph->audioBuffers->currentAudioInputBuffer;
  1192. AudioSampleBuffer& currentAudioOutputBuffer =
  1193. graph->audioBuffers->currentAudioOutputBuffer;
  1194. jassert (graph != nullptr);
  1195. switch (type)
  1196. {
  1197. case audioOutputNode:
  1198. {
  1199. for (int i = jmin (currentAudioOutputBuffer.getNumChannels(),
  1200. buffer.getNumChannels()); --i >= 0;)
  1201. {
  1202. currentAudioOutputBuffer.addFrom (i, 0, buffer, i, 0, buffer.getNumSamples());
  1203. }
  1204. break;
  1205. }
  1206. case audioInputNode:
  1207. {
  1208. for (int i = jmin (currentAudioInputBuffer->getNumChannels(),
  1209. buffer.getNumChannels()); --i >= 0;)
  1210. {
  1211. buffer.copyFrom (i, 0, *currentAudioInputBuffer, i, 0, buffer.getNumSamples());
  1212. }
  1213. break;
  1214. }
  1215. case midiOutputNode:
  1216. graph->currentMidiOutputBuffer.addEvents (midiMessages, 0, buffer.getNumSamples(), 0);
  1217. break;
  1218. case midiInputNode:
  1219. midiMessages.addEvents (*graph->currentMidiInputBuffer, 0, buffer.getNumSamples(), 0);
  1220. break;
  1221. default:
  1222. break;
  1223. }
  1224. }
// AudioProcessor entry point; all of the work happens in processAudio().
void AudioProcessorGraph::AudioGraphIOProcessor::processBlock (AudioSampleBuffer& buffer,
                                                               MidiBuffer& midiMessages)
{
    processAudio (buffer, midiMessages);
}
// Intentionally "inverted": the midi *output* node is the one that consumes
// midi from the graph (see processAudio), so it accepts incoming events.
bool AudioProcessorGraph::AudioGraphIOProcessor::acceptsMidi() const
{
    return type == midiOutputNode;
}
// Intentionally "inverted": the midi *input* node injects the graph's midi
// input into the stream (see processAudio), so it produces events.
bool AudioProcessorGraph::AudioGraphIOProcessor::producesMidi() const
{
    return type == midiInputNode;
}
// True for the node types that feed data into the graph.
bool AudioProcessorGraph::AudioGraphIOProcessor::isInput() const noexcept { return type == audioInputNode || type == midiInputNode; }
// True for the node types that carry data out of the graph.
bool AudioProcessorGraph::AudioGraphIOProcessor::isOutput() const noexcept { return type == audioOutputNode || type == midiOutputNode; }
// Attaches this I/O node to its owning graph and mirrors the graph's
// channel configuration: an audio-output node exposes as many inputs as
// the graph has outputs, and an audio-input node exposes as many outputs
// as the graph has inputs; midi nodes get zero audio channels.
void AudioProcessorGraph::AudioGraphIOProcessor::setParentGraph (AudioProcessorGraph* const newGraph)
{
    graph = newGraph;

    if (graph != nullptr)
    {
        setPlayConfigDetails (type == audioOutputNode ? graph->getTotalNumOutputChannels() : 0,
                              type == audioInputNode ? graph->getTotalNumInputChannels() : 0,
                              getSampleRate(),
                              getBlockSize());
    }
}
  1251. }