Audio plugin host https://kx.studio/carla
You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1499 lines
52KB

  1. /*
  2. ==============================================================================
  3. This file is part of the Water library.
  4. Copyright (c) 2015 ROLI Ltd.
  5. Copyright (C) 2017 Filipe Coelho <falktx@falktx.com>
  6. Permission is granted to use this software under the terms of the GNU
  7. General Public License as published by the Free Software Foundation;
  8. either version 2 of the License, or any later version.
  9. This program is distributed in the hope that it will be useful, but WITHOUT
  10. ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
  11. FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  12. For a full copy of the GNU General Public License see the doc/GPL.txt file.
  13. ==============================================================================
  14. */
  15. #include "AudioProcessorGraph.h"
  16. #include "../containers/SortedSet.h"
  17. namespace water {
// Sentinel channel index used in Connection objects to denote a node's MIDI
// port, distinguishing MIDI connections from ordinary audio channel indices.
const int AudioProcessorGraph::midiChannelIndex = 0x1000;
  19. //==============================================================================
  20. namespace GraphRenderingOps
  21. {
// Abstract base for all pre-compiled rendering operations. The graph is
// "compiled" into a flat sequence of these ops, which are then performed in
// order for every audio block, sharing one pool of audio channels and one
// pool of MIDI buffers.
struct AudioGraphRenderingOpBase
{
    AudioGraphRenderingOpBase() noexcept {}
    virtual ~AudioGraphRenderingOpBase() {}

    // Execute this operation for one block of numSamples frames, using the
    // shared channel/MIDI-buffer pools assigned by the sequence calculator.
    virtual void perform (AudioSampleBuffer& sharedBufferChans,
                          const OwnedArray<MidiBuffer>& sharedMidiBuffers,
                          const int numSamples) = 0;
};
// use CRTP: this intermediate template implements the virtual perform() once,
// forwarding to the derived class's non-virtual perform(). Each concrete op
// then only needs a plain perform() method with the same signature.
template <class Child>
struct AudioGraphRenderingOp : public AudioGraphRenderingOpBase
{
    void perform (AudioSampleBuffer& sharedBufferChans,
                  const OwnedArray<MidiBuffer>& sharedMidiBuffers,
                  const int numSamples) override
    {
        // Static dispatch to the derived op's perform().
        static_cast<Child*> (this)->perform (sharedBufferChans, sharedMidiBuffers, numSamples);
    }
};
  41. //==============================================================================
  42. struct ClearChannelOp : public AudioGraphRenderingOp<ClearChannelOp>
  43. {
  44. ClearChannelOp (const int channel) noexcept : channelNum (channel) {}
  45. void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
  46. {
  47. sharedBufferChans.clear (channelNum, 0, numSamples);
  48. }
  49. const int channelNum;
  50. CARLA_DECLARE_NON_COPY_CLASS (ClearChannelOp)
  51. };
  52. //==============================================================================
  53. struct CopyChannelOp : public AudioGraphRenderingOp<CopyChannelOp>
  54. {
  55. CopyChannelOp (const int srcChan, const int dstChan) noexcept
  56. : srcChannelNum (srcChan), dstChannelNum (dstChan)
  57. {}
  58. void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
  59. {
  60. sharedBufferChans.copyFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples);
  61. }
  62. const int srcChannelNum, dstChannelNum;
  63. CARLA_DECLARE_NON_COPY_CLASS (CopyChannelOp)
  64. };
  65. //==============================================================================
  66. struct AddChannelOp : public AudioGraphRenderingOp<AddChannelOp>
  67. {
  68. AddChannelOp (const int srcChan, const int dstChan) noexcept
  69. : srcChannelNum (srcChan), dstChannelNum (dstChan)
  70. {}
  71. void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
  72. {
  73. sharedBufferChans.addFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples);
  74. }
  75. const int srcChannelNum, dstChannelNum;
  76. CARLA_DECLARE_NON_COPY_CLASS (AddChannelOp)
  77. };
  78. //==============================================================================
  79. struct ClearMidiBufferOp : public AudioGraphRenderingOp<ClearMidiBufferOp>
  80. {
  81. ClearMidiBufferOp (const int buffer) noexcept : bufferNum (buffer) {}
  82. void perform (AudioSampleBuffer&, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int)
  83. {
  84. sharedMidiBuffers.getUnchecked (bufferNum)->clear();
  85. }
  86. const int bufferNum;
  87. CARLA_DECLARE_NON_COPY_CLASS (ClearMidiBufferOp)
  88. };
  89. //==============================================================================
  90. struct CopyMidiBufferOp : public AudioGraphRenderingOp<CopyMidiBufferOp>
  91. {
  92. CopyMidiBufferOp (const int srcBuffer, const int dstBuffer) noexcept
  93. : srcBufferNum (srcBuffer), dstBufferNum (dstBuffer)
  94. {}
  95. void perform (AudioSampleBuffer&, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int)
  96. {
  97. *sharedMidiBuffers.getUnchecked (dstBufferNum) = *sharedMidiBuffers.getUnchecked (srcBufferNum);
  98. }
  99. const int srcBufferNum, dstBufferNum;
  100. CARLA_DECLARE_NON_COPY_CLASS (CopyMidiBufferOp)
  101. };
  102. //==============================================================================
  103. struct AddMidiBufferOp : public AudioGraphRenderingOp<AddMidiBufferOp>
  104. {
  105. AddMidiBufferOp (const int srcBuffer, const int dstBuffer)
  106. : srcBufferNum (srcBuffer), dstBufferNum (dstBuffer)
  107. {}
  108. void perform (AudioSampleBuffer&, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int numSamples)
  109. {
  110. sharedMidiBuffers.getUnchecked (dstBufferNum)
  111. ->addEvents (*sharedMidiBuffers.getUnchecked (srcBufferNum), 0, numSamples, 0);
  112. }
  113. const int srcBufferNum, dstBufferNum;
  114. CARLA_DECLARE_NON_COPY_CLASS (AddMidiBufferOp)
  115. };
//==============================================================================
/** Rendering op that applies an integer-sample delay to one shared channel.
    Used for latency compensation, so that inputs arriving at a node via paths
    of different latencies line up. Implemented as a circular buffer holding
    (delaySize + 1) samples. */
struct DelayChannelOp : public AudioGraphRenderingOp<DelayChannelOp>
{
    DelayChannelOp (const int chan, const int delaySize)
        : channel (chan),
          // One extra slot so the write index can stay exactly delaySize
          // ahead of the read index without the two ever colliding.
          bufferSize (delaySize + 1),
          readIndex (0), writeIndex (delaySize)
    {
        // calloc zero-fills, so the first delaySize output samples are silence.
        buffer.calloc ((size_t) bufferSize);
    }

    void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
    {
        float* data = sharedBufferChans.getWritePointer (channel, 0);
        HeapBlock<float>& block = buffer;

        for (int i = numSamples; --i >= 0;)
        {
            // Stash the incoming sample, replace it in-place with the delayed one.
            block [writeIndex] = *data;
            *data++ = block [readIndex];

            // Wrap both indices around the circular buffer.
            if (++readIndex >= bufferSize) readIndex = 0;
            if (++writeIndex >= bufferSize) writeIndex = 0;
        }
    }

private:
    HeapBlock<float> buffer;          // circular delay-line storage
    const int channel, bufferSize;
    int readIndex, writeIndex;        // writeIndex leads readIndex by delaySize

    CARLA_DECLARE_NON_COPY_CLASS (DelayChannelOp)
};
  144. //==============================================================================
  145. struct ProcessBufferOp : public AudioGraphRenderingOp<ProcessBufferOp>
  146. {
  147. ProcessBufferOp (const AudioProcessorGraph::Node::Ptr& n,
  148. const Array<int>& audioChannelsUsed,
  149. const int totalNumChans,
  150. const int midiBuffer)
  151. : node (n),
  152. processor (n->getProcessor()),
  153. audioChannelsToUse (audioChannelsUsed),
  154. totalChans (jmax (1, totalNumChans)),
  155. midiBufferToUse (midiBuffer)
  156. {
  157. audioChannels.calloc ((size_t) totalChans);
  158. while (audioChannelsToUse.size() < totalChans)
  159. audioChannelsToUse.add (0);
  160. }
  161. void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int numSamples)
  162. {
  163. HeapBlock<float*>& channels = audioChannels;
  164. for (int i = totalChans; --i >= 0;)
  165. channels[i] = sharedBufferChans.getWritePointer (audioChannelsToUse.getUnchecked (i), 0);
  166. AudioSampleBuffer buffer (channels, totalChans, numSamples);
  167. if (processor->isSuspended())
  168. {
  169. buffer.clear();
  170. }
  171. else
  172. {
  173. const CarlaRecursiveMutexLocker cml (processor->getCallbackLock());
  174. callProcess (buffer, *sharedMidiBuffers.getUnchecked (midiBufferToUse));
  175. }
  176. }
  177. void callProcess (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
  178. {
  179. processor->processBlock (buffer, midiMessages);
  180. }
  181. const AudioProcessorGraph::Node::Ptr node;
  182. AudioProcessor* const processor;
  183. private:
  184. Array<int> audioChannelsToUse;
  185. HeapBlock<float*> audioChannels;
  186. AudioSampleBuffer tempBuffer;
  187. const int totalChans;
  188. const int midiBufferToUse;
  189. CARLA_DECLARE_NON_COPY_CLASS (ProcessBufferOp)
  190. };
//==============================================================================
/** Used to calculate the correct sequence of rendering ops needed, based on
    the best re-use of shared buffers at each stage.

    Walks the topologically-ordered node list once, and for each node emits
    the clear/copy/add/delay ops needed to marshal its inputs, then a
    ProcessBufferOp to run it. Buffer ownership is tracked via parallel
    arrays (nodeIds/channels for audio, midiNodeIds for MIDI), where each
    slot records which node's output currently lives in that shared buffer.
*/
struct RenderingOpSequenceCalculator
{
    RenderingOpSequenceCalculator (AudioProcessorGraph& g,
                                   const Array<AudioProcessorGraph::Node*>& nodes,
                                   Array<void*>& renderingOps)
        : graph (g),
          orderedNodes (nodes),
          totalLatency (0)
    {
        nodeIds.add ((uint32) zeroNodeID); // first buffer is read-only zeros
        channels.add (0);

        midiNodeIds.add ((uint32) zeroNodeID);

        for (int i = 0; i < orderedNodes.size(); ++i)
        {
            createRenderingOpsForNode (*orderedNodes.getUnchecked(i), renderingOps, i);
            // Reclaim buffers whose contents no later node will read.
            markAnyUnusedBuffersAsFree (i);
        }

        graph.setLatencySamples (totalLatency);
    }

    // Sizes the caller must allocate for the shared pools (including the
    // read-only zero buffer at index 0).
    int getNumBuffersNeeded() const noexcept      { return nodeIds.size(); }
    int getNumMidiBuffersNeeded() const noexcept  { return midiNodeIds.size(); }

private:
    //==============================================================================
    AudioProcessorGraph& graph;
    const Array<AudioProcessorGraph::Node*>& orderedNodes;
    Array<int> channels;                 // per audio buffer: which output channel it holds
    Array<uint32> nodeIds, midiNodeIds;  // per buffer: owning node id, or a sentinel below

    // Sentinel "owners": freeNodeID = buffer available for reuse,
    // zeroNodeID = the permanent read-only silent buffer.
    enum { freeNodeID = 0xffffffff, zeroNodeID = 0xfffffffe };

    static bool isNodeBusy (uint32 nodeID) noexcept { return nodeID != freeNodeID && nodeID != zeroNodeID; }

    // Sparse map nodeId -> accumulated output latency, kept as parallel arrays.
    Array<uint32> nodeDelayIDs;
    Array<int> nodeDelays;
    int totalLatency;

    // NOTE: indexOf returns -1 for unknown ids; this then reads
    // nodeDelays[-1], relying on Array's range-checked operator[] returning
    // a default value (0) in that case.
    int getNodeDelay (const uint32 nodeID) const { return nodeDelays [nodeDelayIDs.indexOf (nodeID)]; }

    void setNodeDelay (const uint32 nodeID, const int latency)
    {
        const int index = nodeDelayIDs.indexOf (nodeID);

        if (index >= 0)
        {
            nodeDelays.set (index, latency);
        }
        else
        {
            nodeDelayIDs.add (nodeID);
            nodeDelays.add (latency);
        }
    }

    // Worst-case latency among all nodes feeding nodeID; its inputs must all
    // be delayed up to this figure so they stay time-aligned.
    int getInputLatencyForNode (const uint32 nodeID) const
    {
        int maxLatency = 0;

        for (int i = graph.getNumConnections(); --i >= 0;)
        {
            const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

            if (c->destNodeId == nodeID)
                maxLatency = jmax (maxLatency, getNodeDelay (c->sourceNodeId));
        }

        return maxLatency;
    }

    //==============================================================================
    // Emits all the ops needed to prepare this node's inputs and run it.
    // ourRenderingIndex is the node's position in orderedNodes, used to ask
    // "is this buffer still needed by any later node?".
    void createRenderingOpsForNode (AudioProcessorGraph::Node& node,
                                    Array<void*>& renderingOps,
                                    const int ourRenderingIndex)
    {
        AudioProcessor& processor = *node.getProcessor();
        const int numIns = processor.getTotalNumInputChannels();
        const int numOuts = processor.getTotalNumOutputChannels();
        const int totalChans = jmax (numIns, numOuts);

        Array<int> audioChannelsToUse;
        int midiBufferToUse = -1;

        int maxLatency = getInputLatencyForNode (node.nodeId);

        for (int inputChan = 0; inputChan < numIns; ++inputChan)
        {
            // get a list of all the inputs to this node
            Array<uint32> sourceNodes;
            Array<int> sourceOutputChans;

            for (int i = graph.getNumConnections(); --i >= 0;)
            {
                const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

                if (c->destNodeId == node.nodeId && c->destChannelIndex == inputChan)
                {
                    sourceNodes.add (c->sourceNodeId);
                    sourceOutputChans.add (c->sourceChannelIndex);
                }
            }

            int bufIndex = -1;

            if (sourceNodes.size() == 0)
            {
                // unconnected input channel
                if (inputChan >= numOuts)
                {
                    // Input-only channel: safe to point at the shared silent buffer.
                    bufIndex = getReadOnlyEmptyBuffer();
                    jassert (bufIndex >= 0);
                }
                else
                {
                    // The processor will also write this channel, so it needs
                    // its own cleared buffer rather than the read-only one.
                    bufIndex = getFreeBuffer (false);
                    renderingOps.add (new ClearChannelOp (bufIndex));
                }
            }
            else if (sourceNodes.size() == 1)
            {
                // channel with a straightforward single input..
                const uint32 srcNode = sourceNodes.getUnchecked(0);
                const int srcChan = sourceOutputChans.getUnchecked(0);

                bufIndex = getBufferContaining (srcNode, srcChan);

                if (bufIndex < 0)
                {
                    // if not found, this is probably a feedback loop
                    bufIndex = getReadOnlyEmptyBuffer();
                    jassert (bufIndex >= 0);
                }

                if (inputChan < numOuts
                     && isBufferNeededLater (ourRenderingIndex,
                                             inputChan,
                                             srcNode, srcChan))
                {
                    // can't mess up this channel because it's needed later by another node, so we
                    // need to use a copy of it..
                    const int newFreeBuffer = getFreeBuffer (false);

                    renderingOps.add (new CopyChannelOp (bufIndex, newFreeBuffer));

                    bufIndex = newFreeBuffer;
                }

                // Pad this input so it matches the slowest input path.
                const int nodeDelay = getNodeDelay (srcNode);

                if (nodeDelay < maxLatency)
                    renderingOps.add (new DelayChannelOp (bufIndex, maxLatency - nodeDelay));
            }
            else
            {
                // channel with a mix of several inputs..

                // try to find a re-usable channel from our inputs..
                int reusableInputIndex = -1;

                for (int i = 0; i < sourceNodes.size(); ++i)
                {
                    const int sourceBufIndex = getBufferContaining (sourceNodes.getUnchecked(i),
                                                                    sourceOutputChans.getUnchecked(i));

                    if (sourceBufIndex >= 0
                        && ! isBufferNeededLater (ourRenderingIndex,
                                                  inputChan,
                                                  sourceNodes.getUnchecked(i),
                                                  sourceOutputChans.getUnchecked(i)))
                    {
                        // we've found one of our input chans that can be re-used..
                        reusableInputIndex = i;
                        bufIndex = sourceBufIndex;

                        const int nodeDelay = getNodeDelay (sourceNodes.getUnchecked (i));
                        if (nodeDelay < maxLatency)
                            renderingOps.add (new DelayChannelOp (sourceBufIndex, maxLatency - nodeDelay));

                        break;
                    }
                }

                if (reusableInputIndex < 0)
                {
                    // can't re-use any of our input chans, so get a new one and copy everything into it..
                    bufIndex = getFreeBuffer (false);
                    jassert (bufIndex != 0);

                    const int srcIndex = getBufferContaining (sourceNodes.getUnchecked (0),
                                                              sourceOutputChans.getUnchecked (0));
                    if (srcIndex < 0)
                    {
                        // if not found, this is probably a feedback loop
                        renderingOps.add (new ClearChannelOp (bufIndex));
                    }
                    else
                    {
                        renderingOps.add (new CopyChannelOp (srcIndex, bufIndex));
                    }

                    // Treat source 0 as "already mixed in" for the loop below.
                    reusableInputIndex = 0;

                    const int nodeDelay = getNodeDelay (sourceNodes.getFirst());

                    if (nodeDelay < maxLatency)
                        renderingOps.add (new DelayChannelOp (bufIndex, maxLatency - nodeDelay));
                }

                // Mix every remaining source into the accumulator channel.
                for (int j = 0; j < sourceNodes.size(); ++j)
                {
                    if (j != reusableInputIndex)
                    {
                        int srcIndex = getBufferContaining (sourceNodes.getUnchecked(j),
                                                            sourceOutputChans.getUnchecked(j));
                        if (srcIndex >= 0)
                        {
                            const int nodeDelay = getNodeDelay (sourceNodes.getUnchecked (j));

                            if (nodeDelay < maxLatency)
                            {
                                if (! isBufferNeededLater (ourRenderingIndex, inputChan,
                                                           sourceNodes.getUnchecked(j),
                                                           sourceOutputChans.getUnchecked(j)))
                                {
                                    // Safe to delay in place.
                                    renderingOps.add (new DelayChannelOp (srcIndex, maxLatency - nodeDelay));
                                }
                                else // buffer is reused elsewhere, can't be delayed
                                {
                                    const int bufferToDelay = getFreeBuffer (false);
                                    renderingOps.add (new CopyChannelOp (srcIndex, bufferToDelay));
                                    renderingOps.add (new DelayChannelOp (bufferToDelay, maxLatency - nodeDelay));
                                    srcIndex = bufferToDelay;
                                }
                            }

                            renderingOps.add (new AddChannelOp (srcIndex, bufIndex));
                        }
                    }
                }
            }

            jassert (bufIndex >= 0);
            audioChannelsToUse.add (bufIndex);

            // If the processor writes this channel too, record that this
            // buffer will hold the node's output after processing.
            if (inputChan < numOuts)
                markBufferAsContaining (bufIndex, node.nodeId, inputChan);
        }

        // Output-only channels: grab a fresh buffer for each.
        for (int outputChan = numIns; outputChan < numOuts; ++outputChan)
        {
            const int bufIndex = getFreeBuffer (false);
            jassert (bufIndex != 0);
            audioChannelsToUse.add (bufIndex);

            markBufferAsContaining (bufIndex, node.nodeId, outputChan);
        }

        // Now the same thing for midi..
        Array<uint32> midiSourceNodes;

        for (int i = graph.getNumConnections(); --i >= 0;)
        {
            const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

            if (c->destNodeId == node.nodeId && c->destChannelIndex == AudioProcessorGraph::midiChannelIndex)
                midiSourceNodes.add (c->sourceNodeId);
        }

        if (midiSourceNodes.size() == 0)
        {
            // No midi inputs..
            midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi

            if (processor.acceptsMidi() || processor.producesMidi())
                renderingOps.add (new ClearMidiBufferOp (midiBufferToUse));
        }
        else if (midiSourceNodes.size() == 1)
        {
            // One midi input..
            midiBufferToUse = getBufferContaining (midiSourceNodes.getUnchecked(0),
                                                   AudioProcessorGraph::midiChannelIndex);

            if (midiBufferToUse >= 0)
            {
                if (isBufferNeededLater (ourRenderingIndex,
                                         AudioProcessorGraph::midiChannelIndex,
                                         midiSourceNodes.getUnchecked(0),
                                         AudioProcessorGraph::midiChannelIndex))
                {
                    // can't mess up this channel because it's needed later by another node, so we
                    // need to use a copy of it..
                    const int newFreeBuffer = getFreeBuffer (true);

                    renderingOps.add (new CopyMidiBufferOp (midiBufferToUse, newFreeBuffer));

                    midiBufferToUse = newFreeBuffer;
                }
            }
            else
            {
                // probably a feedback loop, so just use an empty one..
                midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi
            }
        }
        else
        {
            // More than one midi input being mixed..
            int reusableInputIndex = -1;

            for (int i = 0; i < midiSourceNodes.size(); ++i)
            {
                const int sourceBufIndex = getBufferContaining (midiSourceNodes.getUnchecked(i),
                                                                AudioProcessorGraph::midiChannelIndex);

                if (sourceBufIndex >= 0
                     && ! isBufferNeededLater (ourRenderingIndex,
                                               AudioProcessorGraph::midiChannelIndex,
                                               midiSourceNodes.getUnchecked(i),
                                               AudioProcessorGraph::midiChannelIndex))
                {
                    // we've found one of our input buffers that can be re-used..
                    reusableInputIndex = i;
                    midiBufferToUse = sourceBufIndex;
                    break;
                }
            }

            if (reusableInputIndex < 0)
            {
                // can't re-use any of our input buffers, so get a new one and copy everything into it..
                midiBufferToUse = getFreeBuffer (true);
                jassert (midiBufferToUse >= 0);

                const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(0),
                                                          AudioProcessorGraph::midiChannelIndex);
                if (srcIndex >= 0)
                    renderingOps.add (new CopyMidiBufferOp (srcIndex, midiBufferToUse));
                else
                    renderingOps.add (new ClearMidiBufferOp (midiBufferToUse));

                reusableInputIndex = 0;
            }

            // Merge the remaining MIDI sources into the accumulator buffer.
            for (int j = 0; j < midiSourceNodes.size(); ++j)
            {
                if (j != reusableInputIndex)
                {
                    const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(j),
                                                              AudioProcessorGraph::midiChannelIndex);
                    if (srcIndex >= 0)
                        renderingOps.add (new AddMidiBufferOp (srcIndex, midiBufferToUse));
                }
            }
        }

        if (processor.producesMidi())
            markBufferAsContaining (midiBufferToUse, node.nodeId,
                                    AudioProcessorGraph::midiChannelIndex);

        // This node's output latency = worst input latency + its own latency.
        setNodeDelay (node.nodeId, maxLatency + processor.getLatencySamples());

        // Terminal node (no outputs): its input latency is reported as the
        // graph latency. NOTE(review): a plain assignment, so with several
        // terminal nodes the last one processed wins — confirm intended.
        if (numOuts == 0)
            totalLatency = maxLatency;

        renderingOps.add (new ProcessBufferOp (&node, audioChannelsToUse,
                                               totalChans, midiBufferToUse));
    }

    //==============================================================================
    // Finds a reusable buffer slot, or grows the pool by one. Index 0 (the
    // read-only zero buffer) is never handed out.
    int getFreeBuffer (const bool forMidi)
    {
        if (forMidi)
        {
            for (int i = 1; i < midiNodeIds.size(); ++i)
                if (midiNodeIds.getUnchecked(i) == freeNodeID)
                    return i;

            midiNodeIds.add ((uint32) freeNodeID);
            return midiNodeIds.size() - 1;
        }
        else
        {
            for (int i = 1; i < nodeIds.size(); ++i)
                if (nodeIds.getUnchecked(i) == freeNodeID)
                    return i;

            nodeIds.add ((uint32) freeNodeID);
            channels.add (0);
            return nodeIds.size() - 1;
        }
    }

    // The permanent silent buffer lives at slot 0.
    int getReadOnlyEmptyBuffer() const noexcept
    {
        return 0;
    }

    // Returns the pool slot currently holding the given node's output
    // channel (or MIDI output), or -1 if it isn't live in any buffer.
    int getBufferContaining (const uint32 nodeId, const int outputChannel) const noexcept
    {
        if (outputChannel == AudioProcessorGraph::midiChannelIndex)
        {
            for (int i = midiNodeIds.size(); --i >= 0;)
                if (midiNodeIds.getUnchecked(i) == nodeId)
                    return i;
        }
        else
        {
            for (int i = nodeIds.size(); --i >= 0;)
                if (nodeIds.getUnchecked(i) == nodeId
                     && channels.getUnchecked(i) == outputChannel)
                    return i;
        }

        return -1;
    }

    // After rendering step stepIndex, releases any buffer whose contents no
    // subsequent node will read.
    void markAnyUnusedBuffersAsFree (const int stepIndex)
    {
        for (int i = 0; i < nodeIds.size(); ++i)
        {
            if (isNodeBusy (nodeIds.getUnchecked(i))
                 && ! isBufferNeededLater (stepIndex, -1,
                                           nodeIds.getUnchecked(i),
                                           channels.getUnchecked(i)))
            {
                nodeIds.set (i, (uint32) freeNodeID);
            }
        }

        for (int i = 0; i < midiNodeIds.size(); ++i)
        {
            if (isNodeBusy (midiNodeIds.getUnchecked(i))
                 && ! isBufferNeededLater (stepIndex, -1,
                                           midiNodeIds.getUnchecked(i),
                                           AudioProcessorGraph::midiChannelIndex))
            {
                midiNodeIds.set (i, (uint32) freeNodeID);
            }
        }
    }

    // True if nodeId's given output feeds any node at/after
    // stepIndexToSearchFrom. inputChannelOfIndexToIgnore excludes the input
    // currently being wired up on the first node examined (it is reset to -1
    // for subsequent steps).
    bool isBufferNeededLater (int stepIndexToSearchFrom,
                              int inputChannelOfIndexToIgnore,
                              const uint32 nodeId,
                              const int outputChanIndex) const
    {
        while (stepIndexToSearchFrom < orderedNodes.size())
        {
            const AudioProcessorGraph::Node* const node = (const AudioProcessorGraph::Node*) orderedNodes.getUnchecked (stepIndexToSearchFrom);

            if (outputChanIndex == AudioProcessorGraph::midiChannelIndex)
            {
                if (inputChannelOfIndexToIgnore != AudioProcessorGraph::midiChannelIndex
                     && graph.getConnectionBetween (nodeId, AudioProcessorGraph::midiChannelIndex,
                                                    node->nodeId, AudioProcessorGraph::midiChannelIndex) != nullptr)
                    return true;
            }
            else
            {
                for (int i = 0; i < node->getProcessor()->getTotalNumInputChannels(); ++i)
                    if (i != inputChannelOfIndexToIgnore
                         && graph.getConnectionBetween (nodeId, outputChanIndex,
                                                        node->nodeId, i) != nullptr)
                        return true;
            }

            inputChannelOfIndexToIgnore = -1;
            ++stepIndexToSearchFrom;
        }

        return false;
    }

    // Records that buffer slot bufferNum now holds nodeId's given output.
    void markBufferAsContaining (int bufferNum, uint32 nodeId, int outputIndex)
    {
        if (outputIndex == AudioProcessorGraph::midiChannelIndex)
        {
            jassert (bufferNum > 0 && bufferNum < midiNodeIds.size());

            midiNodeIds.set (bufferNum, nodeId);
        }
        else
        {
            jassert (bufferNum >= 0 && bufferNum < nodeIds.size());

            nodeIds.set (bufferNum, nodeId);
            channels.set (bufferNum, outputIndex);
        }
    }

    CARLA_DECLARE_NON_COPY_CLASS (RenderingOpSequenceCalculator)
};
//==============================================================================
// Holds a fast lookup table for checking which nodes are inputs to others.
class ConnectionLookupTable
{
public:
    explicit ConnectionLookupTable (const OwnedArray<AudioProcessorGraph::Connection>& connections)
    {
        // Build one entry per destination node, each holding the set of
        // nodes that feed it directly.
        for (int i = 0; i < connections.size(); ++i)
        {
            const AudioProcessorGraph::Connection* const c = connections.getUnchecked(i);

            int index;
            Entry* entry = findEntry (c->destNodeId, index);

            if (entry == nullptr)
            {
                entry = new Entry (c->destNodeId);
                entries.insert (index, entry);   // insert at the position that keeps 'entries' sorted
            }

            entry->srcNodes.add (c->sourceNodeId);
        }
    }

    // True if possibleInputId feeds possibleDestinationId directly or through
    // any chain of intermediate nodes.
    bool isAnInputTo (const uint32 possibleInputId,
                      const uint32 possibleDestinationId) const noexcept
    {
        return isAnInputToRecursive (possibleInputId, possibleDestinationId, entries.size());
    }

private:
    //==============================================================================
    struct Entry
    {
        explicit Entry (const uint32 destNodeId_) noexcept : destNodeId (destNodeId_) {}

        const uint32 destNodeId;        // the node these sources feed
        SortedSet<uint32> srcNodes;     // all of its direct input nodes

        CARLA_DECLARE_NON_COPY_CLASS (Entry)
    };

    OwnedArray<Entry> entries;  // kept sorted by destNodeId (see findEntry)

    // Depth-first search through input chains. recursionCheck starts at the
    // entry count and is decremented per level, bounding the depth so a
    // cyclic graph can't recurse forever.
    bool isAnInputToRecursive (const uint32 possibleInputId,
                               const uint32 possibleDestinationId,
                               int recursionCheck) const noexcept
    {
        int index;

        if (const Entry* const entry = findEntry (possibleDestinationId, index))
        {
            const SortedSet<uint32>& srcNodes = entry->srcNodes;

            // Direct connection?
            if (srcNodes.contains (possibleInputId))
                return true;

            if (--recursionCheck >= 0)
            {
                // Otherwise check each direct source's own inputs.
                for (int i = 0; i < srcNodes.size(); ++i)
                    if (isAnInputToRecursive (possibleInputId, srcNodes.getUnchecked(i), recursionCheck))
                        return true;
            }
        }

        return false;
    }

    // Binary search over the sorted 'entries' array. Returns the matching
    // entry or nullptr; in both cases insertIndex receives the position at
    // which an entry for destNodeId belongs (used by the constructor).
    Entry* findEntry (const uint32 destNodeId, int& insertIndex) const noexcept
    {
        Entry* result = nullptr;
        int start = 0;
        int end = entries.size();

        for (;;)
        {
            if (start >= end)
            {
                break;   // empty range: not found
            }
            else if (destNodeId == entries.getUnchecked (start)->destNodeId)
            {
                result = entries.getUnchecked (start);
                break;
            }
            else
            {
                const int halfway = (start + end) / 2;

                if (halfway == start)
                {
                    // Range narrowed to a single non-matching element: the
                    // new id belongs before or after it.
                    if (destNodeId >= entries.getUnchecked (halfway)->destNodeId)
                        ++start;

                    break;
                }
                else if (destNodeId >= entries.getUnchecked (halfway)->destNodeId)
                    start = halfway;
                else
                    end = halfway;
            }
        }

        insertIndex = start;
        return result;
    }

    CARLA_DECLARE_NON_COPY_CLASS (ConnectionLookupTable)
};
  699. //==============================================================================
  700. struct ConnectionSorter
  701. {
  702. static int compareElements (const AudioProcessorGraph::Connection* const first,
  703. const AudioProcessorGraph::Connection* const second) noexcept
  704. {
  705. if (first->sourceNodeId < second->sourceNodeId) return -1;
  706. if (first->sourceNodeId > second->sourceNodeId) return 1;
  707. if (first->destNodeId < second->destNodeId) return -1;
  708. if (first->destNodeId > second->destNodeId) return 1;
  709. if (first->sourceChannelIndex < second->sourceChannelIndex) return -1;
  710. if (first->sourceChannelIndex > second->sourceChannelIndex) return 1;
  711. if (first->destChannelIndex < second->destChannelIndex) return -1;
  712. if (first->destChannelIndex > second->destChannelIndex) return 1;
  713. return 0;
  714. }
  715. };
  716. }
//==============================================================================
// One directed edge of the graph: a (node, channel) source pair feeding a
// (node, channel) destination pair. midiChannelIndex is used as the channel
// value on either side for MIDI connections.
AudioProcessorGraph::Connection::Connection (const uint32 sourceID, const int sourceChannel,
                                             const uint32 destID, const int destChannel) noexcept
    : sourceNodeId (sourceID), sourceChannelIndex (sourceChannel),
      destNodeId (destID), destChannelIndex (destChannel)
{
}
//==============================================================================
// A node wraps one AudioProcessor plus its graph-unique id; it starts
// unprepared until prepare() is called.
AudioProcessorGraph::Node::Node (const uint32 nodeID, AudioProcessor* const p) noexcept
    : nodeId (nodeID), processor (p), isPrepared (false)
{
    jassert (processor != nullptr);
}
  730. void AudioProcessorGraph::Node::prepare (const double newSampleRate, const int newBlockSize,
  731. AudioProcessorGraph* const graph)
  732. {
  733. if (! isPrepared)
  734. {
  735. isPrepared = true;
  736. setParentGraph (graph);
  737. processor->setRateAndBufferSizeDetails (newSampleRate, newBlockSize);
  738. processor->prepareToPlay (newSampleRate, newBlockSize);
  739. }
  740. }
  741. void AudioProcessorGraph::Node::unprepare()
  742. {
  743. if (isPrepared)
  744. {
  745. isPrepared = false;
  746. processor->releaseResources();
  747. }
  748. }
  749. void AudioProcessorGraph::Node::setParentGraph (AudioProcessorGraph* const graph) const
  750. {
  751. if (AudioProcessorGraph::AudioGraphIOProcessor* const ioProc
  752. = dynamic_cast<AudioProcessorGraph::AudioGraphIOProcessor*> (processor.get()))
  753. ioProc->setParentGraph (graph);
  754. }
  755. //==============================================================================
  756. struct AudioProcessorGraph::AudioProcessorGraphBufferHelpers
  757. {
  758. AudioProcessorGraphBufferHelpers()
  759. {
  760. currentAudioInputBuffer = nullptr;
  761. }
  762. void setRenderingBufferSize (int newNumChannels, int newNumSamples)
  763. {
  764. renderingBuffers.setSize (newNumChannels, newNumSamples);
  765. renderingBuffers.clear();
  766. }
  767. void release()
  768. {
  769. renderingBuffers.setSize (1, 1);
  770. currentAudioInputBuffer = nullptr;
  771. currentAudioOutputBuffer.setSize (1, 1);
  772. }
  773. void prepareInOutBuffers(int newNumChannels, int newNumSamples)
  774. {
  775. currentAudioInputBuffer = nullptr;
  776. currentAudioOutputBuffer.setSize (newNumChannels, newNumSamples);
  777. }
  778. AudioSampleBuffer renderingBuffers;
  779. AudioSampleBuffer* currentAudioInputBuffer;
  780. AudioSampleBuffer currentAudioOutputBuffer;
  781. };
//==============================================================================
// Starts with an empty graph: no nodes, no connections, buffers allocated
// lazily, and nothing prepared until prepareToPlay-time setup runs.
AudioProcessorGraph::AudioProcessorGraph()
    : lastNodeId (0), audioBuffers (new AudioProcessorGraphBufferHelpers),
      currentMidiInputBuffer (nullptr), isPrepared (false), needsReorder (false)
{
}
AudioProcessorGraph::~AudioProcessorGraph()
{
    // Destroy the compiled rendering sequence first, since its ops hold
    // references to the nodes that clear() is about to delete.
    clearRenderingSequence();
    clear();
}
// Fixed display name for the graph pseudo-processor.
const String AudioProcessorGraph::getName() const
{
    return "Audio Graph";
}
//==============================================================================
// Removes every node and connection, and flags the compiled rendering
// sequence as stale so it gets rebuilt.
void AudioProcessorGraph::clear()
{
    nodes.clear();
    connections.clear();
    needsReorder = true;
}
  804. AudioProcessorGraph::Node* AudioProcessorGraph::getNodeForId (const uint32 nodeId) const
  805. {
  806. for (int i = nodes.size(); --i >= 0;)
  807. if (nodes.getUnchecked(i)->nodeId == nodeId)
  808. return nodes.getUnchecked(i);
  809. return nullptr;
  810. }
// Adds a processor to the graph, wrapping it in a new Node. Pass nodeId == 0
// to have an id assigned automatically; a non-zero id must not already be in
// use. Returns the new node, or nullptr on precondition failure.
AudioProcessorGraph::Node* AudioProcessorGraph::addNode (AudioProcessor* const newProcessor, uint32 nodeId)
{
    CARLA_SAFE_ASSERT_RETURN (newProcessor != nullptr && newProcessor != this, nullptr);

    // A given processor instance may only appear in the graph once.
    for (int i = nodes.size(); --i >= 0;)
    {
        CARLA_SAFE_ASSERT_RETURN(nodes.getUnchecked(i)->getProcessor() != newProcessor, nullptr);
    }

    if (nodeId == 0)
    {
        nodeId = ++lastNodeId;   // auto-assign the next id
    }
    else
    {
        // you can't add a node with an id that already exists in the graph..
        jassert (getNodeForId (nodeId) == nullptr);
        removeNode (nodeId);

        // Keep the auto-id counter ahead of any explicitly supplied ids.
        if (nodeId > lastNodeId)
            lastNodeId = nodeId;
    }

    Node* const n = new Node (nodeId, newProcessor);
    nodes.add (n);

    // If we're already playing, the rendering sequence must be rebuilt to
    // include the new node.
    if (isPrepared)
        needsReorder = true;

    n->setParentGraph (this);
    return n;
}
  837. bool AudioProcessorGraph::removeNode (const uint32 nodeId)
  838. {
  839. disconnectNode (nodeId);
  840. for (int i = nodes.size(); --i >= 0;)
  841. {
  842. if (nodes.getUnchecked(i)->nodeId == nodeId)
  843. {
  844. nodes.remove (i);
  845. if (isPrepared)
  846. needsReorder = true;
  847. return true;
  848. }
  849. }
  850. return false;
  851. }
  852. bool AudioProcessorGraph::removeNode (Node* node)
  853. {
  854. CARLA_SAFE_ASSERT_RETURN(node != nullptr, false);
  855. return removeNode (node->nodeId);
  856. }
  857. //==============================================================================
  858. const AudioProcessorGraph::Connection* AudioProcessorGraph::getConnectionBetween (const uint32 sourceNodeId,
  859. const int sourceChannelIndex,
  860. const uint32 destNodeId,
  861. const int destChannelIndex) const
  862. {
  863. const Connection c (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex);
  864. GraphRenderingOps::ConnectionSorter sorter;
  865. return connections [connections.indexOfSorted (sorter, &c)];
  866. }
  867. bool AudioProcessorGraph::isConnected (const uint32 possibleSourceNodeId,
  868. const uint32 possibleDestNodeId) const
  869. {
  870. for (int i = connections.size(); --i >= 0;)
  871. {
  872. const Connection* const c = connections.getUnchecked(i);
  873. if (c->sourceNodeId == possibleSourceNodeId
  874. && c->destNodeId == possibleDestNodeId)
  875. {
  876. return true;
  877. }
  878. }
  879. return false;
  880. }
  881. bool AudioProcessorGraph::canConnect (const uint32 sourceNodeId,
  882. const int sourceChannelIndex,
  883. const uint32 destNodeId,
  884. const int destChannelIndex) const
  885. {
  886. if (sourceChannelIndex < 0
  887. || destChannelIndex < 0
  888. || sourceNodeId == destNodeId
  889. || (destChannelIndex == midiChannelIndex) != (sourceChannelIndex == midiChannelIndex))
  890. return false;
  891. const Node* const source = getNodeForId (sourceNodeId);
  892. if (source == nullptr
  893. || (sourceChannelIndex != midiChannelIndex && sourceChannelIndex >= source->processor->getTotalNumOutputChannels())
  894. || (sourceChannelIndex == midiChannelIndex && ! source->processor->producesMidi()))
  895. return false;
  896. const Node* const dest = getNodeForId (destNodeId);
  897. if (dest == nullptr
  898. || (destChannelIndex != midiChannelIndex && destChannelIndex >= dest->processor->getTotalNumInputChannels())
  899. || (destChannelIndex == midiChannelIndex && ! dest->processor->acceptsMidi()))
  900. return false;
  901. return getConnectionBetween (sourceNodeId, sourceChannelIndex,
  902. destNodeId, destChannelIndex) == nullptr;
  903. }
  904. bool AudioProcessorGraph::addConnection (const uint32 sourceNodeId,
  905. const int sourceChannelIndex,
  906. const uint32 destNodeId,
  907. const int destChannelIndex)
  908. {
  909. if (! canConnect (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex))
  910. return false;
  911. GraphRenderingOps::ConnectionSorter sorter;
  912. connections.addSorted (sorter, new Connection (sourceNodeId, sourceChannelIndex,
  913. destNodeId, destChannelIndex));
  914. if (isPrepared)
  915. needsReorder = true;
  916. return true;
  917. }
// Removes (and deletes) the connection at the given index in the sorted
// connection list, flagging a rendering-sequence rebuild if prepared.
void AudioProcessorGraph::removeConnection (const int index)
{
    connections.remove (index);

    if (isPrepared)
        needsReorder = true;
}
  924. bool AudioProcessorGraph::removeConnection (const uint32 sourceNodeId, const int sourceChannelIndex,
  925. const uint32 destNodeId, const int destChannelIndex)
  926. {
  927. bool doneAnything = false;
  928. for (int i = connections.size(); --i >= 0;)
  929. {
  930. const Connection* const c = connections.getUnchecked(i);
  931. if (c->sourceNodeId == sourceNodeId
  932. && c->destNodeId == destNodeId
  933. && c->sourceChannelIndex == sourceChannelIndex
  934. && c->destChannelIndex == destChannelIndex)
  935. {
  936. removeConnection (i);
  937. doneAnything = true;
  938. }
  939. }
  940. return doneAnything;
  941. }
  942. bool AudioProcessorGraph::disconnectNode (const uint32 nodeId)
  943. {
  944. bool doneAnything = false;
  945. for (int i = connections.size(); --i >= 0;)
  946. {
  947. const Connection* const c = connections.getUnchecked(i);
  948. if (c->sourceNodeId == nodeId || c->destNodeId == nodeId)
  949. {
  950. removeConnection (i);
  951. doneAnything = true;
  952. }
  953. }
  954. return doneAnything;
  955. }
  956. bool AudioProcessorGraph::isConnectionLegal (const Connection* const c) const
  957. {
  958. jassert (c != nullptr);
  959. const Node* const source = getNodeForId (c->sourceNodeId);
  960. const Node* const dest = getNodeForId (c->destNodeId);
  961. return source != nullptr
  962. && dest != nullptr
  963. && (c->sourceChannelIndex != midiChannelIndex ? isPositiveAndBelow (c->sourceChannelIndex, source->processor->getTotalNumOutputChannels())
  964. : source->processor->producesMidi())
  965. && (c->destChannelIndex != midiChannelIndex ? isPositiveAndBelow (c->destChannelIndex, dest->processor->getTotalNumInputChannels())
  966. : dest->processor->acceptsMidi());
  967. }
  968. bool AudioProcessorGraph::removeIllegalConnections()
  969. {
  970. bool doneAnything = false;
  971. for (int i = connections.size(); --i >= 0;)
  972. {
  973. if (! isConnectionLegal (connections.getUnchecked(i)))
  974. {
  975. removeConnection (i);
  976. doneAnything = true;
  977. }
  978. }
  979. return doneAnything;
  980. }
  981. //==============================================================================
  982. static void deleteRenderOpArray (Array<void*>& ops)
  983. {
  984. for (int i = ops.size(); --i >= 0;)
  985. delete static_cast<GraphRenderingOps::AudioGraphRenderingOpBase*> (ops.getUnchecked(i));
  986. }
// Detaches the current rendering sequence and frees it. The swap happens
// under the callback lock so the audio thread never sees a half-cleared
// list; the (potentially slow) deletion is done outside the lock.
void AudioProcessorGraph::clearRenderingSequence()
{
    Array<void*> oldOps;

    {
        const CarlaRecursiveMutexLocker cml (getCallbackLock());
        renderingOps.swapWith (oldOps);
    }

    deleteRenderOpArray (oldOps);
}
  996. bool AudioProcessorGraph::isAnInputTo (const uint32 possibleInputId,
  997. const uint32 possibleDestinationId,
  998. const int recursionCheck) const
  999. {
  1000. if (recursionCheck > 0)
  1001. {
  1002. for (int i = connections.size(); --i >= 0;)
  1003. {
  1004. const AudioProcessorGraph::Connection* const c = connections.getUnchecked (i);
  1005. if (c->destNodeId == possibleDestinationId
  1006. && (c->sourceNodeId == possibleInputId
  1007. || isAnInputTo (possibleInputId, c->sourceNodeId, recursionCheck - 1)))
  1008. return true;
  1009. }
  1010. }
  1011. return false;
  1012. }
// Rebuilds the flat list of rendering operations from the current node
// and connection state, then swaps it in under the callback lock so the
// audio thread switches sequences atomically.
void AudioProcessorGraph::buildRenderingSequence()
{
    Array<void*> newRenderingOps;
    int numRenderingBuffersNeeded = 2;
    int numMidiBuffersNeeded = 1;

    {
        const CarlaRecursiveMutexLocker cml (reorderMutex);

        // Topologically order the nodes: each node is inserted just before
        // the first already-ordered node it is an input to, so sources come
        // before the nodes that consume them.
        Array<Node*> orderedNodes;

        {
            const GraphRenderingOps::ConnectionLookupTable table (connections);

            for (int i = 0; i < nodes.size(); ++i)
            {
                Node* const node = nodes.getUnchecked(i);

                // (re)prepare each node with the graph's current settings
                node->prepare (getSampleRate(), getBlockSize(), this);

                int j = 0;
                for (; j < orderedNodes.size(); ++j)
                    if (table.isAnInputTo (node->nodeId, ((Node*) orderedNodes.getUnchecked(j))->nodeId))
                        break;

                orderedNodes.insert (j, node);
            }
        }

        // Turn the ordered node list into concrete render ops, and find out
        // how many intermediate audio/midi buffers the sequence requires.
        GraphRenderingOps::RenderingOpSequenceCalculator calculator (*this, orderedNodes, newRenderingOps);
        numRenderingBuffersNeeded = calculator.getNumBuffersNeeded();
        numMidiBuffersNeeded = calculator.getNumMidiBuffersNeeded();
    }

    {
        // swap over to the new rendering sequence..
        const CarlaRecursiveMutexLocker cml (getCallbackLock());

        audioBuffers->setRenderingBufferSize (numRenderingBuffersNeeded, getBlockSize());

        for (int i = midiBuffers.size(); --i >= 0;)
            midiBuffers.getUnchecked(i)->clear();

        while (midiBuffers.size() < numMidiBuffersNeeded)
            midiBuffers.add (new MidiBuffer());

        renderingOps.swapWith (newRenderingOps);
    }

    // delete the old ones..
    deleteRenderOpArray (newRenderingOps);
}
  1051. //==============================================================================
// Called before playback starts. Note: the passed sample rate is ignored;
// buildRenderingSequence() prepares the nodes with the graph's own
// getSampleRate()/getBlockSize() values.
void AudioProcessorGraph::prepareToPlay (double /*sampleRate*/, int estimatedSamplesPerBlock)
{
    // at least one output channel so the accumulator is never zero-sized
    audioBuffers->prepareInOutBuffers (jmax (1, getTotalNumOutputChannels()), estimatedSamplesPerBlock);

    currentMidiInputBuffer = nullptr;
    currentMidiOutputBuffer.clear();

    // throw away any stale sequence before building the new one
    clearRenderingSequence();
    buildRenderingSequence();

    isPrepared = true;
}
void AudioProcessorGraph::releaseResources()
{
    // Clear the flag first so graph edits made from here on don't schedule
    // pointless rendering-sequence rebuilds.
    isPrepared = false;

    for (int i = 0; i < nodes.size(); ++i)
        nodes.getUnchecked(i)->unprepare();

    // shrink the shared buffers and drop borrowed pointers
    audioBuffers->release();
    midiBuffers.clear();

    currentMidiInputBuffer = nullptr;
    currentMidiOutputBuffer.clear();
}
  1071. void AudioProcessorGraph::reset()
  1072. {
  1073. const CarlaRecursiveMutexLocker cml (getCallbackLock());
  1074. for (int i = 0; i < nodes.size(); ++i)
  1075. nodes.getUnchecked(i)->getProcessor()->reset();
  1076. }
  1077. void AudioProcessorGraph::setNonRealtime (bool isProcessingNonRealtime) noexcept
  1078. {
  1079. const CarlaRecursiveMutexLocker cml (getCallbackLock());
  1080. AudioProcessor::setNonRealtime (isProcessingNonRealtime);
  1081. for (int i = 0; i < nodes.size(); ++i)
  1082. nodes.getUnchecked(i)->getProcessor()->setNonRealtime (isProcessingNonRealtime);
  1083. }
// Runs one block through the graph: exposes the caller's buffers to the
// IO nodes, executes every prepared rendering op in order, then copies
// the accumulated audio/midi output back into the caller's buffers.
void AudioProcessorGraph::processAudio (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    AudioSampleBuffer& renderingBuffers = audioBuffers->renderingBuffers;
    AudioSampleBuffer*& currentAudioInputBuffer = audioBuffers->currentAudioInputBuffer;
    AudioSampleBuffer& currentAudioOutputBuffer = audioBuffers->currentAudioOutputBuffer;

    const int numSamples = buffer.getNumSamples();

    // make the IO buffers visible to the AudioGraphIOProcessor nodes
    currentAudioInputBuffer = &buffer;
    currentAudioOutputBuffer.setSize (jmax (1, buffer.getNumChannels()), numSamples);
    currentAudioOutputBuffer.clear();
    currentMidiInputBuffer = &midiMessages;
    currentMidiOutputBuffer.clear();

    // execute the pre-built rendering sequence
    for (int i = 0; i < renderingOps.size(); ++i)
    {
        GraphRenderingOps::AudioGraphRenderingOpBase* const op
            = (GraphRenderingOps::AudioGraphRenderingOpBase*) renderingOps.getUnchecked(i);

        op->perform (renderingBuffers, midiBuffers, numSamples);
    }

    // hand the accumulated results back to the caller
    for (int i = 0; i < buffer.getNumChannels(); ++i)
        buffer.copyFrom (i, 0, currentAudioOutputBuffer, i, 0, numSamples);

    midiMessages.clear();
    midiMessages.addEvents (currentMidiOutputBuffer, 0, buffer.getNumSamples(), 0);
}
bool AudioProcessorGraph::acceptsMidi() const   { return true; }  // the graph can always route incoming MIDI
bool AudioProcessorGraph::producesMidi() const  { return true; }  // the graph can always emit MIDI
// Standard AudioProcessor entry point: delegates to processAudio().
void AudioProcessorGraph::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    processAudio (buffer, midiMessages);
}
  1112. void AudioProcessorGraph::reorderNowIfNeeded()
  1113. {
  1114. if (needsReorder)
  1115. {
  1116. needsReorder = false;
  1117. buildRenderingSequence();
  1118. }
  1119. }
  1120. //==============================================================================
// An IO processor is a placeholder node that pipes audio/midi between the
// graph and the outside world; its kind and direction are fixed by 'type'.
// The parent graph pointer stays null until setParentGraph() is called.
AudioProcessorGraph::AudioGraphIOProcessor::AudioGraphIOProcessor (const IODeviceType deviceType)
    : type (deviceType), graph (nullptr)
{
}
AudioProcessorGraph::AudioGraphIOProcessor::~AudioGraphIOProcessor()
{
    // Nothing to release: the graph pointer is non-owning.
}
  1128. const String AudioProcessorGraph::AudioGraphIOProcessor::getName() const
  1129. {
  1130. switch (type)
  1131. {
  1132. case audioOutputNode: return "Audio Output";
  1133. case audioInputNode: return "Audio Input";
  1134. case midiOutputNode: return "Midi Output";
  1135. case midiInputNode: return "Midi Input";
  1136. default: break;
  1137. }
  1138. return String();
  1139. }
void AudioProcessorGraph::AudioGraphIOProcessor::prepareToPlay (double, int)
{
    // An IO node is only usable after it has been added to a graph.
    CARLA_SAFE_ASSERT (graph != nullptr);
}
void AudioProcessorGraph::AudioGraphIOProcessor::releaseResources()
{
    // Nothing to do: the shared buffers are owned by the parent graph.
}
  1147. void AudioProcessorGraph::AudioGraphIOProcessor::processAudio (AudioSampleBuffer& buffer,
  1148. MidiBuffer& midiMessages)
  1149. {
  1150. AudioSampleBuffer*& currentAudioInputBuffer =
  1151. graph->audioBuffers->currentAudioInputBuffer;
  1152. AudioSampleBuffer& currentAudioOutputBuffer =
  1153. graph->audioBuffers->currentAudioOutputBuffer;
  1154. jassert (graph != nullptr);
  1155. switch (type)
  1156. {
  1157. case audioOutputNode:
  1158. {
  1159. for (int i = jmin (currentAudioOutputBuffer.getNumChannels(),
  1160. buffer.getNumChannels()); --i >= 0;)
  1161. {
  1162. currentAudioOutputBuffer.addFrom (i, 0, buffer, i, 0, buffer.getNumSamples());
  1163. }
  1164. break;
  1165. }
  1166. case audioInputNode:
  1167. {
  1168. for (int i = jmin (currentAudioInputBuffer->getNumChannels(),
  1169. buffer.getNumChannels()); --i >= 0;)
  1170. {
  1171. buffer.copyFrom (i, 0, *currentAudioInputBuffer, i, 0, buffer.getNumSamples());
  1172. }
  1173. break;
  1174. }
  1175. case midiOutputNode:
  1176. graph->currentMidiOutputBuffer.addEvents (midiMessages, 0, buffer.getNumSamples(), 0);
  1177. break;
  1178. case midiInputNode:
  1179. midiMessages.addEvents (*graph->currentMidiInputBuffer, 0, buffer.getNumSamples(), 0);
  1180. break;
  1181. default:
  1182. break;
  1183. }
  1184. }
// Standard AudioProcessor entry point: delegates to processAudio().
void AudioProcessorGraph::AudioGraphIOProcessor::processBlock (AudioSampleBuffer& buffer,
                                                               MidiBuffer& midiMessages)
{
    processAudio (buffer, midiMessages);
}
bool AudioProcessorGraph::AudioGraphIOProcessor::acceptsMidi() const
{
    // Deliberately inverted-looking: the midi *output* node consumes MIDI
    // produced inside the graph, so it is the one that accepts input.
    return type == midiOutputNode;
}
bool AudioProcessorGraph::AudioGraphIOProcessor::producesMidi() const
{
    // Deliberately inverted-looking: the midi *input* node emits the
    // graph's incoming MIDI into the graph, so it is the producer.
    return type == midiInputNode;
}
bool AudioProcessorGraph::AudioGraphIOProcessor::isInput() const noexcept   { return type == audioInputNode  || type == midiInputNode; }  // feeds data *into* the graph
bool AudioProcessorGraph::AudioGraphIOProcessor::isOutput() const noexcept  { return type == audioOutputNode || type == midiOutputNode; }  // carries data *out of* the graph
  1200. void AudioProcessorGraph::AudioGraphIOProcessor::setParentGraph (AudioProcessorGraph* const newGraph)
  1201. {
  1202. graph = newGraph;
  1203. if (graph != nullptr)
  1204. {
  1205. setPlayConfigDetails (type == audioOutputNode ? graph->getTotalNumOutputChannels() : 0,
  1206. type == audioInputNode ? graph->getTotalNumInputChannels() : 0,
  1207. getSampleRate(),
  1208. getBlockSize());
  1209. }
  1210. }
  1211. }