Audio plugin host https://kx.studio/carla
You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1508 lines
52KB

  1. /*
  2. ==============================================================================
  3. This file is part of the Water library.
  4. Copyright (c) 2015 ROLI Ltd.
  5. Copyright (C) 2017-2018 Filipe Coelho <falktx@falktx.com>
  6. Permission is granted to use this software under the terms of the GNU
  7. General Public License as published by the Free Software Foundation;
  8. either version 2 of the License, or any later version.
  9. This program is distributed in the hope that it will be useful, but WITHOUT
  10. ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
  11. FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  12. For a full copy of the GNU General Public License see the doc/GPL.txt file.
  13. ==============================================================================
  14. */
  15. #include "AudioProcessorGraph.h"
  16. #include "../containers/SortedSet.h"
  17. namespace water {
// Sentinel channel index used in Connection endpoints to denote a node's
// MIDI port rather than an audio channel (0x1000 is well above any real
// audio channel count used by the graph).
const int AudioProcessorGraph::midiChannelIndex = 0x1000;
  19. //==============================================================================
  20. namespace GraphRenderingOps
  21. {
// Abstract base for one step in the graph's pre-computed rendering sequence.
// Each concrete op manipulates channels of the shared audio buffer and/or the
// shared MIDI buffer pool when perform() is invoked once per processing block.
struct AudioGraphRenderingOpBase
{
    AudioGraphRenderingOpBase() noexcept {}
    virtual ~AudioGraphRenderingOpBase() {}

    /** Executes this rendering step.
        @param sharedBufferChans  pool of audio channels shared by all ops
        @param sharedMidiBuffers  pool of MIDI buffers shared by all ops
        @param numSamples         number of samples to process in this block
    */
    virtual void perform (AudioSampleBuffer& sharedBufferChans,
                          const OwnedArray<MidiBuffer>& sharedMidiBuffers,
                          const int numSamples) = 0;
};
// use CRTP: the single virtual perform() forwards to the derived class's
// non-virtual perform(), so concrete ops don't each need a virtual override.
template <class Child>
struct AudioGraphRenderingOp : public AudioGraphRenderingOpBase
{
    void perform (AudioSampleBuffer& sharedBufferChans,
                  const OwnedArray<MidiBuffer>& sharedMidiBuffers,
                  const int numSamples) override
    {
        // Safe downcast: by CRTP construction, this object IS a Child.
        static_cast<Child*> (this)->perform (sharedBufferChans, sharedMidiBuffers, numSamples);
    }
};
  41. //==============================================================================
  42. struct ClearChannelOp : public AudioGraphRenderingOp<ClearChannelOp>
  43. {
  44. ClearChannelOp (const int channel) noexcept : channelNum (channel) {}
  45. void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
  46. {
  47. sharedBufferChans.clear (channelNum, 0, numSamples);
  48. }
  49. const int channelNum;
  50. CARLA_DECLARE_NON_COPY_CLASS (ClearChannelOp)
  51. };
  52. //==============================================================================
  53. struct CopyChannelOp : public AudioGraphRenderingOp<CopyChannelOp>
  54. {
  55. CopyChannelOp (const int srcChan, const int dstChan) noexcept
  56. : srcChannelNum (srcChan), dstChannelNum (dstChan)
  57. {}
  58. void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
  59. {
  60. sharedBufferChans.copyFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples);
  61. }
  62. const int srcChannelNum, dstChannelNum;
  63. CARLA_DECLARE_NON_COPY_CLASS (CopyChannelOp)
  64. };
  65. //==============================================================================
  66. struct AddChannelOp : public AudioGraphRenderingOp<AddChannelOp>
  67. {
  68. AddChannelOp (const int srcChan, const int dstChan) noexcept
  69. : srcChannelNum (srcChan), dstChannelNum (dstChan)
  70. {}
  71. void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
  72. {
  73. sharedBufferChans.addFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples);
  74. }
  75. const int srcChannelNum, dstChannelNum;
  76. CARLA_DECLARE_NON_COPY_CLASS (AddChannelOp)
  77. };
  78. //==============================================================================
  79. struct ClearMidiBufferOp : public AudioGraphRenderingOp<ClearMidiBufferOp>
  80. {
  81. ClearMidiBufferOp (const int buffer) noexcept : bufferNum (buffer) {}
  82. void perform (AudioSampleBuffer&, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int)
  83. {
  84. sharedMidiBuffers.getUnchecked (bufferNum)->clear();
  85. }
  86. const int bufferNum;
  87. CARLA_DECLARE_NON_COPY_CLASS (ClearMidiBufferOp)
  88. };
  89. //==============================================================================
  90. struct CopyMidiBufferOp : public AudioGraphRenderingOp<CopyMidiBufferOp>
  91. {
  92. CopyMidiBufferOp (const int srcBuffer, const int dstBuffer) noexcept
  93. : srcBufferNum (srcBuffer), dstBufferNum (dstBuffer)
  94. {}
  95. void perform (AudioSampleBuffer&, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int)
  96. {
  97. *sharedMidiBuffers.getUnchecked (dstBufferNum) = *sharedMidiBuffers.getUnchecked (srcBufferNum);
  98. }
  99. const int srcBufferNum, dstBufferNum;
  100. CARLA_DECLARE_NON_COPY_CLASS (CopyMidiBufferOp)
  101. };
  102. //==============================================================================
  103. struct AddMidiBufferOp : public AudioGraphRenderingOp<AddMidiBufferOp>
  104. {
  105. AddMidiBufferOp (const int srcBuffer, const int dstBuffer)
  106. : srcBufferNum (srcBuffer), dstBufferNum (dstBuffer)
  107. {}
  108. void perform (AudioSampleBuffer&, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int numSamples)
  109. {
  110. sharedMidiBuffers.getUnchecked (dstBufferNum)
  111. ->addEvents (*sharedMidiBuffers.getUnchecked (srcBufferNum), 0, numSamples, 0);
  112. }
  113. const int srcBufferNum, dstBufferNum;
  114. CARLA_DECLARE_NON_COPY_CLASS (AddMidiBufferOp)
  115. };
  116. //==============================================================================
  117. struct DelayChannelOp : public AudioGraphRenderingOp<DelayChannelOp>
  118. {
  119. DelayChannelOp (const int chan, const int delaySize)
  120. : channel (chan),
  121. bufferSize (delaySize + 1),
  122. readIndex (0), writeIndex (delaySize)
  123. {
  124. buffer.calloc ((size_t) bufferSize);
  125. }
  126. void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>&, const int numSamples)
  127. {
  128. float* data = sharedBufferChans.getWritePointer (channel, 0);
  129. HeapBlock<float>& block = buffer;
  130. for (int i = numSamples; --i >= 0;)
  131. {
  132. block [writeIndex] = *data;
  133. *data++ = block [readIndex];
  134. if (++readIndex >= bufferSize) readIndex = 0;
  135. if (++writeIndex >= bufferSize) writeIndex = 0;
  136. }
  137. }
  138. private:
  139. HeapBlock<float> buffer;
  140. const int channel, bufferSize;
  141. int readIndex, writeIndex;
  142. CARLA_DECLARE_NON_COPY_CLASS (DelayChannelOp)
  143. };
  144. //==============================================================================
//==============================================================================
/** Rendering op that runs one node's processor over the shared-buffer channels
    and MIDI buffer assigned to it by the sequence calculator. */
struct ProcessBufferOp : public AudioGraphRenderingOp<ProcessBufferOp>
{
    ProcessBufferOp (const AudioProcessorGraph::Node::Ptr& n,
                     const Array<int>& audioChannelsUsed,
                     const int totalNumChans,
                     const int midiBuffer)
        : node (n),
          processor (n->getProcessor()),
          audioChannelsToUse (audioChannelsUsed),
          totalChans (jmax (1, totalNumChans)), // always hand the processor at least one channel
          midiBufferToUse (midiBuffer)
    {
        audioChannels.calloc ((size_t) totalChans);

        // Pad the channel map with channel 0 so it covers all totalChans slots.
        while (audioChannelsToUse.size() < totalChans)
            audioChannelsToUse.add (0);
    }

    void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray<MidiBuffer>& sharedMidiBuffers, const int numSamples)
    {
        // Build an array of pointers aliasing our assigned shared-buffer channels.
        HeapBlock<float*>& channels = audioChannels;

        for (int i = totalChans; --i >= 0;)
            channels[i] = sharedBufferChans.getWritePointer (audioChannelsToUse.getUnchecked (i), 0);

        // Wrap those pointers in a buffer view (no copying) for the processor.
        AudioSampleBuffer buffer (channels, totalChans, numSamples);

        if (processor->isSuspended())
        {
            // Suspended processors emit silence instead of processing.
            buffer.clear();
        }
        else
        {
            // Hold the processor's callback lock for the duration of processBlock.
            const CarlaRecursiveMutexLocker cml (processor->getCallbackLock());

            callProcess (buffer, *sharedMidiBuffers.getUnchecked (midiBufferToUse));
        }
    }

    void callProcess (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
    {
        processor->processBlock (buffer, midiMessages);
    }

    const AudioProcessorGraph::Node::Ptr node;
    AudioProcessor* const processor;

private:
    Array<int> audioChannelsToUse;
    HeapBlock<float*> audioChannels;
    // NOTE(review): tempBuffer is never referenced in this struct — possibly a
    // leftover from upstream JUCE code; confirm before removing.
    AudioSampleBuffer tempBuffer;
    const int totalChans;
    const int midiBufferToUse;

    CARLA_DECLARE_NON_COPY_CLASS (ProcessBufferOp)
};
  191. //==============================================================================
/** Used to calculate the correct sequence of rendering ops needed, based on
    the best re-use of shared buffers at each stage.

    Given a topologically-ordered list of nodes, the constructor appends one or
    more ops per node to `renderingOps` (clears/copies/adds/delays plus a final
    ProcessBufferOp), while tracking which shared audio/MIDI buffer currently
    holds which node's output so buffers can be recycled as soon as their
    contents are no longer needed.
*/
struct RenderingOpSequenceCalculator
{
    RenderingOpSequenceCalculator (AudioProcessorGraph& g,
                                   const Array<AudioProcessorGraph::Node*>& nodes,
                                   Array<void*>& renderingOps)
        : graph (g),
          orderedNodes (nodes),
          totalLatency (0)
    {
        nodeIds.add ((uint32) zeroNodeID); // first buffer is read-only zeros
        channels.add (0);

        midiNodeIds.add ((uint32) zeroNodeID);

        for (int i = 0; i < orderedNodes.size(); ++i)
        {
            createRenderingOpsForNode (*orderedNodes.getUnchecked(i), renderingOps, i);
            markAnyUnusedBuffersAsFree (i);
        }

        graph.setLatencySamples (totalLatency);
    }

    // How many shared audio channels / MIDI buffers the finished sequence needs.
    int getNumBuffersNeeded() const noexcept      { return nodeIds.size(); }
    int getNumMidiBuffersNeeded() const noexcept  { return midiNodeIds.size(); }

private:
    //==============================================================================
    AudioProcessorGraph& graph;
    const Array<AudioProcessorGraph::Node*>& orderedNodes;
    // Parallel arrays: nodeIds[i]/channels[i] record which node output currently
    // occupies shared audio buffer i; midiNodeIds does the same for MIDI buffers.
    Array<int> channels;
    Array<uint32> nodeIds, midiNodeIds;

    // freeNodeID marks a recyclable buffer; zeroNodeID marks the read-only
    // silence buffer (index 0), which must never be written to.
    enum { freeNodeID = 0xffffffff, zeroNodeID = 0xfffffffe };

    static bool isNodeBusy (uint32 nodeID) noexcept   { return nodeID != freeNodeID && nodeID != zeroNodeID; }

    // Per-node latency bookkeeping (nodeDelayIDs[i] <-> nodeDelays[i]).
    Array<uint32> nodeDelayIDs;
    Array<int> nodeDelays;
    int totalLatency;

    int getNodeDelay (const uint32 nodeID) const      { return nodeDelays [nodeDelayIDs.indexOf (nodeID)]; }

    // Records (or updates) the accumulated latency at a node's output.
    void setNodeDelay (const uint32 nodeID, const int latency)
    {
        const int index = nodeDelayIDs.indexOf (nodeID);

        if (index >= 0)
        {
            nodeDelays.set (index, latency);
        }
        else
        {
            nodeDelayIDs.add (nodeID);
            nodeDelays.add (latency);
        }
    }

    // Maximum latency among all sources feeding into the given node.
    int getInputLatencyForNode (const uint32 nodeID) const
    {
        int maxLatency = 0;

        for (int i = graph.getNumConnections(); --i >= 0;)
        {
            const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

            if (c->destNodeId == nodeID)
                maxLatency = jmax (maxLatency, getNodeDelay (c->sourceNodeId));
        }

        return maxLatency;
    }

    //==============================================================================
    // Emits the ops needed to gather this node's inputs into shared buffers,
    // latency-align them, run the processor, and tag the output buffers.
    void createRenderingOpsForNode (AudioProcessorGraph::Node& node,
                                    Array<void*>& renderingOps,
                                    const int ourRenderingIndex)
    {
        AudioProcessor& processor = *node.getProcessor();
        const int numIns  = processor.getTotalNumInputChannels();
        const int numOuts = processor.getTotalNumOutputChannels();
        const int totalChans = jmax (numIns, numOuts);

        Array<int> audioChannelsToUse;
        int midiBufferToUse = -1;

        int maxLatency = getInputLatencyForNode (node.nodeId);

        for (int inputChan = 0; inputChan < numIns; ++inputChan)
        {
            // get a list of all the inputs to this node
            Array<uint32> sourceNodes;
            Array<int> sourceOutputChans;

            for (int i = graph.getNumConnections(); --i >= 0;)
            {
                const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

                if (c->destNodeId == node.nodeId && c->destChannelIndex == inputChan)
                {
                    sourceNodes.add (c->sourceNodeId);
                    sourceOutputChans.add (c->sourceChannelIndex);
                }
            }

            int bufIndex = -1;

            if (sourceNodes.size() == 0)
            {
                // unconnected input channel
                if (inputChan >= numOuts)
                {
                    // Input-only channel: the read-only zero buffer can serve it.
                    bufIndex = getReadOnlyEmptyBuffer();
                    wassert (bufIndex >= 0);
                }
                else
                {
                    // Will also be used as an output, so it needs a writable buffer.
                    bufIndex = getFreeBuffer (false);
                    renderingOps.add (new ClearChannelOp (bufIndex));
                }
            }
            else if (sourceNodes.size() == 1)
            {
                // channel with a straightforward single input..
                const uint32 srcNode = sourceNodes.getUnchecked(0);
                const int srcChan = sourceOutputChans.getUnchecked(0);

                bufIndex = getBufferContaining (srcNode, srcChan);

                if (bufIndex < 0)
                {
                    // if not found, this is probably a feedback loop
                    bufIndex = getReadOnlyEmptyBuffer();
                    wassert (bufIndex >= 0);
                }

                if (inputChan < numOuts
                     && isBufferNeededLater (ourRenderingIndex,
                                             inputChan,
                                             srcNode, srcChan))
                {
                    // can't mess up this channel because it's needed later by another node, so we
                    // need to use a copy of it..
                    const int newFreeBuffer = getFreeBuffer (false);

                    renderingOps.add (new CopyChannelOp (bufIndex, newFreeBuffer));

                    bufIndex = newFreeBuffer;
                }

                const int nodeDelay = getNodeDelay (srcNode);

                // Pad this input up to the slowest input's latency.
                if (nodeDelay < maxLatency)
                    renderingOps.add (new DelayChannelOp (bufIndex, maxLatency - nodeDelay));
            }
            else
            {
                // channel with a mix of several inputs..

                // try to find a re-usable channel from our inputs..
                int reusableInputIndex = -1;

                for (int i = 0; i < sourceNodes.size(); ++i)
                {
                    const int sourceBufIndex = getBufferContaining (sourceNodes.getUnchecked(i),
                                                                    sourceOutputChans.getUnchecked(i));

                    if (sourceBufIndex >= 0
                        && ! isBufferNeededLater (ourRenderingIndex,
                                                  inputChan,
                                                  sourceNodes.getUnchecked(i),
                                                  sourceOutputChans.getUnchecked(i)))
                    {
                        // we've found one of our input chans that can be re-used..
                        reusableInputIndex = i;
                        bufIndex = sourceBufIndex;

                        const int nodeDelay = getNodeDelay (sourceNodes.getUnchecked (i));
                        if (nodeDelay < maxLatency)
                            renderingOps.add (new DelayChannelOp (sourceBufIndex, maxLatency - nodeDelay));

                        break;
                    }
                }

                if (reusableInputIndex < 0)
                {
                    // can't re-use any of our input chans, so get a new one and copy everything into it..
                    bufIndex = getFreeBuffer (false);
                    wassert (bufIndex != 0);

                    const int srcIndex = getBufferContaining (sourceNodes.getUnchecked (0),
                                                              sourceOutputChans.getUnchecked (0));
                    if (srcIndex < 0)
                    {
                        // if not found, this is probably a feedback loop
                        renderingOps.add (new ClearChannelOp (bufIndex));
                    }
                    else
                    {
                        renderingOps.add (new CopyChannelOp (srcIndex, bufIndex));
                    }

                    reusableInputIndex = 0;

                    const int nodeDelay = getNodeDelay (sourceNodes.getFirst());

                    if (nodeDelay < maxLatency)
                        renderingOps.add (new DelayChannelOp (bufIndex, maxLatency - nodeDelay));
                }

                // Mix every remaining source into the chosen buffer.
                for (int j = 0; j < sourceNodes.size(); ++j)
                {
                    if (j != reusableInputIndex)
                    {
                        int srcIndex = getBufferContaining (sourceNodes.getUnchecked(j),
                                                            sourceOutputChans.getUnchecked(j));
                        if (srcIndex >= 0)
                        {
                            const int nodeDelay = getNodeDelay (sourceNodes.getUnchecked (j));

                            if (nodeDelay < maxLatency)
                            {
                                if (! isBufferNeededLater (ourRenderingIndex, inputChan,
                                                           sourceNodes.getUnchecked(j),
                                                           sourceOutputChans.getUnchecked(j)))
                                {
                                    renderingOps.add (new DelayChannelOp (srcIndex, maxLatency - nodeDelay));
                                }
                                else // buffer is reused elsewhere, can't be delayed
                                {
                                    const int bufferToDelay = getFreeBuffer (false);
                                    renderingOps.add (new CopyChannelOp (srcIndex, bufferToDelay));
                                    renderingOps.add (new DelayChannelOp (bufferToDelay, maxLatency - nodeDelay));
                                    srcIndex = bufferToDelay;
                                }
                            }

                            renderingOps.add (new AddChannelOp (srcIndex, bufIndex));
                        }
                    }
                }
            }

            wassert (bufIndex >= 0);
            audioChannelsToUse.add (bufIndex);

            // In-place processing: this input buffer will also carry the output.
            if (inputChan < numOuts)
                markBufferAsContaining (bufIndex, node.nodeId, inputChan);
        }

        // Allocate buffers for output-only channels (outputs beyond numIns).
        for (int outputChan = numIns; outputChan < numOuts; ++outputChan)
        {
            const int bufIndex = getFreeBuffer (false);
            wassert (bufIndex != 0);
            audioChannelsToUse.add (bufIndex);

            markBufferAsContaining (bufIndex, node.nodeId, outputChan);
        }

        // Now the same thing for midi..
        Array<uint32> midiSourceNodes;

        for (int i = graph.getNumConnections(); --i >= 0;)
        {
            const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

            if (c->destNodeId == node.nodeId && c->destChannelIndex == AudioProcessorGraph::midiChannelIndex)
                midiSourceNodes.add (c->sourceNodeId);
        }

        if (midiSourceNodes.size() == 0)
        {
            // No midi inputs..
            midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi

            if (processor.acceptsMidi() || processor.producesMidi())
                renderingOps.add (new ClearMidiBufferOp (midiBufferToUse));
        }
        else if (midiSourceNodes.size() == 1)
        {
            // One midi input..
            midiBufferToUse = getBufferContaining (midiSourceNodes.getUnchecked(0),
                                                   AudioProcessorGraph::midiChannelIndex);

            if (midiBufferToUse >= 0)
            {
                if (isBufferNeededLater (ourRenderingIndex,
                                         AudioProcessorGraph::midiChannelIndex,
                                         midiSourceNodes.getUnchecked(0),
                                         AudioProcessorGraph::midiChannelIndex))
                {
                    // can't mess up this channel because it's needed later by another node, so we
                    // need to use a copy of it..
                    const int newFreeBuffer = getFreeBuffer (true);

                    renderingOps.add (new CopyMidiBufferOp (midiBufferToUse, newFreeBuffer));

                    midiBufferToUse = newFreeBuffer;
                }
            }
            else
            {
                // probably a feedback loop, so just use an empty one..
                midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi
            }
        }
        else
        {
            // More than one midi input being mixed..
            int reusableInputIndex = -1;

            for (int i = 0; i < midiSourceNodes.size(); ++i)
            {
                const int sourceBufIndex = getBufferContaining (midiSourceNodes.getUnchecked(i),
                                                                AudioProcessorGraph::midiChannelIndex);

                if (sourceBufIndex >= 0
                     && ! isBufferNeededLater (ourRenderingIndex,
                                               AudioProcessorGraph::midiChannelIndex,
                                               midiSourceNodes.getUnchecked(i),
                                               AudioProcessorGraph::midiChannelIndex))
                {
                    // we've found one of our input buffers that can be re-used..
                    reusableInputIndex = i;
                    midiBufferToUse = sourceBufIndex;
                    break;
                }
            }

            if (reusableInputIndex < 0)
            {
                // can't re-use any of our input buffers, so get a new one and copy everything into it..
                midiBufferToUse = getFreeBuffer (true);
                wassert (midiBufferToUse >= 0);

                const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(0),
                                                          AudioProcessorGraph::midiChannelIndex);
                if (srcIndex >= 0)
                    renderingOps.add (new CopyMidiBufferOp (srcIndex, midiBufferToUse));
                else
                    renderingOps.add (new ClearMidiBufferOp (midiBufferToUse));

                reusableInputIndex = 0;
            }

            // Merge every remaining MIDI source into the chosen buffer.
            for (int j = 0; j < midiSourceNodes.size(); ++j)
            {
                if (j != reusableInputIndex)
                {
                    const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(j),
                                                              AudioProcessorGraph::midiChannelIndex);
                    if (srcIndex >= 0)
                        renderingOps.add (new AddMidiBufferOp (srcIndex, midiBufferToUse));
                }
            }
        }

        if (processor.producesMidi())
            markBufferAsContaining (midiBufferToUse, node.nodeId,
                                    AudioProcessorGraph::midiChannelIndex);

        // The node's output latency = its aligned input latency + its own latency.
        setNodeDelay (node.nodeId, maxLatency + processor.getLatencySamples());

        // Terminal nodes (no audio outputs) define the graph's reported latency.
        if (numOuts == 0)
            totalLatency = maxLatency;

        renderingOps.add (new ProcessBufferOp (&node, audioChannelsToUse,
                                               totalChans, midiBufferToUse));
    }

    //==============================================================================
    // Returns the index of a free buffer, growing the pool if none is free.
    // Index 0 (the zero buffer) is never handed out.
    int getFreeBuffer (const bool forMidi)
    {
        if (forMidi)
        {
            for (int i = 1; i < midiNodeIds.size(); ++i)
                if (midiNodeIds.getUnchecked(i) == freeNodeID)
                    return i;

            midiNodeIds.add ((uint32) freeNodeID);
            return midiNodeIds.size() - 1;
        }
        else
        {
            for (int i = 1; i < nodeIds.size(); ++i)
                if (nodeIds.getUnchecked(i) == freeNodeID)
                    return i;

            nodeIds.add ((uint32) freeNodeID);
            channels.add (0);
            return nodeIds.size() - 1;
        }
    }

    // Buffer 0 is reserved as permanently-silent, read-only input.
    int getReadOnlyEmptyBuffer() const noexcept
    {
        return 0;
    }

    // Finds which shared buffer currently holds the given node output,
    // or -1 if none does (e.g. a feedback connection not yet rendered).
    int getBufferContaining (const uint32 nodeId, const int outputChannel) const noexcept
    {
        if (outputChannel == AudioProcessorGraph::midiChannelIndex)
        {
            for (int i = midiNodeIds.size(); --i >= 0;)
                if (midiNodeIds.getUnchecked(i) == nodeId)
                    return i;
        }
        else
        {
            for (int i = nodeIds.size(); --i >= 0;)
                if (nodeIds.getUnchecked(i) == nodeId
                     && channels.getUnchecked(i) == outputChannel)
                    return i;
        }

        return -1;
    }

    // After rendering step `stepIndex`, releases any buffer whose contents no
    // later node will read.
    void markAnyUnusedBuffersAsFree (const int stepIndex)
    {
        for (int i = 0; i < nodeIds.size(); ++i)
        {
            if (isNodeBusy (nodeIds.getUnchecked(i))
                 && ! isBufferNeededLater (stepIndex, -1,
                                           nodeIds.getUnchecked(i),
                                           channels.getUnchecked(i)))
            {
                nodeIds.set (i, (uint32) freeNodeID);
            }
        }

        for (int i = 0; i < midiNodeIds.size(); ++i)
        {
            if (isNodeBusy (midiNodeIds.getUnchecked(i))
                 && ! isBufferNeededLater (stepIndex, -1,
                                           midiNodeIds.getUnchecked(i),
                                           AudioProcessorGraph::midiChannelIndex))
            {
                midiNodeIds.set (i, (uint32) freeNodeID);
            }
        }
    }

    // True if any node at or after stepIndexToSearchFrom still reads the given
    // node output. inputChannelOfIndexToIgnore excludes the destination channel
    // currently being wired up (only for the first step checked).
    bool isBufferNeededLater (int stepIndexToSearchFrom,
                              int inputChannelOfIndexToIgnore,
                              const uint32 nodeId,
                              const int outputChanIndex) const
    {
        while (stepIndexToSearchFrom < orderedNodes.size())
        {
            const AudioProcessorGraph::Node* const node = (const AudioProcessorGraph::Node*) orderedNodes.getUnchecked (stepIndexToSearchFrom);

            if (outputChanIndex == AudioProcessorGraph::midiChannelIndex)
            {
                if (inputChannelOfIndexToIgnore != AudioProcessorGraph::midiChannelIndex
                     && graph.getConnectionBetween (nodeId, AudioProcessorGraph::midiChannelIndex,
                                                    node->nodeId, AudioProcessorGraph::midiChannelIndex) != nullptr)
                    return true;
            }
            else
            {
                for (int i = 0; i < node->getProcessor()->getTotalNumInputChannels(); ++i)
                    if (i != inputChannelOfIndexToIgnore
                         && graph.getConnectionBetween (nodeId, outputChanIndex,
                                                        node->nodeId, i) != nullptr)
                        return true;
            }

            // The ignore-channel only applies to the node currently being built.
            inputChannelOfIndexToIgnore = -1;
            ++stepIndexToSearchFrom;
        }

        return false;
    }

    // Records that a shared buffer now holds the given node output.
    void markBufferAsContaining (int bufferNum, uint32 nodeId, int outputIndex)
    {
        if (outputIndex == AudioProcessorGraph::midiChannelIndex)
        {
            wassert (bufferNum > 0 && bufferNum < midiNodeIds.size());

            midiNodeIds.set (bufferNum, nodeId);
        }
        else
        {
            wassert (bufferNum >= 0 && bufferNum < nodeIds.size());

            nodeIds.set (bufferNum, nodeId);
            channels.set (bufferNum, outputIndex);
        }
    }

    CARLA_DECLARE_NON_COPY_CLASS (RenderingOpSequenceCalculator)
};
  609. //==============================================================================
// Holds a fast lookup table for checking which nodes are inputs to others.
// Entries are kept sorted by destination node id so findEntry can binary-search.
class ConnectionLookupTable
{
public:
    explicit ConnectionLookupTable (const OwnedArray<AudioProcessorGraph::Connection>& connections)
    {
        for (int i = 0; i < static_cast<int>(connections.size()); ++i)
        {
            const AudioProcessorGraph::Connection* const c = connections.getUnchecked(i);

            int index;
            Entry* entry = findEntry (c->destNodeId, index);

            if (entry == nullptr)
            {
                // First connection into this destination: insert a new entry at
                // the sorted position reported by findEntry.
                entry = new Entry (c->destNodeId);
                entries.insert (index, entry);
            }

            entry->srcNodes.add (c->sourceNodeId);
        }
    }

    /** Returns true if possibleInputId feeds possibleDestinationId, directly
        or through any chain of intermediate nodes. */
    bool isAnInputTo (const uint32 possibleInputId,
                      const uint32 possibleDestinationId) const noexcept
    {
        return isAnInputToRecursive (possibleInputId, possibleDestinationId, entries.size());
    }

private:
    //==============================================================================
    // One destination node plus the set of nodes that connect into it.
    struct Entry
    {
        explicit Entry (const uint32 destNodeId_) noexcept : destNodeId (destNodeId_) {}

        const uint32 destNodeId;
        SortedSet<uint32> srcNodes;

        CARLA_DECLARE_NON_COPY_CLASS (Entry)
    };

    OwnedArray<Entry> entries;

    // Depth-first walk back through the sources of possibleDestinationId.
    // recursionCheck caps the depth at the number of entries, so a cyclic
    // graph cannot recurse forever.
    bool isAnInputToRecursive (const uint32 possibleInputId,
                               const uint32 possibleDestinationId,
                               int recursionCheck) const noexcept
    {
        int index;

        if (const Entry* const entry = findEntry (possibleDestinationId, index))
        {
            const SortedSet<uint32>& srcNodes = entry->srcNodes;

            if (srcNodes.contains (possibleInputId))
                return true;

            if (--recursionCheck >= 0)
            {
                for (int i = 0; i < srcNodes.size(); ++i)
                    if (isAnInputToRecursive (possibleInputId, srcNodes.getUnchecked(i), recursionCheck))
                        return true;
            }
        }

        return false;
    }

    // Binary search over the sorted entries array. Returns the matching entry
    // (or nullptr), and always sets insertIndex to the position where an entry
    // for destNodeId belongs — used by the constructor when inserting.
    Entry* findEntry (const uint32 destNodeId, int& insertIndex) const noexcept
    {
        Entry* result = nullptr;
        int start = 0;
        int end = entries.size();

        for (;;)
        {
            if (start >= end)
            {
                break;
            }
            else if (destNodeId == entries.getUnchecked (start)->destNodeId)
            {
                result = entries.getUnchecked (start);
                break;
            }
            else
            {
                const int halfway = (start + end) / 2;

                if (halfway == start)
                {
                    // Range narrowed to one element that didn't match: the
                    // insertion point is before or after it depending on order.
                    if (destNodeId >= entries.getUnchecked (halfway)->destNodeId)
                        ++start;

                    break;
                }
                else if (destNodeId >= entries.getUnchecked (halfway)->destNodeId)
                    start = halfway;
                else
                    end = halfway;
            }
        }

        insertIndex = start;
        return result;
    }

    CARLA_DECLARE_NON_COPY_CLASS (ConnectionLookupTable)
};
  699. //==============================================================================
  700. struct ConnectionSorter
  701. {
  702. static int compareElements (const AudioProcessorGraph::Connection* const first,
  703. const AudioProcessorGraph::Connection* const second) noexcept
  704. {
  705. if (first->sourceNodeId < second->sourceNodeId) return -1;
  706. if (first->sourceNodeId > second->sourceNodeId) return 1;
  707. if (first->destNodeId < second->destNodeId) return -1;
  708. if (first->destNodeId > second->destNodeId) return 1;
  709. if (first->sourceChannelIndex < second->sourceChannelIndex) return -1;
  710. if (first->sourceChannelIndex > second->sourceChannelIndex) return 1;
  711. if (first->destChannelIndex < second->destChannelIndex) return -1;
  712. if (first->destChannelIndex > second->destChannelIndex) return 1;
  713. return 0;
  714. }
  715. };
  716. }
  717. //==============================================================================
//==============================================================================
// Plain value type describing one edge of the graph: source node/channel to
// destination node/channel (channel may be midiChannelIndex for MIDI ports).
AudioProcessorGraph::Connection::Connection (const uint32 sourceID, const int sourceChannel,
                                             const uint32 destID, const int destChannel) noexcept
    : sourceNodeId (sourceID), sourceChannelIndex (sourceChannel),
      destNodeId (destID), destChannelIndex (destChannel)
{
}
  724. //==============================================================================
//==============================================================================
// Wraps a processor with its graph-assigned id; takes ownership of `p`
// (stored in the `processor` smart-pointer member). `p` must not be null.
AudioProcessorGraph::Node::Node (const uint32 nodeID, AudioProcessor* const p) noexcept
    : nodeId (nodeID), processor (p), isPrepared (false)
{
    wassert (processor != nullptr);
}
  730. void AudioProcessorGraph::Node::prepare (const double newSampleRate, const int newBlockSize,
  731. AudioProcessorGraph* const graph)
  732. {
  733. if (! isPrepared)
  734. {
  735. isPrepared = true;
  736. setParentGraph (graph);
  737. processor->setRateAndBufferSizeDetails (newSampleRate, newBlockSize);
  738. processor->prepareToPlay (newSampleRate, newBlockSize);
  739. }
  740. }
  741. void AudioProcessorGraph::Node::unprepare()
  742. {
  743. if (isPrepared)
  744. {
  745. isPrepared = false;
  746. processor->releaseResources();
  747. }
  748. }
  749. void AudioProcessorGraph::Node::setParentGraph (AudioProcessorGraph* const graph) const
  750. {
  751. if (AudioProcessorGraph::AudioGraphIOProcessor* const ioProc
  752. = dynamic_cast<AudioProcessorGraph::AudioGraphIOProcessor*> (processor.get()))
  753. ioProc->setParentGraph (graph);
  754. }
  755. //==============================================================================
//==============================================================================
// Owns the scratch buffers used while rendering: the shared channel pool
// (renderingBuffers), a pointer to the caller's input buffer for the current
// block, and the graph's accumulated output buffer.
struct AudioProcessorGraph::AudioProcessorGraphBufferHelpers
{
    AudioProcessorGraphBufferHelpers() noexcept
        : currentAudioInputBuffer (nullptr) {}

    // Resizes and silences the shared channel pool.
    void setRenderingBufferSize (int newNumChannels, int newNumSamples) noexcept
    {
        renderingBuffers.setSize (newNumChannels, newNumSamples);
        renderingBuffers.clear();
    }

    // Shrinks buffers to minimal size (1x1) to free memory when not playing.
    void release() noexcept
    {
        renderingBuffers.setSize (1, 1);
        currentAudioInputBuffer = nullptr;
        currentAudioOutputBuffer.setSize (1, 1);
    }

    // Sizes the output buffer for an upcoming run; input pointer is set later,
    // per block.
    void prepareInOutBuffers(int newNumChannels, int newNumSamples) noexcept
    {
        currentAudioInputBuffer = nullptr;
        currentAudioOutputBuffer.setSize (newNumChannels, newNumSamples);
    }

    AudioSampleBuffer renderingBuffers;
    AudioSampleBuffer* currentAudioInputBuffer;   // non-owning; valid only during a block
    AudioSampleBuffer currentAudioOutputBuffer;
};
  780. //==============================================================================
//==============================================================================
// Constructs an empty graph: no nodes, no connections, buffers allocated lazily.
AudioProcessorGraph::AudioProcessorGraph()
    : lastNodeId (0), audioBuffers (new AudioProcessorGraphBufferHelpers),
      currentMidiInputBuffer (nullptr), isPrepared (false), needsReorder (false)
{
}
AudioProcessorGraph::~AudioProcessorGraph()
{
    // Tear down the rendering sequence before the nodes it references.
    clearRenderingSequence();
    clear();
}
// Display name reported for the graph when treated as a processor itself.
const String AudioProcessorGraph::getName() const
{
    return "Audio Graph";
}
  795. //==============================================================================
//==============================================================================
// Removes all nodes and connections and flags the rendering sequence stale.
void AudioProcessorGraph::clear()
{
    nodes.clear();
    connections.clear();
    needsReorder = true;
}
  802. AudioProcessorGraph::Node* AudioProcessorGraph::getNodeForId (const uint32 nodeId) const
  803. {
  804. for (int i = nodes.size(); --i >= 0;)
  805. if (nodes.getUnchecked(i)->nodeId == nodeId)
  806. return nodes.getUnchecked(i);
  807. return nullptr;
  808. }
/** Adds a processor to the graph, taking ownership of it.
    @param newProcessor  must be non-null, not the graph itself, and not
                         already wrapped by an existing node
    @param nodeId        0 to auto-assign the next id; otherwise the requested
                         id (must not already be in use — asserted below)
    @return the new node, or nullptr on a failed precondition
*/
AudioProcessorGraph::Node* AudioProcessorGraph::addNode (AudioProcessor* const newProcessor, uint32 nodeId)
{
    CARLA_SAFE_ASSERT_RETURN (newProcessor != nullptr && newProcessor != this, nullptr);

    // Reject a processor that is already owned by a node in this graph.
    for (int i = nodes.size(); --i >= 0;)
    {
        CARLA_SAFE_ASSERT_RETURN(nodes.getUnchecked(i)->getProcessor() != newProcessor, nullptr);
    }

    if (nodeId == 0)
    {
        nodeId = ++lastNodeId;
    }
    else
    {
        // you can't add a node with an id that already exists in the graph..
        wassert (getNodeForId (nodeId) == nullptr);
        // In release builds (assert passed over) any clashing node is evicted.
        removeNode (nodeId);

        // Keep auto-assigned ids ahead of explicitly requested ones.
        if (nodeId > lastNodeId)
            lastNodeId = nodeId;
    }

    Node* const n = new Node (nodeId, newProcessor);
    nodes.add (n);

    // Only rebuild the rendering sequence if we're already playing.
    if (isPrepared)
        needsReorder = true;

    n->setParentGraph (this);
    return n;
}
// Removes the node with the given id, after first disconnecting it from
// everything. Returns true if a node was actually removed.
bool AudioProcessorGraph::removeNode (const uint32 nodeId)
{
    // Drop any connections touching this node before deleting it.
    disconnectNode (nodeId);

    for (int i = nodes.size(); --i >= 0;)
    {
        if (nodes.getUnchecked(i)->nodeId == nodeId)
        {
            nodes.remove (i);

            // The render sequence may reference this node; rebuild lazily.
            if (isPrepared)
                needsReorder = true;

            return true;
        }
    }

    return false; // no node with that id
}
  850. bool AudioProcessorGraph::removeNode (Node* node)
  851. {
  852. CARLA_SAFE_ASSERT_RETURN(node != nullptr, false);
  853. return removeNode (node->nodeId);
  854. }
  855. //==============================================================================
// Looks up the exact connection between the given endpoints, or nullptr.
const AudioProcessorGraph::Connection* AudioProcessorGraph::getConnectionBetween (const uint32 sourceNodeId,
                                                                                  const int sourceChannelIndex,
                                                                                  const uint32 destNodeId,
                                                                                  const int destChannelIndex) const
{
    // Build a key connection to binary-search the sorted connection list.
    const Connection c (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex);
    GraphRenderingOps::ConnectionSorter sorter;

    // indexOfSorted returns -1 when not found; Array::operator[] yields a
    // default (null) value for an out-of-range index, so this is safe.
    return connections [connections.indexOfSorted (sorter, &c)];
}
  865. bool AudioProcessorGraph::isConnected (const uint32 possibleSourceNodeId,
  866. const uint32 possibleDestNodeId) const
  867. {
  868. for (int i = connections.size(); --i >= 0;)
  869. {
  870. const Connection* const c = connections.getUnchecked(i);
  871. if (c->sourceNodeId == possibleSourceNodeId
  872. && c->destNodeId == possibleDestNodeId)
  873. {
  874. return true;
  875. }
  876. }
  877. return false;
  878. }
// Checks whether a new connection between these endpoints would be legal:
// valid channel indices, existing nodes with the required capabilities,
// and no identical connection already present.
bool AudioProcessorGraph::canConnect (const uint32 sourceNodeId,
                                      const int sourceChannelIndex,
                                      const uint32 destNodeId,
                                      const int destChannelIndex) const
{
    // Reject negative channels, self-connections, and audio<->midi mixes
    // (midiChannelIndex must be used on both ends or on neither).
    if (sourceChannelIndex < 0
         || destChannelIndex < 0
         || sourceNodeId == destNodeId
         || (destChannelIndex == midiChannelIndex) != (sourceChannelIndex == midiChannelIndex))
        return false;

    // Source node must exist and actually provide the requested output.
    const Node* const source = getNodeForId (sourceNodeId);

    if (source == nullptr
         || (sourceChannelIndex != midiChannelIndex && sourceChannelIndex >= source->processor->getTotalNumOutputChannels())
         || (sourceChannelIndex == midiChannelIndex && ! source->processor->producesMidi()))
        return false;

    // Destination node must exist and accept the requested input.
    const Node* const dest = getNodeForId (destNodeId);

    if (dest == nullptr
         || (destChannelIndex != midiChannelIndex && destChannelIndex >= dest->processor->getTotalNumInputChannels())
         || (destChannelIndex == midiChannelIndex && ! dest->processor->acceptsMidi()))
        return false;

    // Finally, the exact connection must not already exist.
    return getConnectionBetween (sourceNodeId, sourceChannelIndex,
                                 destNodeId, destChannelIndex) == nullptr;
}
  902. bool AudioProcessorGraph::addConnection (const uint32 sourceNodeId,
  903. const int sourceChannelIndex,
  904. const uint32 destNodeId,
  905. const int destChannelIndex)
  906. {
  907. if (! canConnect (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex))
  908. return false;
  909. GraphRenderingOps::ConnectionSorter sorter;
  910. connections.addSorted (sorter, new Connection (sourceNodeId, sourceChannelIndex,
  911. destNodeId, destChannelIndex));
  912. if (isPrepared)
  913. needsReorder = true;
  914. return true;
  915. }
// Removes the connection at the given index in the sorted connection list.
void AudioProcessorGraph::removeConnection (const int index)
{
    connections.remove (index);

    // The render sequence may reference this connection; rebuild lazily.
    if (isPrepared)
        needsReorder = true;
}
  922. bool AudioProcessorGraph::removeConnection (const uint32 sourceNodeId, const int sourceChannelIndex,
  923. const uint32 destNodeId, const int destChannelIndex)
  924. {
  925. bool doneAnything = false;
  926. for (int i = connections.size(); --i >= 0;)
  927. {
  928. const Connection* const c = connections.getUnchecked(i);
  929. if (c->sourceNodeId == sourceNodeId
  930. && c->destNodeId == destNodeId
  931. && c->sourceChannelIndex == sourceChannelIndex
  932. && c->destChannelIndex == destChannelIndex)
  933. {
  934. removeConnection (i);
  935. doneAnything = true;
  936. }
  937. }
  938. return doneAnything;
  939. }
  940. bool AudioProcessorGraph::disconnectNode (const uint32 nodeId)
  941. {
  942. bool doneAnything = false;
  943. for (int i = connections.size(); --i >= 0;)
  944. {
  945. const Connection* const c = connections.getUnchecked(i);
  946. if (c->sourceNodeId == nodeId || c->destNodeId == nodeId)
  947. {
  948. removeConnection (i);
  949. doneAnything = true;
  950. }
  951. }
  952. return doneAnything;
  953. }
// Checks that an existing connection is still valid against the current
// set of nodes and their channel counts / midi capabilities.
bool AudioProcessorGraph::isConnectionLegal (const Connection* const c) const
{
    wassert (c != nullptr);

    const Node* const source = getNodeForId (c->sourceNodeId);
    const Node* const dest = getNodeForId (c->destNodeId);

    // Both endpoints must still exist, and each channel index must either be
    // a valid audio channel or the midi pseudo-channel on a midi-capable end.
    return source != nullptr
        && dest != nullptr
        && (c->sourceChannelIndex != midiChannelIndex ? isPositiveAndBelow (c->sourceChannelIndex, source->processor->getTotalNumOutputChannels())
                                                      : source->processor->producesMidi())
        && (c->destChannelIndex != midiChannelIndex ? isPositiveAndBelow (c->destChannelIndex, dest->processor->getTotalNumInputChannels())
                                                    : dest->processor->acceptsMidi());
}
  966. bool AudioProcessorGraph::removeIllegalConnections()
  967. {
  968. bool doneAnything = false;
  969. for (int i = connections.size(); --i >= 0;)
  970. {
  971. if (! isConnectionLegal (connections.getUnchecked(i)))
  972. {
  973. removeConnection (i);
  974. doneAnything = true;
  975. }
  976. }
  977. return doneAnything;
  978. }
  979. //==============================================================================
  980. static void deleteRenderOpArray (Array<void*>& ops)
  981. {
  982. for (int i = ops.size(); --i >= 0;)
  983. delete static_cast<GraphRenderingOps::AudioGraphRenderingOpBase*> (ops.getUnchecked(i));
  984. }
// Discards the current rendering sequence.
void AudioProcessorGraph::clearRenderingSequence()
{
    Array<void*> oldOps;

    {
        // Swap the ops out under the callback lock so the audio thread never
        // observes a half-cleared sequence...
        const CarlaRecursiveMutexLocker cml (getCallbackLock());
        renderingOps.swapWith (oldOps);
    }

    // ...then delete the old ops outside the lock.
    deleteRenderOpArray (oldOps);
}
  994. bool AudioProcessorGraph::isAnInputTo (const uint32 possibleInputId,
  995. const uint32 possibleDestinationId,
  996. const int recursionCheck) const
  997. {
  998. if (recursionCheck > 0)
  999. {
  1000. for (int i = connections.size(); --i >= 0;)
  1001. {
  1002. const AudioProcessorGraph::Connection* const c = connections.getUnchecked (i);
  1003. if (c->destNodeId == possibleDestinationId
  1004. && (c->sourceNodeId == possibleInputId
  1005. || isAnInputTo (possibleInputId, c->sourceNodeId, recursionCheck - 1)))
  1006. return true;
  1007. }
  1008. }
  1009. return false;
  1010. }
// Recompiles the graph into a flat list of rendering ops: topologically
// sorts the nodes, computes the ops and required scratch-buffer counts, then
// atomically swaps the new sequence in under the callback lock.
void AudioProcessorGraph::buildRenderingSequence()
{
    Array<void*> newRenderingOps;
    int numRenderingBuffersNeeded = 2;
    int numMidiBuffersNeeded = 1;

    {
        const CarlaRecursiveMutexLocker cml (reorderMutex);

        // Topological ordering: each node is inserted just before the first
        // already-placed node that it (transitively) feeds into.
        Array<Node*> orderedNodes;

        {
            const GraphRenderingOps::ConnectionLookupTable table (connections);

            for (int i = 0; i < nodes.size(); ++i)
            {
                Node* const node = nodes.getUnchecked(i);

                node->prepare (getSampleRate(), getBlockSize(), this);

                int j = 0;
                for (; j < orderedNodes.size(); ++j)
                    if (table.isAnInputTo (node->nodeId, ((Node*) orderedNodes.getUnchecked(j))->nodeId))
                        break;

                orderedNodes.insert (j, node);
            }
        }

        // Compile the ordered nodes into rendering ops and find out how many
        // shared audio/midi scratch buffers the sequence will need.
        GraphRenderingOps::RenderingOpSequenceCalculator calculator (*this, orderedNodes, newRenderingOps);

        numRenderingBuffersNeeded = calculator.getNumBuffersNeeded();
        numMidiBuffersNeeded = calculator.getNumMidiBuffersNeeded();
    }

    {
        // swap over to the new rendering sequence..
        const CarlaRecursiveMutexLocker cml (getCallbackLock());

        audioBuffers->setRenderingBufferSize (numRenderingBuffersNeeded, getBlockSize());

        // Reuse existing midi buffers (cleared), growing the pool as needed.
        for (int i = static_cast<int>(midiBuffers.size()); --i >= 0;)
            midiBuffers.getUnchecked(i)->clear();

        while (static_cast<int>(midiBuffers.size()) < numMidiBuffersNeeded)
            midiBuffers.add (new MidiBuffer());

        renderingOps.swapWith (newRenderingOps);
    }

    // delete the old ones..
    deleteRenderOpArray (newRenderingOps);
}
  1049. //==============================================================================
// Prepares the graph for playback: sizes the shared IO buffers, resets the
// midi state and rebuilds the rendering sequence from scratch.
void AudioProcessorGraph::prepareToPlay (double sampleRate, int estimatedSamplesPerBlock)
{
    setRateAndBufferSizeDetails(sampleRate, estimatedSamplesPerBlock);

    // The output buffer needs at least one channel even for a graph with no
    // declared outputs.
    audioBuffers->prepareInOutBuffers(jmax(1, getTotalNumOutputChannels()), estimatedSamplesPerBlock);

    currentMidiInputBuffer = nullptr;
    currentMidiOutputBuffer.clear();

    clearRenderingSequence();
    buildRenderingSequence();

    isPrepared = true;
}
// Releases playback resources for the graph and all hosted processors.
void AudioProcessorGraph::releaseResources()
{
    // Clear the prepared flag first so graph mutations made during teardown
    // won't schedule a reorder (mutators check isPrepared).
    isPrepared = false;

    for (int i = 0; i < nodes.size(); ++i)
        nodes.getUnchecked(i)->unprepare();

    // Shrink the shared buffers and drop midi state.
    audioBuffers->release();
    midiBuffers.clear();

    currentMidiInputBuffer = nullptr;
    currentMidiOutputBuffer.clear();
}
  1070. void AudioProcessorGraph::reset()
  1071. {
  1072. const CarlaRecursiveMutexLocker cml (getCallbackLock());
  1073. for (int i = 0; i < nodes.size(); ++i)
  1074. nodes.getUnchecked(i)->getProcessor()->reset();
  1075. }
  1076. void AudioProcessorGraph::setNonRealtime (bool isProcessingNonRealtime) noexcept
  1077. {
  1078. const CarlaRecursiveMutexLocker cml (getCallbackLock());
  1079. AudioProcessor::setNonRealtime (isProcessingNonRealtime);
  1080. for (int i = 0; i < nodes.size(); ++i)
  1081. nodes.getUnchecked(i)->getProcessor()->setNonRealtime (isProcessingNonRealtime);
  1082. }
// Runs one audio block through the graph: executes the precompiled rendering
// sequence, then copies the accumulated output and midi back to the caller.
void AudioProcessorGraph::processAudio (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    AudioSampleBuffer*& currentAudioInputBuffer = audioBuffers->currentAudioInputBuffer;
    AudioSampleBuffer& currentAudioOutputBuffer = audioBuffers->currentAudioOutputBuffer;
    AudioSampleBuffer& renderingBuffers = audioBuffers->renderingBuffers;

    const int numSamples = buffer.getNumSamples();

    // setSizeRT returns false on failure and we bail out rather than render
    // with undersized buffers. NOTE(review): assumed to be allocation-free on
    // the audio thread -- confirm in AudioSampleBuffer::setSizeRT.
    if (! audioBuffers->currentAudioOutputBuffer.setSizeRT(numSamples))
        return;
    if (! audioBuffers->renderingBuffers.setSizeRT(numSamples))
        return;

    // Expose the caller's buffers to the IO processor nodes for this block.
    currentAudioInputBuffer = &buffer;
    currentAudioOutputBuffer.clear();
    currentMidiInputBuffer = &midiMessages;
    currentMidiOutputBuffer.clear();

    // Execute the precompiled rendering sequence in order.
    for (int i = 0; i < renderingOps.size(); ++i)
    {
        GraphRenderingOps::AudioGraphRenderingOpBase* const op
            = (GraphRenderingOps::AudioGraphRenderingOpBase*) renderingOps.getUnchecked(i);

        op->perform (renderingBuffers, midiBuffers, numSamples);
    }

    // Copy the graph's accumulated output back into the caller's buffer.
    for (uint32_t i = 0; i < buffer.getNumChannels(); ++i)
        buffer.copyFrom (i, 0, currentAudioOutputBuffer, i, 0, numSamples);

    // Replace the incoming midi with whatever the graph produced.
    midiMessages.clear();
    midiMessages.addEvents (currentMidiOutputBuffer, 0, buffer.getNumSamples(), 0);
}
// The graph as a whole always reports midi support in both directions.
bool AudioProcessorGraph::acceptsMidi() const { return true; }
bool AudioProcessorGraph::producesMidi() const { return true; }
// Standard AudioProcessor entry point; all work happens in processAudio().
void AudioProcessorGraph::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    processAudio (buffer, midiMessages);
}
  1114. void AudioProcessorGraph::reorderNowIfNeeded()
  1115. {
  1116. if (needsReorder)
  1117. {
  1118. needsReorder = false;
  1119. buildRenderingSequence();
  1120. }
  1121. }
// Exposes the mutex taken by buildRenderingSequence() — presumably so
// external code can synchronise with reorders; verify against call sites.
const CarlaRecursiveMutex& AudioProcessorGraph::getReorderMutex() const
{
    return reorderMutex;
}
  1126. //==============================================================================
// An IO processor starts unattached; setParentGraph() wires it to a graph.
AudioProcessorGraph::AudioGraphIOProcessor::AudioGraphIOProcessor (const IODeviceType deviceType)
    : type (deviceType), graph (nullptr)
{
}
// Nothing to release: the graph pointer is not owned.
AudioProcessorGraph::AudioGraphIOProcessor::~AudioGraphIOProcessor()
{
}
  1134. const String AudioProcessorGraph::AudioGraphIOProcessor::getName() const
  1135. {
  1136. switch (type)
  1137. {
  1138. case audioOutputNode: return "Audio Output";
  1139. case audioInputNode: return "Audio Input";
  1140. case midiOutputNode: return "Midi Output";
  1141. case midiInputNode: return "Midi Input";
  1142. default: break;
  1143. }
  1144. return String();
  1145. }
// Nothing to allocate; just assert that we've been attached to a graph.
void AudioProcessorGraph::AudioGraphIOProcessor::prepareToPlay (double, int)
{
    CARLA_SAFE_ASSERT (graph != nullptr);
}
// No resources of its own; the buffers it touches belong to the parent graph.
void AudioProcessorGraph::AudioGraphIOProcessor::releaseResources()
{
}
// Bridges audio/midi between the parent graph's shared IO buffers and this
// node's own buffer, according to the node's device type.
void AudioProcessorGraph::AudioGraphIOProcessor::processAudio (AudioSampleBuffer& buffer,
                                                               MidiBuffer& midiMessages)
{
    CARLA_SAFE_ASSERT_RETURN(graph != nullptr,);

    AudioSampleBuffer*& currentAudioInputBuffer =
        graph->audioBuffers->currentAudioInputBuffer;

    AudioSampleBuffer& currentAudioOutputBuffer =
        graph->audioBuffers->currentAudioOutputBuffer;

    switch (type)
    {
        case audioOutputNode:
        {
            // Mix this node's incoming audio into the graph's master output,
            // over however many channels both sides share.
            for (int i = jmin (currentAudioOutputBuffer.getNumChannels(),
                               buffer.getNumChannels()); --i >= 0;)
            {
                currentAudioOutputBuffer.addFrom (i, 0, buffer, i, 0, buffer.getNumSamples());
            }

            break;
        }

        case audioInputNode:
        {
            // Copy the graph's external input audio into this node's buffer.
            for (int i = jmin (currentAudioInputBuffer->getNumChannels(),
                               buffer.getNumChannels()); --i >= 0;)
            {
                buffer.copyFrom (i, 0, *currentAudioInputBuffer, i, 0, buffer.getNumSamples());
            }

            break;
        }

        case midiOutputNode:
            // Forward midi reaching this node to the graph's external output.
            graph->currentMidiOutputBuffer.addEvents (midiMessages, 0, buffer.getNumSamples(), 0);
            break;

        case midiInputNode:
            // Inject the graph's external midi input into this node.
            midiMessages.addEvents (*graph->currentMidiInputBuffer, 0, buffer.getNumSamples(), 0);
            break;

        default:
            break;
    }
}
// Standard AudioProcessor entry point; all work happens in processAudio().
void AudioProcessorGraph::AudioGraphIOProcessor::processBlock (AudioSampleBuffer& buffer,
                                                               MidiBuffer& midiMessages)
{
    processAudio (buffer, midiMessages);
}
bool AudioProcessorGraph::AudioGraphIOProcessor::acceptsMidi() const
{
    // Deliberately inverted-looking: the graph's midi *output* node accepts
    // midi from inside the graph (in order to forward it outwards)...
    return type == midiOutputNode;
}
bool AudioProcessorGraph::AudioGraphIOProcessor::producesMidi() const
{
    // ...and the midi *input* node produces midi into the graph.
    return type == midiInputNode;
}
// Input/output here is from the perspective of the graph's outside world.
bool AudioProcessorGraph::AudioGraphIOProcessor::isInput() const noexcept { return type == audioInputNode || type == midiInputNode; }
bool AudioProcessorGraph::AudioGraphIOProcessor::isOutput() const noexcept { return type == audioOutputNode || type == midiOutputNode; }
// Attaches this IO node to a graph (or detaches it with nullptr) and
// configures its channel layout to mirror the graph's own IO.
void AudioProcessorGraph::AudioGraphIOProcessor::setParentGraph (AudioProcessorGraph* const newGraph)
{
    graph = newGraph;

    if (graph != nullptr)
    {
        // An audio-output node has as many inputs as the graph has outputs,
        // and vice versa; the midi node types carry no audio channels.
        setPlayConfigDetails (type == audioOutputNode ? graph->getTotalNumOutputChannels() : 0,
                              type == audioInputNode ? graph->getTotalNumInputChannels() : 0,
                              getSampleRate(),
                              getBlockSize());
    }
}
  1217. }