The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes

/*
  ==============================================================================

   This file is part of the JUCE library - "Jules' Utility Class Extensions"
   Copyright 2004-9 by Raw Material Software Ltd.

  ------------------------------------------------------------------------------

   JUCE can be redistributed and/or modified under the terms of the GNU General
   Public License (Version 2), as published by the Free Software Foundation.
   A copy of the license is included in the JUCE distribution, or can be found
   online at www.gnu.org/licenses.

   JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
   A PARTICULAR PURPOSE. See the GNU General Public License for more details.

  ------------------------------------------------------------------------------

   To release a closed-source product which uses JUCE, commercial licenses are
   available: visit www.rawmaterialsoftware.com/juce for more information.

  ==============================================================================
*/

#include "../../core/juce_StandardHeader.h"

BEGIN_JUCE_NAMESPACE

#include "juce_AudioProcessorGraph.h"
#include "../../events/juce_MessageManager.h"

const int AudioProcessorGraph::midiChannelIndex = 0x1000;
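// Connections use this deliberately out-of-range channel index to denote a
// node's MIDI port, so MIDI and audio links share the same Connection
// structure and the same validation logic in canConnect() and
// removeIllegalConnections().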
//==============================================================================
AudioProcessorGraph::Node::Node (const uint32 id_, AudioProcessor* const processor_)
    : id (id_),
      processor (processor_),
      isPrepared (false)
{
    jassert (processor_ != 0);
}

AudioProcessorGraph::Node::~Node()
{
    delete processor;
}

void AudioProcessorGraph::Node::prepare (const double sampleRate, const int blockSize,
                                         AudioProcessorGraph* const graph)
{
    if (! isPrepared)
    {
        isPrepared = true;

        AudioProcessorGraph::AudioGraphIOProcessor* const ioProc
            = dynamic_cast <AudioProcessorGraph::AudioGraphIOProcessor*> (processor);

        if (ioProc != 0)
            ioProc->setParentGraph (graph);

        processor->setPlayConfigDetails (processor->getNumInputChannels(),
                                         processor->getNumOutputChannels(),
                                         sampleRate, blockSize);

        processor->prepareToPlay (sampleRate, blockSize);
    }
}

void AudioProcessorGraph::Node::unprepare()
{
    if (isPrepared)
    {
        isPrepared = false;
        processor->releaseResources();
    }
}
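// Node::prepare() is driven from buildRenderingSequence(): it points any
// AudioGraphIOProcessor back at the owning graph, copies the graph's play
// configuration onto the wrapped processor and calls its prepareToPlay();
// unprepare() undoes this by calling releaseResources().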
//==============================================================================
AudioProcessorGraph::AudioProcessorGraph()
    : lastNodeId (0),
      renderingBuffers (1, 1),
      currentAudioOutputBuffer (1, 1)
{
}

AudioProcessorGraph::~AudioProcessorGraph()
{
    clearRenderingSequence();
    clear();
}

const String AudioProcessorGraph::getName() const
{
    return "Audio Graph";
}

//==============================================================================
void AudioProcessorGraph::clear()
{
    nodes.clear();
    connections.clear();
    triggerAsyncUpdate();
}

AudioProcessorGraph::Node* AudioProcessorGraph::getNodeForId (const uint32 nodeId) const
{
    for (int i = nodes.size(); --i >= 0;)
        if (nodes.getUnchecked(i)->id == nodeId)
            return nodes.getUnchecked(i);

    return 0;
}

AudioProcessorGraph::Node* AudioProcessorGraph::addNode (AudioProcessor* const newProcessor,
                                                         uint32 nodeId)
{
    if (newProcessor == 0)
    {
        jassertfalse
        return 0;
    }

    if (nodeId == 0)
    {
        nodeId = ++lastNodeId;
    }
    else
    {
        // you can't add a node with an id that already exists in the graph..
        jassert (getNodeForId (nodeId) == 0);
        removeNode (nodeId);
    }

    lastNodeId = nodeId;

    Node* const n = new Node (nodeId, newProcessor);
    nodes.add (n);
    triggerAsyncUpdate();

    AudioProcessorGraph::AudioGraphIOProcessor* const ioProc
        = dynamic_cast <AudioProcessorGraph::AudioGraphIOProcessor*> (n->processor);

    if (ioProc != 0)
        ioProc->setParentGraph (this);

    return n;
}

bool AudioProcessorGraph::removeNode (const uint32 nodeId)
{
    disconnectNode (nodeId);

    for (int i = nodes.size(); --i >= 0;)
    {
        if (nodes.getUnchecked(i)->id == nodeId)
        {
            AudioProcessorGraph::AudioGraphIOProcessor* const ioProc
                = dynamic_cast <AudioProcessorGraph::AudioGraphIOProcessor*> (nodes.getUnchecked(i)->processor);

            if (ioProc != 0)
                ioProc->setParentGraph (0);

            nodes.remove (i);
            triggerAsyncUpdate();

            return true;
        }
    }

    return false;
}

//==============================================================================
const AudioProcessorGraph::Connection* AudioProcessorGraph::getConnectionBetween (const uint32 sourceNodeId,
                                                                                  const int sourceChannelIndex,
                                                                                  const uint32 destNodeId,
                                                                                  const int destChannelIndex) const
{
    for (int i = connections.size(); --i >= 0;)
    {
        const Connection* const c = connections.getUnchecked(i);

        if (c->sourceNodeId == sourceNodeId
             && c->destNodeId == destNodeId
             && c->sourceChannelIndex == sourceChannelIndex
             && c->destChannelIndex == destChannelIndex)
        {
            return c;
        }
    }

    return 0;
}

bool AudioProcessorGraph::isConnected (const uint32 possibleSourceNodeId,
                                       const uint32 possibleDestNodeId) const
{
    for (int i = connections.size(); --i >= 0;)
    {
        const Connection* const c = connections.getUnchecked(i);

        if (c->sourceNodeId == possibleSourceNodeId
             && c->destNodeId == possibleDestNodeId)
        {
            return true;
        }
    }

    return false;
}

bool AudioProcessorGraph::canConnect (const uint32 sourceNodeId,
                                      const int sourceChannelIndex,
                                      const uint32 destNodeId,
                                      const int destChannelIndex) const
{
    if (sourceChannelIndex < 0
         || destChannelIndex < 0
         || sourceNodeId == destNodeId
         || (destChannelIndex == midiChannelIndex) != (sourceChannelIndex == midiChannelIndex))
        return false;

    const Node* const source = getNodeForId (sourceNodeId);

    if (source == 0
         || (sourceChannelIndex != midiChannelIndex && sourceChannelIndex >= source->processor->getNumOutputChannels())
         || (sourceChannelIndex == midiChannelIndex && ! source->processor->producesMidi()))
        return false;

    const Node* const dest = getNodeForId (destNodeId);

    if (dest == 0
         || (destChannelIndex != midiChannelIndex && destChannelIndex >= dest->processor->getNumInputChannels())
         || (destChannelIndex == midiChannelIndex && ! dest->processor->acceptsMidi()))
        return false;

    return getConnectionBetween (sourceNodeId, sourceChannelIndex,
                                 destNodeId, destChannelIndex) == 0;
}

bool AudioProcessorGraph::addConnection (const uint32 sourceNodeId,
                                         const int sourceChannelIndex,
                                         const uint32 destNodeId,
                                         const int destChannelIndex)
{
    if (! canConnect (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex))
        return false;

    Connection* const c = new Connection();
    c->sourceNodeId = sourceNodeId;
    c->sourceChannelIndex = sourceChannelIndex;
    c->destNodeId = destNodeId;
    c->destChannelIndex = destChannelIndex;

    connections.add (c);
    triggerAsyncUpdate();

    return true;
}

void AudioProcessorGraph::removeConnection (const int index)
{
    connections.remove (index);
    triggerAsyncUpdate();
}

bool AudioProcessorGraph::removeConnection (const uint32 sourceNodeId, const int sourceChannelIndex,
                                            const uint32 destNodeId, const int destChannelIndex)
{
    bool doneAnything = false;

    for (int i = connections.size(); --i >= 0;)
    {
        const Connection* const c = connections.getUnchecked(i);

        if (c->sourceNodeId == sourceNodeId
             && c->destNodeId == destNodeId
             && c->sourceChannelIndex == sourceChannelIndex
             && c->destChannelIndex == destChannelIndex)
        {
            removeConnection (i);
            doneAnything = true;
            triggerAsyncUpdate();
        }
    }

    return doneAnything;
}

bool AudioProcessorGraph::disconnectNode (const uint32 nodeId)
{
    bool doneAnything = false;

    for (int i = connections.size(); --i >= 0;)
    {
        const Connection* const c = connections.getUnchecked(i);

        if (c->sourceNodeId == nodeId || c->destNodeId == nodeId)
        {
            removeConnection (i);
            doneAnything = true;
            triggerAsyncUpdate();
        }
    }

    return doneAnything;
}

bool AudioProcessorGraph::removeIllegalConnections()
{
    bool doneAnything = false;

    for (int i = connections.size(); --i >= 0;)
    {
        const Connection* const c = connections.getUnchecked(i);

        const Node* const source = getNodeForId (c->sourceNodeId);
        const Node* const dest = getNodeForId (c->destNodeId);

        if (source == 0 || dest == 0
             || (c->sourceChannelIndex != midiChannelIndex
                  && (((unsigned int) c->sourceChannelIndex) >= (unsigned int) source->processor->getNumOutputChannels()))
             || (c->sourceChannelIndex == midiChannelIndex
                  && ! source->processor->producesMidi())
             || (c->destChannelIndex != midiChannelIndex
                  && (((unsigned int) c->destChannelIndex) >= (unsigned int) dest->processor->getNumInputChannels()))
             || (c->destChannelIndex == midiChannelIndex
                  && ! dest->processor->acceptsMidi()))
        {
            removeConnection (i);
            doneAnything = true;
            triggerAsyncUpdate();
        }
    }

    return doneAnything;
}
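/*
    A minimal usage sketch of the node/connection API above (not part of the
    library). "MyFilterProcessor" is a hypothetical AudioProcessor subclass,
    used purely for illustration; the graph takes ownership of every processor
    passed to addNode().

        AudioProcessorGraph graph;
        graph.setPlayConfigDetails (2, 2, 44100.0, 512);

        AudioProcessorGraph::Node* const in
            = graph.addNode (new AudioProcessorGraph::AudioGraphIOProcessor (
                                 AudioProcessorGraph::AudioGraphIOProcessor::audioInputNode));
        AudioProcessorGraph::Node* const out
            = graph.addNode (new AudioProcessorGraph::AudioGraphIOProcessor (
                                 AudioProcessorGraph::AudioGraphIOProcessor::audioOutputNode));
        AudioProcessorGraph::Node* const fx
            = graph.addNode (new MyFilterProcessor());

        graph.addConnection (in->id, 0, fx->id, 0);   // left
        graph.addConnection (in->id, 1, fx->id, 1);   // right
        graph.addConnection (fx->id, 0, out->id, 0);
        graph.addConnection (fx->id, 1, out->id, 1);
*/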
//==============================================================================
namespace GraphRenderingOps
{

//==============================================================================
class AudioGraphRenderingOp
{
public:
    AudioGraphRenderingOp() {}
    virtual ~AudioGraphRenderingOp() {}

    virtual void perform (AudioSampleBuffer& sharedBufferChans,
                          const OwnedArray <MidiBuffer>& sharedMidiBuffers,
                          const int numSamples) = 0;

    juce_UseDebuggingNewOperator
};
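// Each op is one step of the precomputed render sequence that the graph's
// processBlock() replays on every callback: clear/copy/add a channel of the
// shared audio buffer, do the same for a shared MIDI buffer, or run one
// node's processBlock() on a view of those buffers.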
//==============================================================================
class ClearChannelOp : public AudioGraphRenderingOp
{
public:
    ClearChannelOp (const int channelNum_)
        : channelNum (channelNum_)
    {}

    ~ClearChannelOp() {}

    void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray <MidiBuffer>&, const int numSamples)
    {
        sharedBufferChans.clear (channelNum, 0, numSamples);
    }

private:
    const int channelNum;

    ClearChannelOp (const ClearChannelOp&);
    ClearChannelOp& operator= (const ClearChannelOp&);
};

//==============================================================================
class CopyChannelOp : public AudioGraphRenderingOp
{
public:
    CopyChannelOp (const int srcChannelNum_, const int dstChannelNum_)
        : srcChannelNum (srcChannelNum_),
          dstChannelNum (dstChannelNum_)
    {}

    ~CopyChannelOp() {}

    void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray <MidiBuffer>&, const int numSamples)
    {
        sharedBufferChans.copyFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples);
    }

private:
    const int srcChannelNum, dstChannelNum;

    CopyChannelOp (const CopyChannelOp&);
    CopyChannelOp& operator= (const CopyChannelOp&);
};

//==============================================================================
class AddChannelOp : public AudioGraphRenderingOp
{
public:
    AddChannelOp (const int srcChannelNum_, const int dstChannelNum_)
        : srcChannelNum (srcChannelNum_),
          dstChannelNum (dstChannelNum_)
    {}

    ~AddChannelOp() {}

    void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray <MidiBuffer>&, const int numSamples)
    {
        sharedBufferChans.addFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples);
    }

private:
    const int srcChannelNum, dstChannelNum;

    AddChannelOp (const AddChannelOp&);
    AddChannelOp& operator= (const AddChannelOp&);
};

//==============================================================================
class ClearMidiBufferOp : public AudioGraphRenderingOp
{
public:
    ClearMidiBufferOp (const int bufferNum_)
        : bufferNum (bufferNum_)
    {}

    ~ClearMidiBufferOp() {}

    void perform (AudioSampleBuffer&, const OwnedArray <MidiBuffer>& sharedMidiBuffers, const int)
    {
        sharedMidiBuffers.getUnchecked (bufferNum)->clear();
    }

private:
    const int bufferNum;

    ClearMidiBufferOp (const ClearMidiBufferOp&);
    ClearMidiBufferOp& operator= (const ClearMidiBufferOp&);
};

//==============================================================================
class CopyMidiBufferOp : public AudioGraphRenderingOp
{
public:
    CopyMidiBufferOp (const int srcBufferNum_, const int dstBufferNum_)
        : srcBufferNum (srcBufferNum_),
          dstBufferNum (dstBufferNum_)
    {}

    ~CopyMidiBufferOp() {}

    void perform (AudioSampleBuffer&, const OwnedArray <MidiBuffer>& sharedMidiBuffers, const int)
    {
        *sharedMidiBuffers.getUnchecked (dstBufferNum) = *sharedMidiBuffers.getUnchecked (srcBufferNum);
    }

private:
    const int srcBufferNum, dstBufferNum;

    CopyMidiBufferOp (const CopyMidiBufferOp&);
    CopyMidiBufferOp& operator= (const CopyMidiBufferOp&);
};

//==============================================================================
class AddMidiBufferOp : public AudioGraphRenderingOp
{
public:
    AddMidiBufferOp (const int srcBufferNum_, const int dstBufferNum_)
        : srcBufferNum (srcBufferNum_),
          dstBufferNum (dstBufferNum_)
    {}

    ~AddMidiBufferOp() {}

    void perform (AudioSampleBuffer&, const OwnedArray <MidiBuffer>& sharedMidiBuffers, const int numSamples)
    {
        sharedMidiBuffers.getUnchecked (dstBufferNum)
            ->addEvents (*sharedMidiBuffers.getUnchecked (srcBufferNum), 0, numSamples, 0);
    }

private:
    const int srcBufferNum, dstBufferNum;

    AddMidiBufferOp (const AddMidiBufferOp&);
    AddMidiBufferOp& operator= (const AddMidiBufferOp&);
};

//==============================================================================
class ProcessBufferOp : public AudioGraphRenderingOp
{
public:
    ProcessBufferOp (const AudioProcessorGraph::Node::Ptr& node_,
                     const Array <int>& audioChannelsToUse_,
                     const int totalChans_,
                     const int midiBufferToUse_)
        : node (node_),
          processor (node_->processor),
          audioChannelsToUse (audioChannelsToUse_),
          totalChans (jmax (1, totalChans_)),
          midiBufferToUse (midiBufferToUse_)
    {
        channels.calloc (totalChans);

        while (audioChannelsToUse.size() < totalChans)
            audioChannelsToUse.add (0);
    }

    ~ProcessBufferOp()
    {
    }

    void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray <MidiBuffer>& sharedMidiBuffers, const int numSamples)
    {
        for (int i = totalChans; --i >= 0;)
            channels[i] = sharedBufferChans.getSampleData (audioChannelsToUse.getUnchecked (i), 0);

        AudioSampleBuffer buffer (channels, totalChans, numSamples);

        processor->processBlock (buffer, *sharedMidiBuffers.getUnchecked (midiBufferToUse));
    }

    const AudioProcessorGraph::Node::Ptr node;
    AudioProcessor* const processor;

private:
    Array <int> audioChannelsToUse;
    HeapBlock <float*> channels;
    int totalChans;
    int midiBufferToUse;

    ProcessBufferOp (const ProcessBufferOp&);
    ProcessBufferOp& operator= (const ProcessBufferOp&);
};
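// ProcessBufferOp doesn't copy any audio: perform() gathers pointers to the
// shared channels listed in audioChannelsToUse, wraps them in a temporary
// AudioSampleBuffer, and runs the node's processBlock() on that view together
// with the chosen shared MIDI buffer.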
//==============================================================================
/** Used to calculate the correct sequence of rendering ops needed, based on
    the best re-use of shared buffers at each stage.
*/
class RenderingOpSequenceCalculator
{
public:
    //==============================================================================
    RenderingOpSequenceCalculator (AudioProcessorGraph& graph_,
                                   const VoidArray& orderedNodes_,
                                   VoidArray& renderingOps)
        : graph (graph_),
          orderedNodes (orderedNodes_)
    {
        nodeIds.add (-2); // first buffer is read-only zeros
        channels.add (0);

        midiNodeIds.add (-2);

        for (int i = 0; i < orderedNodes.size(); ++i)
        {
            createRenderingOpsForNode ((AudioProcessorGraph::Node*) orderedNodes.getUnchecked(i),
                                       renderingOps, i);

            markAnyUnusedBuffersAsFree (i);
        }
    }

    int getNumBuffersNeeded() const         { return nodeIds.size(); }
    int getNumMidiBuffersNeeded() const     { return midiNodeIds.size(); }

    //==============================================================================
    juce_UseDebuggingNewOperator

private:
    AudioProcessorGraph& graph;
    const VoidArray& orderedNodes;
    Array <int> nodeIds, channels, midiNodeIds;
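    // Bookkeeping for the shared buffer pool: nodeIds[i] / channels[i] record
    // which node output the i'th shared audio channel currently holds (-2 marks
    // the read-only all-zero channel at index 0, -1 marks a free channel), and
    // midiNodeIds does the same for the shared MIDI buffers.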
    //==============================================================================
    void createRenderingOpsForNode (AudioProcessorGraph::Node* const node,
                                    VoidArray& renderingOps,
                                    const int ourRenderingIndex)
    {
        const int numIns = node->processor->getNumInputChannels();
        const int numOuts = node->processor->getNumOutputChannels();
        const int totalChans = jmax (numIns, numOuts);

        Array <int> audioChannelsToUse;
        int midiBufferToUse = -1;

        for (int inputChan = 0; inputChan < numIns; ++inputChan)
        {
            // get a list of all the inputs to this node
            Array <int> sourceNodes, sourceOutputChans;

            for (int i = graph.getNumConnections(); --i >= 0;)
            {
                const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

                if (c->destNodeId == node->id && c->destChannelIndex == inputChan)
                {
                    sourceNodes.add (c->sourceNodeId);
                    sourceOutputChans.add (c->sourceChannelIndex);
                }
            }

            int bufIndex = -1;

            if (sourceNodes.size() == 0)
            {
                // unconnected input channel
                if (inputChan >= numOuts)
                {
                    bufIndex = getReadOnlyEmptyBuffer();
                    jassert (bufIndex >= 0);
                }
                else
                {
                    bufIndex = getFreeBuffer (false);
                    renderingOps.add (new ClearChannelOp (bufIndex));
                }
            }
            else if (sourceNodes.size() == 1)
            {
                // channel with a straightforward single input..
                const int srcNode = sourceNodes.getUnchecked(0);
                const int srcChan = sourceOutputChans.getUnchecked(0);

                bufIndex = getBufferContaining (srcNode, srcChan);

                if (bufIndex < 0)
                {
                    // if not found, this is probably a feedback loop
                    bufIndex = getReadOnlyEmptyBuffer();
                    jassert (bufIndex >= 0);
                }

                if (inputChan < numOuts
                     && isBufferNeededLater (ourRenderingIndex,
                                             inputChan,
                                             srcNode, srcChan))
                {
                    // can't mess up this channel because it's needed later by another node, so we
                    // need to use a copy of it..
                    const int newFreeBuffer = getFreeBuffer (false);

                    renderingOps.add (new CopyChannelOp (bufIndex, newFreeBuffer));

                    bufIndex = newFreeBuffer;
                }
            }
            else
            {
                // channel with a mix of several inputs..

                // try to find a re-usable channel from our inputs..
                int reusableInputIndex = -1;

                for (int i = 0; i < sourceNodes.size(); ++i)
                {
                    const int sourceBufIndex = getBufferContaining (sourceNodes.getUnchecked(i),
                                                                    sourceOutputChans.getUnchecked(i));

                    if (sourceBufIndex >= 0
                         && ! isBufferNeededLater (ourRenderingIndex,
                                                   inputChan,
                                                   sourceNodes.getUnchecked(i),
                                                   sourceOutputChans.getUnchecked(i)))
                    {
                        // we've found one of our input chans that can be re-used..
                        reusableInputIndex = i;
                        bufIndex = sourceBufIndex;
                        break;
                    }
                }

                if (reusableInputIndex < 0)
                {
                    // can't re-use any of our input chans, so get a new one and copy everything into it..
                    bufIndex = getFreeBuffer (false);
                    jassert (bufIndex != 0);

                    const int srcIndex = getBufferContaining (sourceNodes.getUnchecked (0),
                                                              sourceOutputChans.getUnchecked (0));
                    if (srcIndex < 0)
                    {
                        // if not found, this is probably a feedback loop
                        renderingOps.add (new ClearChannelOp (bufIndex));
                    }
                    else
                    {
                        renderingOps.add (new CopyChannelOp (srcIndex, bufIndex));
                    }

                    reusableInputIndex = 0;
                }

                for (int j = 0; j < sourceNodes.size(); ++j)
                {
                    if (j != reusableInputIndex)
                    {
                        const int srcIndex = getBufferContaining (sourceNodes.getUnchecked(j),
                                                                  sourceOutputChans.getUnchecked(j));
                        if (srcIndex >= 0)
                            renderingOps.add (new AddChannelOp (srcIndex, bufIndex));
                    }
                }
            }

            jassert (bufIndex >= 0);
            audioChannelsToUse.add (bufIndex);

            if (inputChan < numOuts)
                markBufferAsContaining (bufIndex, node->id, inputChan);
        }

        for (int outputChan = numIns; outputChan < numOuts; ++outputChan)
        {
            const int bufIndex = getFreeBuffer (false);
            jassert (bufIndex != 0);
            audioChannelsToUse.add (bufIndex);

            markBufferAsContaining (bufIndex, node->id, outputChan);
        }

        // Now the same thing for midi..
        Array <int> midiSourceNodes;

        for (int i = graph.getNumConnections(); --i >= 0;)
        {
            const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

            if (c->destNodeId == node->id && c->destChannelIndex == AudioProcessorGraph::midiChannelIndex)
                midiSourceNodes.add (c->sourceNodeId);
        }

        if (midiSourceNodes.size() == 0)
        {
            // No midi inputs..
            midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi

            if (node->processor->acceptsMidi() || node->processor->producesMidi())
                renderingOps.add (new ClearMidiBufferOp (midiBufferToUse));
        }
        else if (midiSourceNodes.size() == 1)
        {
            // One midi input..
            midiBufferToUse = getBufferContaining (midiSourceNodes.getUnchecked(0),
                                                   AudioProcessorGraph::midiChannelIndex);

            if (midiBufferToUse >= 0)
            {
                if (isBufferNeededLater (ourRenderingIndex,
                                         AudioProcessorGraph::midiChannelIndex,
                                         midiSourceNodes.getUnchecked(0),
                                         AudioProcessorGraph::midiChannelIndex))
                {
                    // can't mess up this channel because it's needed later by another node, so we
                    // need to use a copy of it..
                    const int newFreeBuffer = getFreeBuffer (true);

                    renderingOps.add (new CopyMidiBufferOp (midiBufferToUse, newFreeBuffer));

                    midiBufferToUse = newFreeBuffer;
                }
            }
            else
            {
                // probably a feedback loop, so just use an empty one..
                midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi
            }
        }
        else
        {
            // More than one midi input being mixed..
            int reusableInputIndex = -1;

            for (int i = 0; i < midiSourceNodes.size(); ++i)
            {
                const int sourceBufIndex = getBufferContaining (midiSourceNodes.getUnchecked(i),
                                                                AudioProcessorGraph::midiChannelIndex);

                if (sourceBufIndex >= 0
                     && ! isBufferNeededLater (ourRenderingIndex,
                                               AudioProcessorGraph::midiChannelIndex,
                                               midiSourceNodes.getUnchecked(i),
                                               AudioProcessorGraph::midiChannelIndex))
                {
                    // we've found one of our input buffers that can be re-used..
                    reusableInputIndex = i;
                    midiBufferToUse = sourceBufIndex;
                    break;
                }
            }

            if (reusableInputIndex < 0)
            {
                // can't re-use any of our input buffers, so get a new one and copy everything into it..
                midiBufferToUse = getFreeBuffer (true);
                jassert (midiBufferToUse >= 0);

                const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(0),
                                                          AudioProcessorGraph::midiChannelIndex);
                if (srcIndex >= 0)
                    renderingOps.add (new CopyMidiBufferOp (srcIndex, midiBufferToUse));
                else
                    renderingOps.add (new ClearMidiBufferOp (midiBufferToUse));

                reusableInputIndex = 0;
            }

            for (int j = 0; j < midiSourceNodes.size(); ++j)
            {
                if (j != reusableInputIndex)
                {
                    const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(j),
                                                              AudioProcessorGraph::midiChannelIndex);
                    if (srcIndex >= 0)
                        renderingOps.add (new AddMidiBufferOp (srcIndex, midiBufferToUse));
                }
            }
        }

        if (node->processor->producesMidi())
            markBufferAsContaining (midiBufferToUse, node->id,
                                    AudioProcessorGraph::midiChannelIndex);

        renderingOps.add (new ProcessBufferOp (node, audioChannelsToUse,
                                               totalChans, midiBufferToUse));
    }

    //==============================================================================
    int getFreeBuffer (const bool forMidi)
    {
        if (forMidi)
        {
            for (int i = 1; i < midiNodeIds.size(); ++i)
                if (midiNodeIds.getUnchecked(i) < 0)
                    return i;

            midiNodeIds.add (-1);
            return midiNodeIds.size() - 1;
        }
        else
        {
            for (int i = 1; i < nodeIds.size(); ++i)
                if (nodeIds.getUnchecked(i) < 0)
                    return i;

            nodeIds.add (-1);
            channels.add (0);
            return nodeIds.size() - 1;
        }
    }

    int getReadOnlyEmptyBuffer() const
    {
        return 0;
    }

    int getBufferContaining (const int nodeId, const int outputChannel) const
    {
        if (outputChannel == AudioProcessorGraph::midiChannelIndex)
        {
            for (int i = midiNodeIds.size(); --i >= 0;)
                if (midiNodeIds.getUnchecked(i) == nodeId)
                    return i;
        }
        else
        {
            for (int i = nodeIds.size(); --i >= 0;)
                if (nodeIds.getUnchecked(i) == nodeId
                     && channels.getUnchecked(i) == outputChannel)
                    return i;
        }

        return -1;
    }

    void markAnyUnusedBuffersAsFree (const int stepIndex)
    {
        int i;
        for (i = 0; i < nodeIds.size(); ++i)
        {
            if (nodeIds.getUnchecked(i) >= 0
                 && ! isBufferNeededLater (stepIndex, -1,
                                           nodeIds.getUnchecked(i),
                                           channels.getUnchecked(i)))
            {
                nodeIds.set (i, -1);
            }
        }

        for (i = 0; i < midiNodeIds.size(); ++i)
        {
            if (midiNodeIds.getUnchecked(i) >= 0
                 && ! isBufferNeededLater (stepIndex, -1,
                                           midiNodeIds.getUnchecked(i),
                                           AudioProcessorGraph::midiChannelIndex))
            {
                midiNodeIds.set (i, -1);
            }
        }
    }

    bool isBufferNeededLater (int stepIndexToSearchFrom,
                              int inputChannelOfIndexToIgnore,
                              const int nodeId,
                              const int outputChanIndex) const
    {
        while (stepIndexToSearchFrom < orderedNodes.size())
        {
            const AudioProcessorGraph::Node* const node = (const AudioProcessorGraph::Node*) orderedNodes.getUnchecked (stepIndexToSearchFrom);

            if (outputChanIndex == AudioProcessorGraph::midiChannelIndex)
            {
                if (inputChannelOfIndexToIgnore != AudioProcessorGraph::midiChannelIndex
                     && graph.getConnectionBetween (nodeId, AudioProcessorGraph::midiChannelIndex,
                                                    node->id, AudioProcessorGraph::midiChannelIndex) != 0)
                    return true;
            }
            else
            {
                for (int i = 0; i < node->processor->getNumInputChannels(); ++i)
                    if (i != inputChannelOfIndexToIgnore
                         && graph.getConnectionBetween (nodeId, outputChanIndex,
                                                        node->id, i) != 0)
                        return true;
            }

            inputChannelOfIndexToIgnore = -1;
            ++stepIndexToSearchFrom;
        }

        return false;
    }
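    // isBufferNeededLater() walks forward through the remaining nodes in the
    // ordered list and returns true if any later connection still reads the
    // given node/channel output; inputChannelOfIndexToIgnore excludes the input
    // of the node currently being wired up (the consumer doing the asking), and
    // is reset to -1 after the first step so later nodes are checked in full.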
    void markBufferAsContaining (int bufferNum, int nodeId, int outputIndex)
    {
        if (outputIndex == AudioProcessorGraph::midiChannelIndex)
        {
            jassert (bufferNum > 0 && bufferNum < midiNodeIds.size());
            midiNodeIds.set (bufferNum, nodeId);
        }
        else
        {
            jassert (bufferNum >= 0 && bufferNum < nodeIds.size());

            nodeIds.set (bufferNum, nodeId);
            channels.set (bufferNum, outputIndex);
        }
    }

    RenderingOpSequenceCalculator (const RenderingOpSequenceCalculator&);
    RenderingOpSequenceCalculator& operator= (const RenderingOpSequenceCalculator&);
};

}

//==============================================================================
void AudioProcessorGraph::clearRenderingSequence()
{
    const ScopedLock sl (renderLock);

    for (int i = renderingOps.size(); --i >= 0;)
    {
        GraphRenderingOps::AudioGraphRenderingOp* const r
            = (GraphRenderingOps::AudioGraphRenderingOp*) renderingOps.getUnchecked(i);

        renderingOps.remove (i);
        delete r;
    }
}

bool AudioProcessorGraph::isAnInputTo (const uint32 possibleInputId,
                                       const uint32 possibleDestinationId,
                                       const int recursionCheck) const
{
    if (recursionCheck > 0)
    {
        for (int i = connections.size(); --i >= 0;)
        {
            const AudioProcessorGraph::Connection* const c = connections.getUnchecked (i);

            if (c->destNodeId == possibleDestinationId
                 && (c->sourceNodeId == possibleInputId
                      || isAnInputTo (possibleInputId, c->sourceNodeId, recursionCheck - 1)))
                return true;
        }
    }

    return false;
}
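// isAnInputTo() searches upstream recursively; recursionCheck (passed as
// nodes.size() + 1 from buildRenderingSequence) bounds the depth so that a
// feedback loop in the connection list can't recurse indefinitely.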
void AudioProcessorGraph::buildRenderingSequence()
{
    VoidArray newRenderingOps;
    int numRenderingBuffersNeeded = 2;
    int numMidiBuffersNeeded = 1;

    {
        MessageManagerLock mml;

        VoidArray orderedNodes;

        int i;
        for (i = 0; i < nodes.size(); ++i)
        {
            Node* const node = nodes.getUnchecked(i);

            node->prepare (getSampleRate(), getBlockSize(), this);

            int j = 0;
            for (; j < orderedNodes.size(); ++j)
                if (isAnInputTo (node->id,
                                 ((Node*) orderedNodes.getUnchecked (j))->id,
                                 nodes.size() + 1))
                    break;

            orderedNodes.insert (j, node);
        }

        GraphRenderingOps::RenderingOpSequenceCalculator calculator (*this, orderedNodes, newRenderingOps);

        numRenderingBuffersNeeded = calculator.getNumBuffersNeeded();
        numMidiBuffersNeeded = calculator.getNumMidiBuffersNeeded();
    }

    VoidArray oldRenderingOps (renderingOps);

    {
        // swap over to the new rendering sequence..
        const ScopedLock sl (renderLock);

        renderingBuffers.setSize (numRenderingBuffersNeeded, getBlockSize());
        renderingBuffers.clear();

        for (int i = midiBuffers.size(); --i >= 0;)
            midiBuffers.getUnchecked(i)->clear();

        while (midiBuffers.size() < numMidiBuffersNeeded)
            midiBuffers.add (new MidiBuffer());

        renderingOps = newRenderingOps;
    }

    for (int i = oldRenderingOps.size(); --i >= 0;)
        delete (GraphRenderingOps::AudioGraphRenderingOp*) oldRenderingOps.getUnchecked(i);
}
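// buildRenderingSequence() inserts each node in front of the first node it
// feeds into (a rough topological order), lets the calculator flatten that
// order into a list of rendering ops, then swaps the new list and resized
// shared buffers in under renderLock so processBlock() never sees a
// half-built sequence; the old ops are deleted outside the lock.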
void AudioProcessorGraph::handleAsyncUpdate()
{
    buildRenderingSequence();
}

//==============================================================================
void AudioProcessorGraph::prepareToPlay (double /*sampleRate*/, int estimatedSamplesPerBlock)
{
    currentAudioInputBuffer = 0;
    currentAudioOutputBuffer.setSize (jmax (1, getNumOutputChannels()), estimatedSamplesPerBlock);
    currentMidiInputBuffer = 0;
    currentMidiOutputBuffer.clear();

    clearRenderingSequence();
    buildRenderingSequence();
}

void AudioProcessorGraph::releaseResources()
{
    for (int i = 0; i < nodes.size(); ++i)
        nodes.getUnchecked(i)->unprepare();

    renderingBuffers.setSize (1, 1);
    midiBuffers.clear();

    currentAudioInputBuffer = 0;
    currentAudioOutputBuffer.setSize (1, 1);
    currentMidiInputBuffer = 0;
    currentMidiOutputBuffer.clear();
}

void AudioProcessorGraph::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    const int numSamples = buffer.getNumSamples();

    const ScopedLock sl (renderLock);

    currentAudioInputBuffer = &buffer;
    currentAudioOutputBuffer.setSize (jmax (1, buffer.getNumChannels()), numSamples);
    currentAudioOutputBuffer.clear();

    currentMidiInputBuffer = &midiMessages;
    currentMidiOutputBuffer.clear();

    int i;
    for (i = 0; i < renderingOps.size(); ++i)
    {
        GraphRenderingOps::AudioGraphRenderingOp* const op
            = (GraphRenderingOps::AudioGraphRenderingOp*) renderingOps.getUnchecked(i);

        op->perform (renderingBuffers, midiBuffers, numSamples);
    }

    for (i = 0; i < buffer.getNumChannels(); ++i)
        buffer.copyFrom (i, 0, currentAudioOutputBuffer, i, 0, numSamples);

    midiMessages.clear();
    midiMessages.addEvents (currentMidiOutputBuffer, 0, buffer.getNumSamples(), 0);
}
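// processBlock() exposes the caller's buffers to the graph's IO nodes via
// currentAudioInputBuffer / currentMidiInputBuffer, replays the precomputed
// op list over the shared rendering buffers, then copies the audio
// accumulated in currentAudioOutputBuffer (and the collected MIDI) back into
// the caller's buffer and message list.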
const String AudioProcessorGraph::getInputChannelName (const int channelIndex) const
{
    return "Input " + String (channelIndex + 1);
}

const String AudioProcessorGraph::getOutputChannelName (const int channelIndex) const
{
    return "Output " + String (channelIndex + 1);
}

bool AudioProcessorGraph::isInputChannelStereoPair (int /*index*/) const
{
    return true;
}

bool AudioProcessorGraph::isOutputChannelStereoPair (int /*index*/) const
{
    return true;
}

bool AudioProcessorGraph::acceptsMidi() const
{
    return true;
}

bool AudioProcessorGraph::producesMidi() const
{
    return true;
}

void AudioProcessorGraph::getStateInformation (JUCE_NAMESPACE::MemoryBlock& /*destData*/)
{
}

void AudioProcessorGraph::setStateInformation (const void* /*data*/, int /*sizeInBytes*/)
{
}

//==============================================================================
AudioProcessorGraph::AudioGraphIOProcessor::AudioGraphIOProcessor (const IODeviceType type_)
    : type (type_),
      graph (0)
{
}

AudioProcessorGraph::AudioGraphIOProcessor::~AudioGraphIOProcessor()
{
}

const String AudioProcessorGraph::AudioGraphIOProcessor::getName() const
{
    switch (type)
    {
        case audioOutputNode:
            return "Audio Output";
        case audioInputNode:
            return "Audio Input";
        case midiOutputNode:
            return "Midi Output";
        case midiInputNode:
            return "Midi Input";
        default:
            break;
    }

    return String::empty;
}

void AudioProcessorGraph::AudioGraphIOProcessor::fillInPluginDescription (PluginDescription& d) const
{
    d.name = getName();
    d.uid = d.name.hashCode();
    d.category = "I/O devices";
    d.pluginFormatName = "Internal";
    d.manufacturerName = "Raw Material Software";
    d.version = "1.0";
    d.isInstrument = false;

    d.numInputChannels = getNumInputChannels();
    if (type == audioOutputNode && graph != 0)
        d.numInputChannels = graph->getNumInputChannels();

    d.numOutputChannels = getNumOutputChannels();
    if (type == audioInputNode && graph != 0)
        d.numOutputChannels = graph->getNumOutputChannels();
}

void AudioProcessorGraph::AudioGraphIOProcessor::prepareToPlay (double, int)
{
    jassert (graph != 0);
}

void AudioProcessorGraph::AudioGraphIOProcessor::releaseResources()
{
}

void AudioProcessorGraph::AudioGraphIOProcessor::processBlock (AudioSampleBuffer& buffer,
                                                               MidiBuffer& midiMessages)
{
    jassert (graph != 0);

    switch (type)
    {
        case audioOutputNode:
        {
            for (int i = jmin (graph->currentAudioOutputBuffer.getNumChannels(),
                               buffer.getNumChannels()); --i >= 0;)
            {
                graph->currentAudioOutputBuffer.addFrom (i, 0, buffer, i, 0, buffer.getNumSamples());
            }

            break;
        }

        case audioInputNode:
        {
            for (int i = jmin (graph->currentAudioInputBuffer->getNumChannels(),
                               buffer.getNumChannels()); --i >= 0;)
            {
                buffer.copyFrom (i, 0, *graph->currentAudioInputBuffer, i, 0, buffer.getNumSamples());
            }

            break;
        }

        case midiOutputNode:
            graph->currentMidiOutputBuffer.addEvents (midiMessages, 0, buffer.getNumSamples(), 0);
            break;

        case midiInputNode:
            midiMessages.addEvents (*graph->currentMidiInputBuffer, 0, buffer.getNumSamples(), 0);
            break;

        default:
            break;
    }
}
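// Each IO node bridges the graph's own ports: an audioInputNode copies from
// the buffer the host passed to AudioProcessorGraph::processBlock(), an
// audioOutputNode adds its input into the graph's output accumulator, and the
// MIDI nodes do the equivalent with currentMidiInputBuffer /
// currentMidiOutputBuffer.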
bool AudioProcessorGraph::AudioGraphIOProcessor::acceptsMidi() const
{
    return type == midiOutputNode;
}

bool AudioProcessorGraph::AudioGraphIOProcessor::producesMidi() const
{
    return type == midiInputNode;
}

const String AudioProcessorGraph::AudioGraphIOProcessor::getInputChannelName (const int channelIndex) const
{
    switch (type)
    {
        case audioOutputNode:
            return "Output " + String (channelIndex + 1);
        case midiOutputNode:
            return "Midi Output";
        default:
            break;
    }

    return String::empty;
}

const String AudioProcessorGraph::AudioGraphIOProcessor::getOutputChannelName (const int channelIndex) const
{
    switch (type)
    {
        case audioInputNode:
            return "Input " + String (channelIndex + 1);
        case midiInputNode:
            return "Midi Input";
        default:
            break;
    }

    return String::empty;
}

bool AudioProcessorGraph::AudioGraphIOProcessor::isInputChannelStereoPair (int /*index*/) const
{
    return type == audioInputNode || type == audioOutputNode;
}

bool AudioProcessorGraph::AudioGraphIOProcessor::isOutputChannelStereoPair (int index) const
{
    return isInputChannelStereoPair (index);
}

bool AudioProcessorGraph::AudioGraphIOProcessor::isInput() const
{
    return type == audioInputNode || type == midiInputNode;
}

bool AudioProcessorGraph::AudioGraphIOProcessor::isOutput() const
{
    return type == audioOutputNode || type == midiOutputNode;
}

AudioProcessorEditor* AudioProcessorGraph::AudioGraphIOProcessor::createEditor()
{
    return 0;
}

int AudioProcessorGraph::AudioGraphIOProcessor::getNumParameters()                      { return 0; }
const String AudioProcessorGraph::AudioGraphIOProcessor::getParameterName (int)         { return String::empty; }
float AudioProcessorGraph::AudioGraphIOProcessor::getParameter (int)                    { return 0.0f; }
const String AudioProcessorGraph::AudioGraphIOProcessor::getParameterText (int)         { return String::empty; }
void AudioProcessorGraph::AudioGraphIOProcessor::setParameter (int, float)              { }

int AudioProcessorGraph::AudioGraphIOProcessor::getNumPrograms()                        { return 0; }
int AudioProcessorGraph::AudioGraphIOProcessor::getCurrentProgram()                     { return 0; }
void AudioProcessorGraph::AudioGraphIOProcessor::setCurrentProgram (int)                { }

const String AudioProcessorGraph::AudioGraphIOProcessor::getProgramName (int)           { return String::empty; }
void AudioProcessorGraph::AudioGraphIOProcessor::changeProgramName (int, const String&) { }

void AudioProcessorGraph::AudioGraphIOProcessor::getStateInformation (JUCE_NAMESPACE::MemoryBlock&)
{
}

void AudioProcessorGraph::AudioGraphIOProcessor::setStateInformation (const void*, int)
{
}

void AudioProcessorGraph::AudioGraphIOProcessor::setParentGraph (AudioProcessorGraph* const newGraph)
{
    graph = newGraph;

    if (graph != 0)
    {
        setPlayConfigDetails (type == audioOutputNode ? graph->getNumOutputChannels() : 0,
                              type == audioInputNode ? graph->getNumInputChannels() : 0,
                              getSampleRate(),
                              getBlockSize());

        updateHostDisplay();
    }
}

END_JUCE_NAMESPACE