The JUCE cross-platform C++ framework, with DISTRHO/KXStudio specific changes

/*
  ==============================================================================
   This file is part of the JUCE library - "Jules' Utility Class Extensions"
   Copyright 2004-9 by Raw Material Software Ltd.
  ------------------------------------------------------------------------------
   JUCE can be redistributed and/or modified under the terms of the GNU General
   Public License (Version 2), as published by the Free Software Foundation.
   A copy of the license is included in the JUCE distribution, or can be found
   online at www.gnu.org/licenses.
   JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
   A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  ------------------------------------------------------------------------------
   To release a closed-source product which uses JUCE, commercial licenses are
   available: visit www.rawmaterialsoftware.com/juce for more information.
  ==============================================================================
*/

#include "../../core/juce_StandardHeader.h"

BEGIN_JUCE_NAMESPACE

#include "juce_AudioProcessorGraph.h"
#include "../../events/juce_MessageManager.h"

const int AudioProcessorGraph::midiChannelIndex = 0x1000;

//==============================================================================
AudioProcessorGraph::Node::Node (const uint32 id_,
                                 AudioProcessor* const processor_) throw()
    : id (id_),
      processor (processor_),
      isPrepared (false)
{
    jassert (processor_ != 0);
}

AudioProcessorGraph::Node::~Node()
{
    delete processor;
}
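
// Note: prepare() is invoked lazily from buildRenderingSequence(); it attaches any
// AudioGraphIOProcessor to its parent graph, then passes the graph's sample rate and
// block size on to the wrapped processor before calling its prepareToPlay().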
void AudioProcessorGraph::Node::prepare (const double sampleRate, const int blockSize,
                                         AudioProcessorGraph* const graph)
{
    if (! isPrepared)
    {
        isPrepared = true;

        AudioProcessorGraph::AudioGraphIOProcessor* const ioProc
            = dynamic_cast <AudioProcessorGraph::AudioGraphIOProcessor*> (processor);

        if (ioProc != 0)
            ioProc->setParentGraph (graph);

        processor->setPlayConfigDetails (processor->getNumInputChannels(),
                                         processor->getNumOutputChannels(),
                                         sampleRate, blockSize);

        processor->prepareToPlay (sampleRate, blockSize);
    }
}

void AudioProcessorGraph::Node::unprepare()
{
    if (isPrepared)
    {
        isPrepared = false;
        processor->releaseResources();
    }
}

//==============================================================================
AudioProcessorGraph::AudioProcessorGraph()
    : lastNodeId (0),
      renderingBuffers (1, 1),
      currentAudioOutputBuffer (1, 1)
{
}

AudioProcessorGraph::~AudioProcessorGraph()
{
    clearRenderingSequence();
    clear();
}

const String AudioProcessorGraph::getName() const
{
    return "Audio Graph";
}

//==============================================================================
void AudioProcessorGraph::clear()
{
    nodes.clear();
    connections.clear();
    triggerAsyncUpdate();
}

AudioProcessorGraph::Node* AudioProcessorGraph::getNodeForId (const uint32 nodeId) const throw()
{
    for (int i = nodes.size(); --i >= 0;)
        if (nodes.getUnchecked(i)->id == nodeId)
            return nodes.getUnchecked(i);

    return 0;
}
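
// Adds a processor to the graph, taking ownership of it. A nodeId of 0 asks the graph
// to assign the next free id; explicitly passing an id that is already in use triggers
// an assertion (and in release builds the existing node is removed first).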
AudioProcessorGraph::Node* AudioProcessorGraph::addNode (AudioProcessor* const newProcessor,
                                                         uint32 nodeId)
{
    if (newProcessor == 0)
    {
        jassertfalse
        return 0;
    }

    if (nodeId == 0)
    {
        nodeId = ++lastNodeId;
    }
    else
    {
        // you can't add a node with an id that already exists in the graph..
        jassert (getNodeForId (nodeId) == 0);
        removeNode (nodeId);
    }

    lastNodeId = nodeId;

    Node* const n = new Node (nodeId, newProcessor);
    nodes.add (n);
    triggerAsyncUpdate();

    AudioProcessorGraph::AudioGraphIOProcessor* const ioProc
        = dynamic_cast <AudioProcessorGraph::AudioGraphIOProcessor*> (n->processor);

    if (ioProc != 0)
        ioProc->setParentGraph (this);

    return n;
}

bool AudioProcessorGraph::removeNode (const uint32 nodeId)
{
    disconnectNode (nodeId);

    for (int i = nodes.size(); --i >= 0;)
    {
        if (nodes.getUnchecked(i)->id == nodeId)
        {
            AudioProcessorGraph::AudioGraphIOProcessor* const ioProc
                = dynamic_cast <AudioProcessorGraph::AudioGraphIOProcessor*> (nodes.getUnchecked(i)->processor);

            if (ioProc != 0)
                ioProc->setParentGraph (0);

            nodes.remove (i);
            triggerAsyncUpdate();
            return true;
        }
    }

    return false;
}

//==============================================================================
const AudioProcessorGraph::Connection* AudioProcessorGraph::getConnectionBetween (const uint32 sourceNodeId,
                                                                                  const int sourceChannelIndex,
                                                                                  const uint32 destNodeId,
                                                                                  const int destChannelIndex) const throw()
{
    for (int i = connections.size(); --i >= 0;)
    {
        const Connection* const c = connections.getUnchecked(i);

        if (c->sourceNodeId == sourceNodeId
             && c->destNodeId == destNodeId
             && c->sourceChannelIndex == sourceChannelIndex
             && c->destChannelIndex == destChannelIndex)
        {
            return c;
        }
    }

    return 0;
}

bool AudioProcessorGraph::isConnected (const uint32 possibleSourceNodeId,
                                       const uint32 possibleDestNodeId) const throw()
{
    for (int i = connections.size(); --i >= 0;)
    {
        const Connection* const c = connections.getUnchecked(i);

        if (c->sourceNodeId == possibleSourceNodeId
             && c->destNodeId == possibleDestNodeId)
        {
            return true;
        }
    }

    return false;
}
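
// A connection is only legal if both nodes exist, the channel indices are in range
// (or both sides use the special midiChannelIndex), the source and destination nodes
// are distinct, and no identical connection is already present.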
bool AudioProcessorGraph::canConnect (const uint32 sourceNodeId,
                                      const int sourceChannelIndex,
                                      const uint32 destNodeId,
                                      const int destChannelIndex) const throw()
{
    if (sourceChannelIndex < 0
         || destChannelIndex < 0
         || sourceNodeId == destNodeId
         || (destChannelIndex == midiChannelIndex) != (sourceChannelIndex == midiChannelIndex))
        return false;

    const Node* const source = getNodeForId (sourceNodeId);

    if (source == 0
         || (sourceChannelIndex != midiChannelIndex && sourceChannelIndex >= source->processor->getNumOutputChannels())
         || (sourceChannelIndex == midiChannelIndex && ! source->processor->producesMidi()))
        return false;

    const Node* const dest = getNodeForId (destNodeId);

    if (dest == 0
         || (destChannelIndex != midiChannelIndex && destChannelIndex >= dest->processor->getNumInputChannels())
         || (destChannelIndex == midiChannelIndex && ! dest->processor->acceptsMidi()))
        return false;

    return getConnectionBetween (sourceNodeId, sourceChannelIndex,
                                 destNodeId, destChannelIndex) == 0;
}

bool AudioProcessorGraph::addConnection (const uint32 sourceNodeId,
                                         const int sourceChannelIndex,
                                         const uint32 destNodeId,
                                         const int destChannelIndex)
{
    if (! canConnect (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex))
        return false;

    Connection* const c = new Connection();
    c->sourceNodeId = sourceNodeId;
    c->sourceChannelIndex = sourceChannelIndex;
    c->destNodeId = destNodeId;
    c->destChannelIndex = destChannelIndex;

    connections.add (c);
    triggerAsyncUpdate();
    return true;
}

void AudioProcessorGraph::removeConnection (const int index)
{
    connections.remove (index);
    triggerAsyncUpdate();
}

bool AudioProcessorGraph::removeConnection (const uint32 sourceNodeId, const int sourceChannelIndex,
                                            const uint32 destNodeId, const int destChannelIndex)
{
    bool doneAnything = false;

    for (int i = connections.size(); --i >= 0;)
    {
        const Connection* const c = connections.getUnchecked(i);

        if (c->sourceNodeId == sourceNodeId
             && c->destNodeId == destNodeId
             && c->sourceChannelIndex == sourceChannelIndex
             && c->destChannelIndex == destChannelIndex)
        {
            removeConnection (i);
            doneAnything = true;
            triggerAsyncUpdate();
        }
    }

    return doneAnything;
}

bool AudioProcessorGraph::disconnectNode (const uint32 nodeId)
{
    bool doneAnything = false;

    for (int i = connections.size(); --i >= 0;)
    {
        const Connection* const c = connections.getUnchecked(i);

        if (c->sourceNodeId == nodeId || c->destNodeId == nodeId)
        {
            removeConnection (i);
            doneAnything = true;
            triggerAsyncUpdate();
        }
    }

    return doneAnything;
}
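
// Scans all connections and removes any whose endpoints no longer exist or whose
// channel indices have become invalid (e.g. after a node's channel count changed).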
bool AudioProcessorGraph::removeIllegalConnections()
{
    bool doneAnything = false;

    for (int i = connections.size(); --i >= 0;)
    {
        const Connection* const c = connections.getUnchecked(i);

        const Node* const source = getNodeForId (c->sourceNodeId);
        const Node* const dest = getNodeForId (c->destNodeId);

        if (source == 0 || dest == 0
             || (c->sourceChannelIndex != midiChannelIndex
                  && (((unsigned int) c->sourceChannelIndex) >= (unsigned int) source->processor->getNumOutputChannels()))
             || (c->sourceChannelIndex == midiChannelIndex
                  && ! source->processor->producesMidi())
             || (c->destChannelIndex != midiChannelIndex
                  && (((unsigned int) c->destChannelIndex) >= (unsigned int) dest->processor->getNumInputChannels()))
             || (c->destChannelIndex == midiChannelIndex
                  && ! dest->processor->acceptsMidi()))
        {
            removeConnection (i);
            doneAnything = true;
            triggerAsyncUpdate();
        }
    }

    return doneAnything;
}

//==============================================================================
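// The graph is rendered by compiling it into a flat list of small "op" objects
// (clear/copy/add an audio channel or MIDI buffer, or run one node's processBlock),
// which processBlock() then executes in order against a shared pool of buffers.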
namespace GraphRenderingOps
{

//==============================================================================
class AudioGraphRenderingOp
{
public:
    AudioGraphRenderingOp() throw()            {}
    virtual ~AudioGraphRenderingOp() throw()   {}

    virtual void perform (AudioSampleBuffer& sharedBufferChans,
                          const OwnedArray <MidiBuffer>& sharedMidiBuffers,
                          const int numSamples) throw() = 0;

    juce_UseDebuggingNewOperator
};

//==============================================================================
class ClearChannelOp : public AudioGraphRenderingOp
{
public:
    ClearChannelOp (const int channelNum_) throw()
        : channelNum (channelNum_)
    {}

    ~ClearChannelOp() throw()  {}

    void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray <MidiBuffer>&, const int numSamples) throw()
    {
        sharedBufferChans.clear (channelNum, 0, numSamples);
    }

private:
    const int channelNum;

    ClearChannelOp (const ClearChannelOp&);
    const ClearChannelOp& operator= (const ClearChannelOp&);
};

//==============================================================================
class CopyChannelOp : public AudioGraphRenderingOp
{
public:
    CopyChannelOp (const int srcChannelNum_, const int dstChannelNum_) throw()
        : srcChannelNum (srcChannelNum_),
          dstChannelNum (dstChannelNum_)
    {}

    ~CopyChannelOp() throw()  {}

    void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray <MidiBuffer>&, const int numSamples) throw()
    {
        sharedBufferChans.copyFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples);
    }

private:
    const int srcChannelNum, dstChannelNum;

    CopyChannelOp (const CopyChannelOp&);
    const CopyChannelOp& operator= (const CopyChannelOp&);
};

//==============================================================================
class AddChannelOp : public AudioGraphRenderingOp
{
public:
    AddChannelOp (const int srcChannelNum_, const int dstChannelNum_) throw()
        : srcChannelNum (srcChannelNum_),
          dstChannelNum (dstChannelNum_)
    {}

    ~AddChannelOp() throw()  {}

    void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray <MidiBuffer>&, const int numSamples) throw()
    {
        sharedBufferChans.addFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples);
    }

private:
    const int srcChannelNum, dstChannelNum;

    AddChannelOp (const AddChannelOp&);
    const AddChannelOp& operator= (const AddChannelOp&);
};

//==============================================================================
class ClearMidiBufferOp : public AudioGraphRenderingOp
{
public:
    ClearMidiBufferOp (const int bufferNum_) throw()
        : bufferNum (bufferNum_)
    {}

    ~ClearMidiBufferOp() throw()  {}

    void perform (AudioSampleBuffer&, const OwnedArray <MidiBuffer>& sharedMidiBuffers, const int) throw()
    {
        sharedMidiBuffers.getUnchecked (bufferNum)->clear();
    }

private:
    const int bufferNum;

    ClearMidiBufferOp (const ClearMidiBufferOp&);
    const ClearMidiBufferOp& operator= (const ClearMidiBufferOp&);
};

//==============================================================================
class CopyMidiBufferOp : public AudioGraphRenderingOp
{
public:
    CopyMidiBufferOp (const int srcBufferNum_, const int dstBufferNum_) throw()
        : srcBufferNum (srcBufferNum_),
          dstBufferNum (dstBufferNum_)
    {}

    ~CopyMidiBufferOp() throw()  {}

    void perform (AudioSampleBuffer&, const OwnedArray <MidiBuffer>& sharedMidiBuffers, const int) throw()
    {
        *sharedMidiBuffers.getUnchecked (dstBufferNum) = *sharedMidiBuffers.getUnchecked (srcBufferNum);
    }

private:
    const int srcBufferNum, dstBufferNum;

    CopyMidiBufferOp (const CopyMidiBufferOp&);
    const CopyMidiBufferOp& operator= (const CopyMidiBufferOp&);
};

//==============================================================================
class AddMidiBufferOp : public AudioGraphRenderingOp
{
public:
    AddMidiBufferOp (const int srcBufferNum_, const int dstBufferNum_) throw()
        : srcBufferNum (srcBufferNum_),
          dstBufferNum (dstBufferNum_)
    {}

    ~AddMidiBufferOp() throw()  {}

    void perform (AudioSampleBuffer&, const OwnedArray <MidiBuffer>& sharedMidiBuffers, const int numSamples) throw()
    {
        sharedMidiBuffers.getUnchecked (dstBufferNum)
            ->addEvents (*sharedMidiBuffers.getUnchecked (srcBufferNum), 0, numSamples, 0);
    }

private:
    const int srcBufferNum, dstBufferNum;

    AddMidiBufferOp (const AddMidiBufferOp&);
    const AddMidiBufferOp& operator= (const AddMidiBufferOp&);
};

//==============================================================================
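// Runs one node's processBlock(). The op gathers pointers to the shared buffer
// channels this node reads and writes, wraps them in a temporary AudioSampleBuffer,
// and hands it to the processor together with its assigned MIDI buffer.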
class ProcessBufferOp : public AudioGraphRenderingOp
{
public:
    ProcessBufferOp (const AudioProcessorGraph::Node::Ptr& node_,
                     const Array <int>& audioChannelsToUse_,
                     const int totalChans_,
                     const int midiBufferToUse_) throw()
        : node (node_),
          processor (node_->processor),
          audioChannelsToUse (audioChannelsToUse_),
          totalChans (jmax (1, totalChans_)),
          midiBufferToUse (midiBufferToUse_)
    {
        channels.calloc (totalChans);

        while (audioChannelsToUse.size() < totalChans)
            audioChannelsToUse.add (0);
    }

    ~ProcessBufferOp() throw()
    {
    }

    void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray <MidiBuffer>& sharedMidiBuffers, const int numSamples) throw()
    {
        for (int i = totalChans; --i >= 0;)
            channels[i] = sharedBufferChans.getSampleData (audioChannelsToUse.getUnchecked (i), 0);

        AudioSampleBuffer buffer (channels, totalChans, numSamples);

        processor->processBlock (buffer, *sharedMidiBuffers.getUnchecked (midiBufferToUse));
    }

    const AudioProcessorGraph::Node::Ptr node;
    AudioProcessor* const processor;

private:
    Array <int> audioChannelsToUse;
    HeapBlock <float*> channels;
    int totalChans;
    int midiBufferToUse;

    ProcessBufferOp (const ProcessBufferOp&);
    const ProcessBufferOp& operator= (const ProcessBufferOp&);
};

//==============================================================================
/** Used to calculate the correct sequence of rendering ops needed, based on
    the best re-use of shared buffers at each stage.
*/
class RenderingOpSequenceCalculator
{
public:
    //==============================================================================
    RenderingOpSequenceCalculator (AudioProcessorGraph& graph_,
                                   const VoidArray& orderedNodes_,
                                   VoidArray& renderingOps)
        : graph (graph_),
          orderedNodes (orderedNodes_)
    {
        nodeIds.add (-2); // first buffer is read-only zeros
        channels.add (0);

        midiNodeIds.add (-2);

        for (int i = 0; i < orderedNodes.size(); ++i)
        {
            createRenderingOpsForNode ((AudioProcessorGraph::Node*) orderedNodes.getUnchecked(i),
                                       renderingOps, i);

            markAnyUnusedBuffersAsFree (i);
        }
    }

    int getNumBuffersNeeded() const throw()         { return nodeIds.size(); }
    int getNumMidiBuffersNeeded() const throw()     { return midiNodeIds.size(); }

    //==============================================================================
    juce_UseDebuggingNewOperator

private:
    AudioProcessorGraph& graph;
    const VoidArray& orderedNodes;
    Array <int> nodeIds, channels, midiNodeIds;

    //==============================================================================
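    // For a single node, works out which shared buffer each input/output channel should
    // use, emitting clear/copy/add ops where inputs are unconnected, shared or mixed,
    // then appends the ProcessBufferOp that actually runs the node.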
    void createRenderingOpsForNode (AudioProcessorGraph::Node* const node,
                                    VoidArray& renderingOps,
                                    const int ourRenderingIndex)
    {
        const int numIns = node->processor->getNumInputChannels();
        const int numOuts = node->processor->getNumOutputChannels();
        const int totalChans = jmax (numIns, numOuts);

        Array <int> audioChannelsToUse;
        int midiBufferToUse = -1;

        for (int inputChan = 0; inputChan < numIns; ++inputChan)
        {
            // get a list of all the inputs to this node
            Array <int> sourceNodes, sourceOutputChans;

            for (int i = graph.getNumConnections(); --i >= 0;)
            {
                const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

                if (c->destNodeId == node->id && c->destChannelIndex == inputChan)
                {
                    sourceNodes.add (c->sourceNodeId);
                    sourceOutputChans.add (c->sourceChannelIndex);
                }
            }

            int bufIndex = -1;

            if (sourceNodes.size() == 0)
            {
                // unconnected input channel
                if (inputChan >= numOuts)
                {
                    bufIndex = getReadOnlyEmptyBuffer();
                    jassert (bufIndex >= 0);
                }
                else
                {
                    bufIndex = getFreeBuffer (false);
                    renderingOps.add (new ClearChannelOp (bufIndex));
                }
            }
            else if (sourceNodes.size() == 1)
            {
                // channel with a straightforward single input..
                const int srcNode = sourceNodes.getUnchecked(0);
                const int srcChan = sourceOutputChans.getUnchecked(0);

                bufIndex = getBufferContaining (srcNode, srcChan);

                if (bufIndex < 0)
                {
                    // if not found, this is probably a feedback loop
                    bufIndex = getReadOnlyEmptyBuffer();
                    jassert (bufIndex >= 0);
                }

                if (inputChan < numOuts
                     && isBufferNeededLater (ourRenderingIndex,
                                             inputChan,
                                             srcNode, srcChan))
                {
                    // can't mess up this channel because it's needed later by another node, so we
                    // need to use a copy of it..
                    const int newFreeBuffer = getFreeBuffer (false);

                    renderingOps.add (new CopyChannelOp (bufIndex, newFreeBuffer));

                    bufIndex = newFreeBuffer;
                }
            }
            else
            {
                // channel with a mix of several inputs..
                // try to find a re-usable channel from our inputs..
                int reusableInputIndex = -1;

                for (int i = 0; i < sourceNodes.size(); ++i)
                {
                    const int sourceBufIndex = getBufferContaining (sourceNodes.getUnchecked(i),
                                                                    sourceOutputChans.getUnchecked(i));

                    if (sourceBufIndex >= 0
                         && ! isBufferNeededLater (ourRenderingIndex,
                                                   inputChan,
                                                   sourceNodes.getUnchecked(i),
                                                   sourceOutputChans.getUnchecked(i)))
                    {
                        // we've found one of our input chans that can be re-used..
                        reusableInputIndex = i;
                        bufIndex = sourceBufIndex;
                        break;
                    }
                }

                if (reusableInputIndex < 0)
                {
                    // can't re-use any of our input chans, so get a new one and copy everything into it..
                    bufIndex = getFreeBuffer (false);
                    jassert (bufIndex != 0);

                    const int srcIndex = getBufferContaining (sourceNodes.getUnchecked (0),
                                                              sourceOutputChans.getUnchecked (0));
                    if (srcIndex < 0)
                    {
                        // if not found, this is probably a feedback loop
                        renderingOps.add (new ClearChannelOp (bufIndex));
                    }
                    else
                    {
                        renderingOps.add (new CopyChannelOp (srcIndex, bufIndex));
                    }

                    reusableInputIndex = 0;
                }

                for (int j = 0; j < sourceNodes.size(); ++j)
                {
                    if (j != reusableInputIndex)
                    {
                        const int srcIndex = getBufferContaining (sourceNodes.getUnchecked(j),
                                                                  sourceOutputChans.getUnchecked(j));
                        if (srcIndex >= 0)
                            renderingOps.add (new AddChannelOp (srcIndex, bufIndex));
                    }
                }
            }

            jassert (bufIndex >= 0);
            audioChannelsToUse.add (bufIndex);

            if (inputChan < numOuts)
                markBufferAsContaining (bufIndex, node->id, inputChan);
        }

        for (int outputChan = numIns; outputChan < numOuts; ++outputChan)
        {
            const int bufIndex = getFreeBuffer (false);
            jassert (bufIndex != 0);
            audioChannelsToUse.add (bufIndex);

            markBufferAsContaining (bufIndex, node->id, outputChan);
        }

        // Now the same thing for midi..
        Array <int> midiSourceNodes;

        for (int i = graph.getNumConnections(); --i >= 0;)
        {
            const AudioProcessorGraph::Connection* const c = graph.getConnection (i);

            if (c->destNodeId == node->id && c->destChannelIndex == AudioProcessorGraph::midiChannelIndex)
                midiSourceNodes.add (c->sourceNodeId);
        }

        if (midiSourceNodes.size() == 0)
        {
            // No midi inputs..
            midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi

            if (node->processor->acceptsMidi() || node->processor->producesMidi())
                renderingOps.add (new ClearMidiBufferOp (midiBufferToUse));
        }
        else if (midiSourceNodes.size() == 1)
        {
            // One midi input..
            midiBufferToUse = getBufferContaining (midiSourceNodes.getUnchecked(0),
                                                   AudioProcessorGraph::midiChannelIndex);

            if (midiBufferToUse >= 0)
            {
                if (isBufferNeededLater (ourRenderingIndex,
                                         AudioProcessorGraph::midiChannelIndex,
                                         midiSourceNodes.getUnchecked(0),
                                         AudioProcessorGraph::midiChannelIndex))
                {
                    // can't mess up this channel because it's needed later by another node, so we
                    // need to use a copy of it..
                    const int newFreeBuffer = getFreeBuffer (true);

                    renderingOps.add (new CopyMidiBufferOp (midiBufferToUse, newFreeBuffer));

                    midiBufferToUse = newFreeBuffer;
                }
            }
            else
            {
                // probably a feedback loop, so just use an empty one..
                midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi
            }
        }
        else
        {
            // More than one midi input being mixed..
            int reusableInputIndex = -1;

            for (int i = 0; i < midiSourceNodes.size(); ++i)
            {
                const int sourceBufIndex = getBufferContaining (midiSourceNodes.getUnchecked(i),
                                                                AudioProcessorGraph::midiChannelIndex);

                if (sourceBufIndex >= 0
                     && ! isBufferNeededLater (ourRenderingIndex,
                                               AudioProcessorGraph::midiChannelIndex,
                                               midiSourceNodes.getUnchecked(i),
                                               AudioProcessorGraph::midiChannelIndex))
                {
                    // we've found one of our input buffers that can be re-used..
                    reusableInputIndex = i;
                    midiBufferToUse = sourceBufIndex;
                    break;
                }
            }

            if (reusableInputIndex < 0)
            {
                // can't re-use any of our input buffers, so get a new one and copy everything into it..
                midiBufferToUse = getFreeBuffer (true);
                jassert (midiBufferToUse >= 0);

                const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(0),
                                                          AudioProcessorGraph::midiChannelIndex);
                if (srcIndex >= 0)
                    renderingOps.add (new CopyMidiBufferOp (srcIndex, midiBufferToUse));
                else
                    renderingOps.add (new ClearMidiBufferOp (midiBufferToUse));

                reusableInputIndex = 0;
            }

            for (int j = 0; j < midiSourceNodes.size(); ++j)
            {
                if (j != reusableInputIndex)
                {
                    const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(j),
                                                              AudioProcessorGraph::midiChannelIndex);
                    if (srcIndex >= 0)
                        renderingOps.add (new AddMidiBufferOp (srcIndex, midiBufferToUse));
                }
            }
        }

        if (node->processor->producesMidi())
            markBufferAsContaining (midiBufferToUse, node->id,
                                    AudioProcessorGraph::midiChannelIndex);

        renderingOps.add (new ProcessBufferOp (node, audioChannelsToUse,
                                               totalChans, midiBufferToUse));
    }

    //==============================================================================
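    // Buffer bookkeeping: nodeIds/channels (and midiNodeIds) record which node output
    // currently lives in each shared buffer; -1 marks a free buffer, -2 the read-only
    // zero buffer at index 0.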
    int getFreeBuffer (const bool forMidi)
    {
        if (forMidi)
        {
            for (int i = 1; i < midiNodeIds.size(); ++i)
                if (midiNodeIds.getUnchecked(i) < 0)
                    return i;

            midiNodeIds.add (-1);
            return midiNodeIds.size() - 1;
        }
        else
        {
            for (int i = 1; i < nodeIds.size(); ++i)
                if (nodeIds.getUnchecked(i) < 0)
                    return i;

            nodeIds.add (-1);
            channels.add (0);
            return nodeIds.size() - 1;
        }
    }

    int getReadOnlyEmptyBuffer() const throw()
    {
        return 0;
    }

    int getBufferContaining (const int nodeId, const int outputChannel) const throw()
    {
        if (outputChannel == AudioProcessorGraph::midiChannelIndex)
        {
            for (int i = midiNodeIds.size(); --i >= 0;)
                if (midiNodeIds.getUnchecked(i) == nodeId)
                    return i;
        }
        else
        {
            for (int i = nodeIds.size(); --i >= 0;)
                if (nodeIds.getUnchecked(i) == nodeId
                     && channels.getUnchecked(i) == outputChannel)
                    return i;
        }

        return -1;
    }

    void markAnyUnusedBuffersAsFree (const int stepIndex)
    {
        int i;
        for (i = 0; i < nodeIds.size(); ++i)
        {
            if (nodeIds.getUnchecked(i) >= 0
                 && ! isBufferNeededLater (stepIndex, -1,
                                           nodeIds.getUnchecked(i),
                                           channels.getUnchecked(i)))
            {
                nodeIds.set (i, -1);
            }
        }

        for (i = 0; i < midiNodeIds.size(); ++i)
        {
            if (midiNodeIds.getUnchecked(i) >= 0
                 && ! isBufferNeededLater (stepIndex, -1,
                                           midiNodeIds.getUnchecked(i),
                                           AudioProcessorGraph::midiChannelIndex))
            {
                midiNodeIds.set (i, -1);
            }
        }
    }
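
    // Returns true if a later node in the ordered list still needs to read this
    // node/channel's output, in which case its buffer must not be overwritten in place.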
    bool isBufferNeededLater (int stepIndexToSearchFrom,
                              int inputChannelOfIndexToIgnore,
                              const int nodeId,
                              const int outputChanIndex) const throw()
    {
        while (stepIndexToSearchFrom < orderedNodes.size())
        {
            const AudioProcessorGraph::Node* const node = (const AudioProcessorGraph::Node*) orderedNodes.getUnchecked (stepIndexToSearchFrom);

            if (outputChanIndex == AudioProcessorGraph::midiChannelIndex)
            {
                if (inputChannelOfIndexToIgnore != AudioProcessorGraph::midiChannelIndex
                     && graph.getConnectionBetween (nodeId, AudioProcessorGraph::midiChannelIndex,
                                                    node->id, AudioProcessorGraph::midiChannelIndex) != 0)
                    return true;
            }
            else
            {
                for (int i = 0; i < node->processor->getNumInputChannels(); ++i)
                    if (i != inputChannelOfIndexToIgnore
                         && graph.getConnectionBetween (nodeId, outputChanIndex,
                                                        node->id, i) != 0)
                        return true;
            }

            inputChannelOfIndexToIgnore = -1;
            ++stepIndexToSearchFrom;
        }

        return false;
    }

    void markBufferAsContaining (int bufferNum, int nodeId, int outputIndex)
    {
        if (outputIndex == AudioProcessorGraph::midiChannelIndex)
        {
            jassert (bufferNum > 0 && bufferNum < midiNodeIds.size());

            midiNodeIds.set (bufferNum, nodeId);
        }
        else
        {
            jassert (bufferNum >= 0 && bufferNum < nodeIds.size());

            nodeIds.set (bufferNum, nodeId);
            channels.set (bufferNum, outputIndex);
        }
    }

    RenderingOpSequenceCalculator (const RenderingOpSequenceCalculator&);
    const RenderingOpSequenceCalculator& operator= (const RenderingOpSequenceCalculator&);
};

}

//==============================================================================
void AudioProcessorGraph::clearRenderingSequence()
{
    const ScopedLock sl (renderLock);

    for (int i = renderingOps.size(); --i >= 0;)
    {
        GraphRenderingOps::AudioGraphRenderingOp* const r
            = (GraphRenderingOps::AudioGraphRenderingOp*) renderingOps.getUnchecked(i);

        renderingOps.remove (i);
        delete r;
    }
}

bool AudioProcessorGraph::isAnInputTo (const uint32 possibleInputId,
                                       const uint32 possibleDestinationId,
                                       const int recursionCheck) const throw()
{
    if (recursionCheck > 0)
    {
        for (int i = connections.size(); --i >= 0;)
        {
            const AudioProcessorGraph::Connection* const c = connections.getUnchecked (i);

            if (c->destNodeId == possibleDestinationId
                 && (c->sourceNodeId == possibleInputId
                      || isAnInputTo (possibleInputId, c->sourceNodeId, recursionCheck - 1)))
                return true;
        }
    }

    return false;
}
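
// Rebuilds the rendering op list: nodes are sorted so that every node comes after
// all of its inputs (isAnInputTo bounds its recursion to cope with feedback loops),
// the ops and buffer counts are computed, and the new sequence is swapped in under
// the render lock.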
void AudioProcessorGraph::buildRenderingSequence()
{
    VoidArray newRenderingOps;
    int numRenderingBuffersNeeded = 2;
    int numMidiBuffersNeeded = 1;

    {
        MessageManagerLock mml;

        VoidArray orderedNodes;

        int i;
        for (i = 0; i < nodes.size(); ++i)
        {
            Node* const node = nodes.getUnchecked(i);

            node->prepare (getSampleRate(), getBlockSize(), this);

            int j = 0;
            for (; j < orderedNodes.size(); ++j)
                if (isAnInputTo (node->id,
                                 ((Node*) orderedNodes.getUnchecked (j))->id,
                                 nodes.size() + 1))
                    break;

            orderedNodes.insert (j, node);
        }

        GraphRenderingOps::RenderingOpSequenceCalculator calculator (*this, orderedNodes, newRenderingOps);

        numRenderingBuffersNeeded = calculator.getNumBuffersNeeded();
        numMidiBuffersNeeded = calculator.getNumMidiBuffersNeeded();
    }

    VoidArray oldRenderingOps (renderingOps);

    {
        // swap over to the new rendering sequence..
        const ScopedLock sl (renderLock);

        renderingBuffers.setSize (numRenderingBuffersNeeded, getBlockSize());
        renderingBuffers.clear();

        for (int i = midiBuffers.size(); --i >= 0;)
            midiBuffers.getUnchecked(i)->clear();

        while (midiBuffers.size() < numMidiBuffersNeeded)
            midiBuffers.add (new MidiBuffer());

        renderingOps = newRenderingOps;
    }

    for (int i = oldRenderingOps.size(); --i >= 0;)
        delete (GraphRenderingOps::AudioGraphRenderingOp*) oldRenderingOps.getUnchecked(i);
}

void AudioProcessorGraph::handleAsyncUpdate()
{
    buildRenderingSequence();
}

//==============================================================================
void AudioProcessorGraph::prepareToPlay (double /*sampleRate*/, int estimatedSamplesPerBlock)
{
    currentAudioInputBuffer = 0;
    currentAudioOutputBuffer.setSize (jmax (1, getNumOutputChannels()), estimatedSamplesPerBlock);
    currentMidiInputBuffer = 0;
    currentMidiOutputBuffer.clear();

    clearRenderingSequence();
    buildRenderingSequence();
}

void AudioProcessorGraph::releaseResources()
{
    for (int i = 0; i < nodes.size(); ++i)
        nodes.getUnchecked(i)->unprepare();

    renderingBuffers.setSize (1, 1);
    midiBuffers.clear();

    currentAudioInputBuffer = 0;
    currentAudioOutputBuffer.setSize (1, 1);
    currentMidiInputBuffer = 0;
    currentMidiOutputBuffer.clear();
}
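
// Render callback: points the I/O processors at the caller's buffers, runs the
// pre-compiled op sequence, then copies the graph's output audio and MIDI back into
// the buffers that were passed in.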
void AudioProcessorGraph::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    const int numSamples = buffer.getNumSamples();

    const ScopedLock sl (renderLock);

    currentAudioInputBuffer = &buffer;
    currentAudioOutputBuffer.setSize (jmax (1, buffer.getNumChannels()), numSamples);
    currentAudioOutputBuffer.clear();
    currentMidiInputBuffer = &midiMessages;
    currentMidiOutputBuffer.clear();

    int i;
    for (i = 0; i < renderingOps.size(); ++i)
    {
        GraphRenderingOps::AudioGraphRenderingOp* const op
            = (GraphRenderingOps::AudioGraphRenderingOp*) renderingOps.getUnchecked(i);

        op->perform (renderingBuffers, midiBuffers, numSamples);
    }

    for (i = 0; i < buffer.getNumChannels(); ++i)
        buffer.copyFrom (i, 0, currentAudioOutputBuffer, i, 0, numSamples);

    midiMessages.clear();
    midiMessages.addEvents (currentMidiOutputBuffer, 0, buffer.getNumSamples(), 0);
}

const String AudioProcessorGraph::getInputChannelName (const int channelIndex) const
{
    return "Input " + String (channelIndex + 1);
}

const String AudioProcessorGraph::getOutputChannelName (const int channelIndex) const
{
    return "Output " + String (channelIndex + 1);
}

bool AudioProcessorGraph::isInputChannelStereoPair (int /*index*/) const
{
    return true;
}

bool AudioProcessorGraph::isOutputChannelStereoPair (int /*index*/) const
{
    return true;
}

bool AudioProcessorGraph::acceptsMidi() const
{
    return true;
}

bool AudioProcessorGraph::producesMidi() const
{
    return true;
}

void AudioProcessorGraph::getStateInformation (JUCE_NAMESPACE::MemoryBlock& /*destData*/)
{
}

void AudioProcessorGraph::setStateInformation (const void* /*data*/, int /*sizeInBytes*/)
{
}

//==============================================================================
AudioProcessorGraph::AudioGraphIOProcessor::AudioGraphIOProcessor (const IODeviceType type_)
    : type (type_),
      graph (0)
{
}

AudioProcessorGraph::AudioGraphIOProcessor::~AudioGraphIOProcessor()
{
}

const String AudioProcessorGraph::AudioGraphIOProcessor::getName() const
{
    switch (type)
    {
        case audioOutputNode:
            return "Audio Output";
        case audioInputNode:
            return "Audio Input";
        case midiOutputNode:
            return "Midi Output";
        case midiInputNode:
            return "Midi Input";
        default:
            break;
    }

    return String::empty;
}

void AudioProcessorGraph::AudioGraphIOProcessor::fillInPluginDescription (PluginDescription& d) const
{
    d.name = getName();
    d.uid = d.name.hashCode();
    d.category = "I/O devices";
    d.pluginFormatName = "Internal";
    d.manufacturerName = "Raw Material Software";
    d.version = "1.0";
    d.isInstrument = false;

    d.numInputChannels = getNumInputChannels();
    if (type == audioOutputNode && graph != 0)
        d.numInputChannels = graph->getNumInputChannels();

    d.numOutputChannels = getNumOutputChannels();
    if (type == audioInputNode && graph != 0)
        d.numOutputChannels = graph->getNumOutputChannels();
}

void AudioProcessorGraph::AudioGraphIOProcessor::prepareToPlay (double, int)
{
    jassert (graph != 0);
}

void AudioProcessorGraph::AudioGraphIOProcessor::releaseResources()
{
}
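
// The I/O processors bridge the graph and the outside world: input nodes copy from the
// graph's current input buffers, output nodes accumulate into its output buffers.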
void AudioProcessorGraph::AudioGraphIOProcessor::processBlock (AudioSampleBuffer& buffer,
                                                               MidiBuffer& midiMessages)
{
    jassert (graph != 0);

    switch (type)
    {
        case audioOutputNode:
        {
            for (int i = jmin (graph->currentAudioOutputBuffer.getNumChannels(),
                               buffer.getNumChannels()); --i >= 0;)
            {
                graph->currentAudioOutputBuffer.addFrom (i, 0, buffer, i, 0, buffer.getNumSamples());
            }

            break;
        }

        case audioInputNode:
        {
            for (int i = jmin (graph->currentAudioInputBuffer->getNumChannels(),
                               buffer.getNumChannels()); --i >= 0;)
            {
                buffer.copyFrom (i, 0, *graph->currentAudioInputBuffer, i, 0, buffer.getNumSamples());
            }

            break;
        }

        case midiOutputNode:
            graph->currentMidiOutputBuffer.addEvents (midiMessages, 0, buffer.getNumSamples(), 0);
            break;

        case midiInputNode:
            midiMessages.addEvents (*graph->currentMidiInputBuffer, 0, buffer.getNumSamples(), 0);
            break;

        default:
            break;
    }
}

bool AudioProcessorGraph::AudioGraphIOProcessor::acceptsMidi() const
{
    return type == midiOutputNode;
}

bool AudioProcessorGraph::AudioGraphIOProcessor::producesMidi() const
{
    return type == midiInputNode;
}

const String AudioProcessorGraph::AudioGraphIOProcessor::getInputChannelName (const int channelIndex) const
{
    switch (type)
    {
        case audioOutputNode:
            return "Output " + String (channelIndex + 1);
        case midiOutputNode:
            return "Midi Output";
        default:
            break;
    }

    return String::empty;
}

const String AudioProcessorGraph::AudioGraphIOProcessor::getOutputChannelName (const int channelIndex) const
{
    switch (type)
    {
        case audioInputNode:
            return "Input " + String (channelIndex + 1);
        case midiInputNode:
            return "Midi Input";
        default:
            break;
    }

    return String::empty;
}

bool AudioProcessorGraph::AudioGraphIOProcessor::isInputChannelStereoPair (int /*index*/) const
{
    return type == audioInputNode || type == audioOutputNode;
}

bool AudioProcessorGraph::AudioGraphIOProcessor::isOutputChannelStereoPair (int index) const
{
    return isInputChannelStereoPair (index);
}

bool AudioProcessorGraph::AudioGraphIOProcessor::isInput() const throw()
{
    return type == audioInputNode || type == midiInputNode;
}

bool AudioProcessorGraph::AudioGraphIOProcessor::isOutput() const throw()
{
    return type == audioOutputNode || type == midiOutputNode;
}

AudioProcessorEditor* AudioProcessorGraph::AudioGraphIOProcessor::createEditor()
{
    return 0;
}

int AudioProcessorGraph::AudioGraphIOProcessor::getNumParameters()                        { return 0; }
const String AudioProcessorGraph::AudioGraphIOProcessor::getParameterName (int)           { return String::empty; }
float AudioProcessorGraph::AudioGraphIOProcessor::getParameter (int)                      { return 0.0f; }
const String AudioProcessorGraph::AudioGraphIOProcessor::getParameterText (int)           { return String::empty; }
void AudioProcessorGraph::AudioGraphIOProcessor::setParameter (int, float)                { }

int AudioProcessorGraph::AudioGraphIOProcessor::getNumPrograms()                          { return 0; }
int AudioProcessorGraph::AudioGraphIOProcessor::getCurrentProgram()                       { return 0; }
void AudioProcessorGraph::AudioGraphIOProcessor::setCurrentProgram (int)                  { }
const String AudioProcessorGraph::AudioGraphIOProcessor::getProgramName (int)             { return String::empty; }
void AudioProcessorGraph::AudioGraphIOProcessor::changeProgramName (int, const String&)   { }

void AudioProcessorGraph::AudioGraphIOProcessor::getStateInformation (JUCE_NAMESPACE::MemoryBlock&)
{
}

void AudioProcessorGraph::AudioGraphIOProcessor::setStateInformation (const void*, int)
{
}

void AudioProcessorGraph::AudioGraphIOProcessor::setParentGraph (AudioProcessorGraph* const newGraph) throw()
{
    graph = newGraph;

    if (graph != 0)
    {
        setPlayConfigDetails (type == audioOutputNode ? graph->getNumOutputChannels() : 0,
                              type == audioInputNode ? graph->getNumInputChannels() : 0,
                              getSampleRate(),
                              getBlockSize());

        updateHostDisplay();
    }
}

END_JUCE_NAMESPACE