diff --git a/extras/audio plugin host/Source/FilterGraph.cpp b/extras/audio plugin host/Source/FilterGraph.cpp index 1c0772b5ac..54c0c80a3e 100644 --- a/extras/audio plugin host/Source/FilterGraph.cpp +++ b/extras/audio plugin host/Source/FilterGraph.cpp @@ -295,7 +295,7 @@ static XmlElement* createNodeXml (AudioProcessorGraph::Node* const node) noexcep } XmlElement* e = new XmlElement ("FILTER"); - e->setAttribute ("uid", (int) node->id); + e->setAttribute ("uid", (int) node->nodeId); e->setAttribute ("x", node->properties ["x"].toString()); e->setAttribute ("y", node->properties ["y"].toString()); e->setAttribute ("uiLastX", node->properties ["uiLastX"].toString()); diff --git a/extras/audio plugin host/Source/GraphEditorPanel.cpp b/extras/audio plugin host/Source/GraphEditorPanel.cpp index b5455ee62d..6ed7eae717 100644 --- a/extras/audio plugin host/Source/GraphEditorPanel.cpp +++ b/extras/audio plugin host/Source/GraphEditorPanel.cpp @@ -55,7 +55,7 @@ PluginWindow::PluginWindow (Component* const uiComp, void PluginWindow::closeCurrentlyOpenWindowsFor (const uint32 nodeId) { for (int i = activePluginWindows.size(); --i >= 0;) - if (activePluginWindows.getUnchecked(i)->owner->id == nodeId) + if (activePluginWindows.getUnchecked(i)->owner->nodeId == nodeId) delete activePluginWindows.getUnchecked(i); } @@ -839,9 +839,9 @@ void GraphEditorPanel::updateComponents() { const AudioProcessorGraph::Node::Ptr f (graph.getNode (i)); - if (getComponentForFilter (f->id) == 0) + if (getComponentForFilter (f->nodeId) == 0) { - FilterComponent* const comp = new FilterComponent (graph, f->id); + FilterComponent* const comp = new FilterComponent (graph, f->nodeId); addAndMakeVisible (comp); comp->update(); } diff --git a/juce_amalgamated.cpp b/juce_amalgamated.cpp index f01d81ec59..07e46ba107 100644 --- a/juce_amalgamated.cpp +++ b/juce_amalgamated.cpp @@ -18197,6 +18197,21 @@ BEGIN_JUCE_NAMESPACE extern void juce_initialiseMacMainMenu(); #endif +class AppBroadcastCallback : public ActionListener +{ +public: + AppBroadcastCallback() { MessageManager::getInstance()->registerBroadcastListener (this); } + ~AppBroadcastCallback() { MessageManager::getInstance()->deregisterBroadcastListener (this); } + + void actionListenerCallback (const String& message) + { + JUCEApplication* const app = JUCEApplication::getInstance(); + + if (app != 0 && message.startsWith (app->getApplicationName() + "/")) + app->anotherInstanceStarted (message.substring (app->getApplicationName().length() + 1)); + } +}; + JUCEApplication::JUCEApplication() : appReturnValue (0), stillInitialising (true) @@ -18244,12 +18259,6 @@ void JUCEApplication::setApplicationReturnValue (const int newReturnValue) noexc appReturnValue = newReturnValue; } -void JUCEApplication::actionListenerCallback (const String& message) -{ - if (message.startsWith (getApplicationName() + "/")) - anotherInstanceStarted (message.substring (getApplicationName().length() + 1)); -} - void JUCEApplication::unhandledException (const std::exception*, const String&, const int) @@ -18303,7 +18312,7 @@ bool JUCEApplication::initialiseApp (const String& commandLine) { commandLineParameters = commandLine.trim(); -#if ! JUCE_IOS + #if ! JUCE_IOS jassert (appLock == nullptr); // initialiseApp must only be called once! if (! moreThanOneInstanceAllowed()) @@ -18319,17 +18328,18 @@ bool JUCEApplication::initialiseApp (const String& commandLine) return false; } } -#endif + #endif // let the app do its setting-up.. 
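    // A note on the broadcast path used by AppBroadcastCallback (defined above): it expects
    // messages of the form "<application name>/<command line>", so a second process can hand
    // its arguments to the already-running instance with something along the lines of
    //
    //     MessageManager::broadcastMessage (JUCEApplication::getInstance()->getApplicationName()
    //                                           + "/" + commandLine);
    //
    // (illustrative only; the exact hand-off done in the single-instance branch above is not
    // shown in this hunk). The callback strips the "<name>/" prefix and forwards the remainder
    // to anotherInstanceStarted() on the original instance.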
initialise (commandLineParameters); -#if JUCE_MAC + #if JUCE_MAC juce_initialiseMacMainMenu(); // needs to be called after the app object has created, to get its name -#endif + #endif - // register for broadcast new app messages - MessageManager::getInstance()->registerBroadcastListener (this); + #if ! JUCE_IOS + broadcastCallback = new AppBroadcastCallback(); + #endif stillInitialising = false; return true; @@ -18339,7 +18349,7 @@ int JUCEApplication::shutdownApp() { jassert (appInstance == this); - MessageManager::getInstance()->deregisterBroadcastListener (this); + broadcastCallback = nullptr; JUCE_TRY { @@ -18402,20 +18412,20 @@ int JUCEApplication::main (int argc, const char* argv[]) { JUCE_AUTORELEASEPOOL - #if ! JUCE_WINDOWS + #if ! JUCE_WINDOWS jassert (createInstance != nullptr); juce_Argv0 = argv[0]; - #endif + #endif - #if JUCE_IOS + #if JUCE_IOS return juce_iOSMain (argc, argv); - #else + #else String cmd; for (int i = 1; i < argc; ++i) cmd << argv[i] << ' '; return JUCEApplication::main (cmd); - #endif + #endif } #endif @@ -20246,6 +20256,212 @@ BEGIN_JUCE_NAMESPACE static const char* const aiffFormatName = "AIFF file"; static const char* const aiffExtensions[] = { ".aiff", ".aif", 0 }; +namespace AiffFileHelpers +{ + inline int chunkName (const char* const name) { return (int) ByteOrder::littleEndianInt (name); } + +#if JUCE_MSVC + #pragma pack (push, 1) + #define PACKED +#elif JUCE_GCC + #define PACKED __attribute__((packed)) +#else + #define PACKED +#endif + +struct InstChunk +{ + struct Loop + { + uint16 type; // these are different in AIFF and WAV + uint16 startIdentifier; + uint16 endIdentifier; + } PACKED; + + int8 baseNote; + int8 detune; + int8 lowNote; + int8 highNote; + int8 lowVelocity; + int8 highVelocity; + int16 gain; + Loop sustainLoop; + Loop releaseLoop; + + void copyTo (StringPairArray& values) const + { + values.set ("MidiUnityNote", String (baseNote)); + values.set ("Detune", String (detune)); + + values.set ("LowNote", String (lowNote)); + values.set ("HighNote", String (highNote)); + values.set ("LowVelocity", String (lowVelocity)); + values.set ("HighVelocity", String (highVelocity)); + + values.set ("Gain", String ((int16) ByteOrder::swapIfLittleEndian ((uint16) gain))); + + values.set ("NumSampleLoops", String (2)); // always 2 with AIFF, WAV can have more + values.set ("Loop0Type", String (ByteOrder::swapIfLittleEndian (sustainLoop.type))); + values.set ("Loop0StartIdentifier", String (ByteOrder::swapIfLittleEndian (sustainLoop.startIdentifier))); + values.set ("Loop0EndIdentifier", String (ByteOrder::swapIfLittleEndian (sustainLoop.endIdentifier))); + values.set ("Loop1Type", String (ByteOrder::swapIfLittleEndian (releaseLoop.type))); + values.set ("Loop1StartIdentifier", String (ByteOrder::swapIfLittleEndian (releaseLoop.startIdentifier))); + values.set ("Loop1EndIdentifier", String (ByteOrder::swapIfLittleEndian (releaseLoop.endIdentifier))); + } + + static void create (MemoryBlock& block, const StringPairArray& values) + { + if (values.getAllKeys().contains ("MidiUnityNote", true)) + { + block.setSize ((sizeof (InstChunk) + 3) & ~3, true); + InstChunk* const inst = static_cast (block.getData()); + + inst->baseNote = (int8) values.getValue ("MidiUnityNote", "60").getIntValue(); + inst->detune = (int8) values.getValue ("Detune", "0").getIntValue(); + inst->lowNote = (int8) values.getValue ("LowNote", "0").getIntValue(); + inst->highNote = (int8) values.getValue ("HighNote", "127").getIntValue(); + inst->lowVelocity = (int8) values.getValue 
("LowVelocity", "1").getIntValue(); + inst->highVelocity = (int8) values.getValue ("HighVelocity", "127").getIntValue(); + inst->gain = (int16) ByteOrder::swapIfLittleEndian ((uint16) values.getValue ("Gain", "0").getIntValue()); + + inst->sustainLoop.type = ByteOrder::swapIfLittleEndian ((uint16) values.getValue ("Loop0Type", "0").getIntValue()); + inst->sustainLoop.startIdentifier = ByteOrder::swapIfLittleEndian ((uint16) values.getValue ("Loop0StartIdentifier", "0").getIntValue()); + inst->sustainLoop.endIdentifier = ByteOrder::swapIfLittleEndian ((uint16) values.getValue ("Loop0EndIdentifier", "0").getIntValue()); + inst->releaseLoop.type = ByteOrder::swapIfLittleEndian ((uint16) values.getValue ("Loop1Type", "0").getIntValue()); + inst->releaseLoop.startIdentifier = ByteOrder::swapIfLittleEndian ((uint16) values.getValue ("Loop1StartIdentifier", "0").getIntValue()); + inst->releaseLoop.endIdentifier = ByteOrder::swapIfLittleEndian ((uint16) values.getValue ("Loop1EndIdentifier", "0").getIntValue()); + } + } + +} PACKED; + +#if JUCE_MSVC + #pragma pack (pop) +#endif + +#undef PACKED + +namespace MarkChunk +{ + bool metaDataContainsZeroIdentifiers (const StringPairArray& values) + { + // (zero cue identifiers are valid for WAV but not for AIFF) + const String cueString ("Cue"); + const String noteString ("CueNote"); + const String identifierString ("Identifier"); + + const StringArray& keys = values.getAllKeys(); + + for (int i = 0; i < keys.size(); ++i) + { + const String key (keys[i]); + + if (key.startsWith (noteString)) + continue; // zero identifier IS valid in a COMT chunk + + if (key.startsWith (cueString) && key.contains (identifierString)) + { + const int value = values.getValue (key, "-1").getIntValue(); + + if (value == 0) + return true; + } + } + + return false; + } + + void create (MemoryBlock& block, const StringPairArray& values) + { + const int numCues = values.getValue ("NumCuePoints", "0").getIntValue(); + + if (numCues > 0) + { + MemoryOutputStream out (block, false); + + out.writeShortBigEndian ((short) numCues); + + const int numCueLabels = values.getValue ("NumCueLabels", "0").getIntValue(); + const int idOffset = metaDataContainsZeroIdentifiers (values) ? 1 : 0; // can't have zero IDs in AIFF + + #if JUCE_DEBUG + Array identifiers; + #endif + + for (int i = 0; i < numCues; ++i) + { + const String prefixCue ("Cue" + String (i)); + const String prefixLabel ("CueLabel" + String (i)); + + const int identifier = idOffset + values.getValue (prefixCue + "Identifier", "1").getIntValue(); + + #if JUCE_DEBUG + jassert (! 
identifiers.contains (identifier)); + identifiers.add (identifier); + #endif + + const int offset = values.getValue (prefixCue + "Offset", "0").getIntValue(); + + String label (prefixLabel); + + for (int labelIndex = 0; labelIndex < numCueLabels; ++labelIndex) + { + const String prefixLabel ("CueLabel" + String (labelIndex)); + const int labelIdentifier = idOffset + values.getValue (prefixLabel + "Identifier", "1").getIntValue(); + + if (labelIdentifier == identifier) + { + label = values.getValue (prefixLabel + "Text", label); + break; + } + } + + out.writeShortBigEndian ((short) identifier); + out.writeIntBigEndian (offset); + + const int labelLength = jmin (254, label.getNumBytesAsUTF8()); // seems to need null terminator even though it's a pstring + out.writeByte ((char) labelLength + 1); + out.write (label.toUTF8(), labelLength); + out.writeByte (0); + } + + if ((out.getDataSize() & 1) != 0) + out.writeByte (0); + } + } +} + +namespace COMTChunk +{ + void create (MemoryBlock& block, const StringPairArray& values) + { + const int numNotes = values.getValue ("NumCueNotes", "0").getIntValue(); + + if (numNotes > 0) + { + MemoryOutputStream out (block, false); + out.writeShortBigEndian ((short) numNotes); + + for (int i = 0; i < numNotes; ++i) + { + const String prefix ("CueNote" + String (i)); + + out.writeIntBigEndian (values.getValue (prefix + "TimeStamp", "0").getIntValue()); + out.writeShortBigEndian ((short) values.getValue (prefix + "Identifier", "0").getIntValue()); + + const String comment (values.getValue (prefix + "Text", String::empty)); + out.write (comment.toUTF8(), jmin (comment.getNumBytesAsUTF8(), 65534)); + out.writeByte (0); + + if ((out.getDataSize() & 1) != 0) + out.writeByte (0); + } + } + } +} + +} + class AiffAudioFormatReader : public AudioFormatReader { public: @@ -20256,6 +20472,8 @@ public: AiffAudioFormatReader (InputStream* in) : AudioFormatReader (in, TRANS (aiffFormatName)) { + using namespace AiffFileHelpers; + if (input->readInt() == chunkName ("FORM")) { const int len = input->readIntBigEndian(); @@ -20337,6 +20555,67 @@ public: dataChunkStart = input->getPosition() + 4 + offset; lengthInSamples = (bytesPerFrame > 0) ? 
jmin (lengthInSamples, (int64) (length / bytesPerFrame)) : 0; } + else if (type == chunkName ("MARK")) + { + const uint16 numCues = (uint16) input->readShortBigEndian(); + + // these two are always the same for AIFF-read files + metadataValues.set ("NumCuePoints", String (numCues)); + metadataValues.set ("NumCueLabels", String (numCues)); + + for (uint16 i = 0; i < numCues; ++i) + { + uint16 identifier = (uint16) input->readShortBigEndian(); + uint32 offset = (uint32) input->readIntBigEndian(); + uint8 stringLength = (uint8) input->readByte(); + MemoryBlock textBlock; + input->readIntoMemoryBlock (textBlock, stringLength); + + // if the stringLength is even then read one more byte as the + // string needs to be an even number of bytes INCLUDING the + // leading length character in the pascal string + if ((stringLength & 1) == 0) + input->readByte(); + + const String text = String::fromUTF8 ((const char*)textBlock.getData(), stringLength); + + const String prefixCue ("Cue" + String (i)); + metadataValues.set (prefixCue + "Identifier", String (identifier)); + metadataValues.set (prefixCue + "Offset", String (offset)); + + const String prefixLabel ("CueLabel" + String (i)); + metadataValues.set (prefixLabel + "Identifier", String (identifier)); + metadataValues.set (prefixLabel + "Text", text); + } + } + else if (type == chunkName ("COMT")) + { + const uint16 numNotes = (uint16) input->readShortBigEndian(); + metadataValues.set ("NumCueNotes", String (numNotes)); + + for (uint16 i = 0; i < numNotes; ++i) + { + uint32 timestamp = (uint32) input->readIntBigEndian(); + uint16 identifier = (uint16) input->readShortBigEndian(); // may be zero in this case + uint16 stringLength = (uint16) input->readShortBigEndian(); + + MemoryBlock textBlock; + input->readIntoMemoryBlock (textBlock, stringLength + (stringLength & 1)); + const String text = String::fromUTF8 ((const char*)textBlock.getData(), stringLength); + + const String prefix ("CueNote" + String (i)); + metadataValues.set (prefix + "TimeStamp", String (timestamp)); + metadataValues.set (prefix + "Identifier", String (identifier)); + metadataValues.set (prefix + "Text", text); + } + } + else if (type == chunkName ("INST")) + { + HeapBlock inst; + inst.calloc (jmax ((size_t) length + 1, sizeof (InstChunk)), 1); + input->read (inst, length); + inst->copyTo (metadataValues); + } else if ((hasGotVer && hasGotData && hasGotType) || chunkEnd < input->getPosition() || input->isExhausted()) @@ -20348,6 +20627,9 @@ public: } } } + + if (metadataValues.size() > 0) + metadataValues.set ("MetaDataSource", "AIFF"); } bool readSamples (int** destSamples, int numDestChannels, int startOffsetInDestBuffer, @@ -20416,8 +20698,6 @@ public: } private: - static inline int chunkName (const char* const name) { return (int) ByteOrder::littleEndianInt (name); } - JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AiffAudioFormatReader); }; @@ -20425,12 +20705,28 @@ class AiffAudioFormatWriter : public AudioFormatWriter { public: - AiffAudioFormatWriter (OutputStream* out, double sampleRate_, unsigned int numChans, int bits) + AiffAudioFormatWriter (OutputStream* out, double sampleRate_, + unsigned int numChans, int bits, + const StringPairArray& metadataValues) : AudioFormatWriter (out, TRANS (aiffFormatName), sampleRate_, numChans, bits), lengthInSamples (0), bytesWritten (0), writeFailed (false) { + using namespace AiffFileHelpers; + + if (metadataValues.size() > 0) + { + // The meta data should have been santised for the AIFF format. 
+ // If it was originally sourced from a WAV file the MetaDataSource + // key should be removed (or set to "AIFF") once this has been done + jassert (metadataValues.getValue ("MetaDataSource", "None") != "WAV"); + + MarkChunk::create (markChunk, metadataValues); + COMTChunk::create (comtChunk, metadataValues); + InstChunk::create (instChunk, metadataValues); + } + headerPosition = out->getPosition(); writeHeader(); } @@ -20482,15 +20778,15 @@ public: } private: - MemoryBlock tempBlock; + MemoryBlock tempBlock, markChunk, comtChunk, instChunk; uint32 lengthInSamples, bytesWritten; int64 headerPosition; bool writeFailed; - static inline int chunkName (const char* const name) { return (int) ByteOrder::littleEndianInt (name); } - void writeHeader() { + using namespace AiffFileHelpers; + const bool couldSeekOk = output->setPosition (headerPosition); (void) couldSeekOk; @@ -20498,7 +20794,9 @@ private: // to be able to seek back to write the header jassert (couldSeekOk); - const int headerLen = 54; + const int headerLen = 54 + (markChunk.getSize() > 0 ? markChunk.getSize() + 8 : 0) + + (comtChunk.getSize() > 0 ? comtChunk.getSize() + 8 : 0) + + (instChunk.getSize() > 0 ? instChunk.getSize() + 8 : 0); int audioBytes = lengthInSamples * ((bitsPerSample * numChannels) / 8); audioBytes += (audioBytes & 1); @@ -20554,6 +20852,27 @@ private: output->write (sampleRateBytes, 10); + if (markChunk.getSize() > 0) + { + output->writeInt (chunkName ("MARK")); + output->writeIntBigEndian ((int) markChunk.getSize()); + output->write (markChunk.getData(), (int) markChunk.getSize()); + } + + if (comtChunk.getSize() > 0) + { + output->writeInt (chunkName ("COMT")); + output->writeIntBigEndian ((int) comtChunk.getSize()); + output->write (comtChunk.getData(), (int) comtChunk.getSize()); + } + + if (instChunk.getSize() > 0) + { + output->writeInt (chunkName ("INST")); + output->writeIntBigEndian ((int) instChunk.getSize()); + output->write (instChunk.getData(), (int) instChunk.getSize()); + } + output->writeInt (chunkName ("SSND")); output->writeIntBigEndian (audioBytes + 8); output->writeInt (0); @@ -20618,11 +20937,11 @@ AudioFormatWriter* AiffAudioFormat::createWriterFor (OutputStream* out, double sampleRate, unsigned int numberOfChannels, int bitsPerSample, - const StringPairArray& /*metadataValues*/, + const StringPairArray& metadataValues, int /*qualityOptionIndex*/) { if (getPossibleBitDepths().contains (bitsPerSample)) - return new AiffAudioFormatWriter (out, sampleRate, numberOfChannels, bitsPerSample); + return new AiffAudioFormatWriter (out, sampleRate, numberOfChannels, bitsPerSample, metadataValues); return nullptr; } @@ -21501,7 +21820,7 @@ struct AudioThumbnail::MinMaxValue char minValue; char maxValue; - MinMaxValue() : minValue (0), maxValue (0) + MinMaxValue() noexcept : minValue (0), maxValue (0) { } @@ -21742,7 +22061,7 @@ public: return data.size(); } - void getMinMax (int startSample, int endSample, MinMaxValue& result) noexcept + void getMinMax (int startSample, int endSample, MinMaxValue& result) const noexcept { if (startSample >= 0) { @@ -21784,12 +22103,12 @@ public: dest[i] = source[i]; } - void resetPeak() + void resetPeak() noexcept { peakLevel = -1; } - int getPeak() + int getPeak() noexcept { if (peakLevel < 0) { @@ -22199,6 +22518,24 @@ float AudioThumbnail::getApproximatePeak() const return jlimit (0, 127, peak) / 127.0f; } +void AudioThumbnail::getApproximateMinMax (const double startTime, const double endTime, const int channelIndex, + float& minValue, float& maxValue) const 
noexcept +{ + MinMaxValue result; + const ThumbData* const data = channels [channelIndex]; + + if (data != nullptr && sampleRate > 0) + { + const int firstThumbIndex = (int) ((startTime * sampleRate) / samplesPerThumbSample); + const int lastThumbIndex = (int) (((endTime * sampleRate) + samplesPerThumbSample - 1) / samplesPerThumbSample); + + data->getMinMax (jmax (0, firstThumbIndex), lastThumbIndex, result); + } + + minValue = result.minValue / 128.0f; + maxValue = result.maxValue / 128.0f; +} + void AudioThumbnail::drawChannel (Graphics& g, const Rectangle& area, double startTime, double endTime, int channelNum, float verticalZoomFactor) { @@ -22794,7 +23131,7 @@ struct SMPLChunk struct SampleLoop { uint32 identifier; - uint32 type; + uint32 type; // these are different in AIFF and WAV uint32 start; uint32 end; uint32 fraction; @@ -22852,8 +23189,6 @@ struct SMPLChunk SMPLChunk* const s = static_cast (data.getData()); - // Allow these calls to overwrite an extra byte at the end, which is fine as long - // as they get called in the right order.. s->manufacturer = ByteOrder::swapIfBigEndian ((uint32) values.getValue ("Manufacturer", "0").getIntValue()); s->product = ByteOrder::swapIfBigEndian ((uint32) values.getValue ("Product", "0").getIntValue()); s->samplePeriod = ByteOrder::swapIfBigEndian ((uint32) values.getValue ("SamplePeriod", "0").getIntValue()); @@ -22879,6 +23214,51 @@ struct SMPLChunk } } PACKED; +struct InstChunk +{ + int8 baseNote; + int8 detune; + int8 gain; + int8 lowNote; + int8 highNote; + int8 lowVelocity; + int8 highVelocity; + + void copyTo (StringPairArray& values) const + { + values.set ("MidiUnityNote", String (baseNote)); + values.set ("Detune", String (detune)); + values.set ("Gain", String (gain)); + values.set ("LowNote", String (lowNote)); + values.set ("HighNote", String (highNote)); + values.set ("LowVelocity", String (lowVelocity)); + values.set ("HighVelocity", String (highVelocity)); + } + + static MemoryBlock createFrom (const StringPairArray& values) + { + const StringArray& keys = values.getAllKeys(); + + if (! 
(keys.contains ("LowNote", true) && keys.contains ("HighNote", true))) + return MemoryBlock(); + + MemoryBlock data (8); + data.fillWith (0); + + InstChunk* const inst = static_cast (data.getData()); + + inst->baseNote = (int8) values.getValue ("MidiUnityNote", "60").getIntValue(); + inst->detune = (int8) values.getValue ("Detune", "0").getIntValue(); + inst->gain = (int8) values.getValue ("Gain", "0").getIntValue(); + inst->lowNote = (int8) values.getValue ("LowNote", "0").getIntValue(); + inst->highNote = (int8) values.getValue ("HighNote", "127").getIntValue(); + inst->lowVelocity = (int8) values.getValue ("LowVelocity", "1").getIntValue(); + inst->highVelocity = (int8) values.getValue ("HighVelocity", "127").getIntValue(); + + return data; + } +} PACKED; + struct CueChunk { struct Cue @@ -22913,39 +23293,117 @@ struct CueChunk } } - static MemoryBlock createFrom (const StringPairArray& values) + static void create (MemoryBlock& data, const StringPairArray& values) { const int numCues = values.getValue ("NumCuePoints", "0").getIntValue(); - if (numCues <= 0) - return MemoryBlock(); + if (numCues > 0) + { + const size_t sizeNeeded = sizeof (CueChunk) + (numCues - 1) * sizeof (Cue); + data.setSize ((sizeNeeded + 3) & ~3, true); - const size_t sizeNeeded = sizeof (CueChunk) + (numCues - 1) * sizeof (Cue); - MemoryBlock data ((sizeNeeded + 3) & ~3); - data.fillWith (0); + CueChunk* const c = static_cast (data.getData()); - CueChunk* const c = static_cast (data.getData()); + c->numCues = ByteOrder::swapIfBigEndian ((uint32) numCues); - c->numCues = ByteOrder::swapIfBigEndian ((uint32) numCues); + const String dataChunkID (chunkName ("data")); - const String dataChunkID (chunkName ("data")); + int nextOrder = 0; - for (int i = 0; i < numCues; ++i) - { - const String prefix ("Cue" + String(i)); - c->cues[i].identifier = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "Identifier", "0").getIntValue()); - c->cues[i].order = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "Order", "0").getIntValue()); - c->cues[i].chunkID = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "ChunkID", dataChunkID).getIntValue()); - c->cues[i].chunkStart = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "ChunkStart", "0").getIntValue()); - c->cues[i].blockStart = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "BlockStart", "0").getIntValue()); - c->cues[i].offset = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "Offset", "0").getIntValue()); - } + #if JUCE_DEBUG + Array identifiers; + #endif - return data; + for (int i = 0; i < numCues; ++i) + { + const String prefix ("Cue" + String (i)); + + uint32 identifier = values.getValue (prefix + "Identifier", "0").getIntValue(); + + #if JUCE_DEBUG + jassert (! 
identifiers.contains (identifier)); + identifiers.add (identifier); + #endif + + c->cues[i].identifier = ByteOrder::swapIfBigEndian ((uint32) identifier); + + const int order = values.getValue (prefix + "Order", String (nextOrder)).getIntValue(); + nextOrder = jmax (nextOrder, order) + 1; + + c->cues[i].order = ByteOrder::swapIfBigEndian ((uint32) order); + c->cues[i].chunkID = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "ChunkID", dataChunkID).getIntValue()); + c->cues[i].chunkStart = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "ChunkStart", "0").getIntValue()); + c->cues[i].blockStart = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "BlockStart", "0").getIntValue()); + c->cues[i].offset = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "Offset", "0").getIntValue()); + } + } } } PACKED; +namespace ListChunk +{ + void appendLabelOrNoteChunk (const StringPairArray& values, const String& prefix, + const int chunkType, MemoryOutputStream& out) + { + const String label (values.getValue (prefix + "Text", prefix)); + const int labelLength = label.getNumBytesAsUTF8() + 1; + const int chunkLength = 4 + labelLength + (labelLength & 1); + + out.writeInt (chunkType); + out.writeInt (chunkLength); + out.writeInt (values.getValue (prefix + "Identifier", "0").getIntValue()); + out.write (label.toUTF8(), labelLength); + + if ((out.getDataSize() & 1) != 0) + out.writeByte (0); + } + + void appendExtraChunk (const StringPairArray& values, const String& prefix, MemoryOutputStream& out) + { + const String text (values.getValue (prefix + "Text", prefix)); + + const int textLength = text.getNumBytesAsUTF8() + 1; // include null terminator + uint32 chunkLength = textLength + 20 + (textLength & 1); + + out.writeInt (chunkName ("ltxt")); + out.writeInt (chunkLength); + out.writeInt (values.getValue (prefix + "Identifier", "0").getIntValue()); + out.writeInt (values.getValue (prefix + "SampleLength", "0").getIntValue()); + out.writeInt (values.getValue (prefix + "Purpose", "0").getIntValue()); + out.writeShort ((short) values.getValue (prefix + "Country", "0").getIntValue()); + out.writeShort ((short) values.getValue (prefix + "Language", "0").getIntValue()); + out.writeShort ((short) values.getValue (prefix + "Dialect", "0").getIntValue()); + out.writeShort ((short) values.getValue (prefix + "CodePage", "0").getIntValue()); + out.write (text.toUTF8(), textLength); + + if ((out.getDataSize() & 1) != 0) + out.writeByte (0); + } + + void create (MemoryBlock& block, const StringPairArray& values) + { + const int numCueLabels = values.getValue ("NumCueLabels", "0").getIntValue(); + const int numCueNotes = values.getValue ("NumCueNotes", "0").getIntValue(); + const int numCueRegions = values.getValue ("NumCueRegions", "0").getIntValue(); + + if (numCueLabels > 0 || numCueNotes > 0 || numCueRegions > 0) + { + MemoryOutputStream out (block, false); + + int i; + for (i = 0; i < numCueLabels; ++i) + appendLabelOrNoteChunk (values, "CueLabel" + String (i), chunkName ("labl"), out); + + for (i = 0; i < numCueNotes; ++i) + appendLabelOrNoteChunk (values, "CueNote" + String (i), chunkName ("note"), out); + + for (i = 0; i < numCueRegions; ++i) + appendExtraChunk (values, "CueRegion" + String (i), out); + } + } +} + struct ExtensibleWavSubFormat { uint32 data1; @@ -22988,6 +23446,9 @@ public: int64 end = 0; bool hasGotType = false; bool hasGotData = false; + int cueNoteIndex = 0; + int cueLabelIndex = 0; + int cueRegionIndex = 0; const int firstChunkType = 
input->readInt(); @@ -23112,6 +23573,13 @@ public: input->read (smpl, length); smpl->copyTo (metadataValues, length); } + else if (chunkType == chunkName ("inst") || chunkType == chunkName ("INST")) // need to check which... + { + HeapBlock inst; + inst.calloc (jmax ((size_t) length + 1, sizeof (InstChunk)), 1); + input->read (inst, length); + inst->copyTo (metadataValues); + } else if (chunkType == chunkName ("cue ")) { HeapBlock cue; @@ -23119,6 +23587,65 @@ public: input->read (cue, length); cue->copyTo (metadataValues, length); } + else if (chunkType == chunkName ("LIST")) + { + if (input->readInt() == chunkName ("adtl")) + { + while (input->getPosition() < chunkEnd) + { + const int adtlChunkType = input->readInt(); + const uint32 adtlLength = (uint32) input->readInt(); + const int64 adtlChunkEnd = input->getPosition() + (adtlLength + (adtlLength & 1)); + + if (adtlChunkType == chunkName ("labl") || adtlChunkType == chunkName ("note")) + { + String prefix; + + if (adtlChunkType == chunkName ("labl")) + prefix << "CueLabel" << cueLabelIndex++; + else if (adtlChunkType == chunkName ("note")) + prefix << "CueNote" << cueNoteIndex++; + + const uint32 identifier = (uint32) input->readInt(); + const uint32 stringLength = adtlLength - 4; + + MemoryBlock textBlock; + input->readIntoMemoryBlock (textBlock, stringLength); + const String text (String::fromUTF8 (static_cast (textBlock.getData()), textBlock.getSize())); + + metadataValues.set (prefix + "Identifier", String (identifier)); + metadataValues.set (prefix + "Text", text); + } + else if (adtlChunkType == chunkName ("ltxt")) + { + const String prefix ("CueRegion" + String (cueRegionIndex++)); + const uint32 identifier = (uint32) input->readInt(); + const uint32 sampleLength = (uint32) input->readInt(); + const uint32 purpose = (uint32) input->readInt(); + const uint16 country = (uint16) input->readInt(); + const uint16 language = (uint16) input->readInt(); + const uint16 dialect = (uint16) input->readInt(); + const uint16 codePage = (uint16) input->readInt(); + const uint32 stringLength = adtlLength - 20; + + MemoryBlock textBlock; + input->readIntoMemoryBlock (textBlock, stringLength); + const String text = String::fromUTF8 ((const char*)textBlock.getData(), textBlock.getSize()); + + metadataValues.set (prefix + "Identifier", String (identifier)); + metadataValues.set (prefix + "SampleLength", String (sampleLength)); + metadataValues.set (prefix + "Purpose", String (purpose)); + metadataValues.set (prefix + "Country", String (country)); + metadataValues.set (prefix + "Language", String (language)); + metadataValues.set (prefix + "Dialect", String (dialect)); + metadataValues.set (prefix + "CodePage", String (codePage)); + metadataValues.set (prefix + "Text", text); + } + + input->setPosition (adtlChunkEnd); + } + } + } else if (chunkEnd <= input->getPosition()) { break; @@ -23127,6 +23654,11 @@ public: input->setPosition (chunkEnd); } } + + if (cueLabelIndex > 0) metadataValues.set ("NumCueLabels", String (cueLabelIndex)); + if (cueNoteIndex > 0) metadataValues.set ("NumCueNotes", String (cueNoteIndex)); + if (cueRegionIndex > 0) metadataValues.set ("NumCueRegions", String (cueRegionIndex)); + if (metadataValues.size() > 0) metadataValues.set ("MetaDataSource", "WAV"); } bool readSamples (int** destSamples, int numDestChannels, int startOffsetInDestBuffer, @@ -23207,9 +23739,16 @@ public: if (metadataValues.size() > 0) { + // The meta data should have been santised for the WAV format. 
+ // If it was originally sourced from an AIFF file the MetaDataSource + // key should be removed (or set to "WAV") once this has been done + jassert (metadataValues.getValue ("MetaDataSource", "None") != "AIFF"); + bwavChunk = BWAVChunk::createFrom (metadataValues); smplChunk = SMPLChunk::createFrom (metadataValues); - cueChunk = CueChunk ::createFrom (metadataValues); + instChunk = InstChunk::createFrom (metadataValues); + CueChunk ::create (cueChunk, metadataValues); + ListChunk::create (listChunk, metadataValues); } headerPosition = out->getPosition(); @@ -23266,7 +23805,7 @@ public: private: ScopedPointer converter; - MemoryBlock tempBlock, bwavChunk, smplChunk, cueChunk; + MemoryBlock tempBlock, bwavChunk, smplChunk, instChunk, cueChunk, listChunk; uint64 lengthInSamples, bytesWritten; int64 headerPosition; bool writeFailed; @@ -23305,7 +23844,9 @@ private: + 8 + audioDataSize + (audioDataSize & 1) + (bwavChunk.getSize() > 0 ? (8 + bwavChunk.getSize()) : 0) + (smplChunk.getSize() > 0 ? (8 + smplChunk.getSize()) : 0) + + (instChunk.getSize() > 0 ? (8 + instChunk.getSize()) : 0) + (cueChunk .getSize() > 0 ? (8 + cueChunk .getSize()) : 0) + + (listChunk.getSize() > 0 ? (12 + listChunk.getSize()) : 0) + (8 + 28); // (ds64 chunk) riffChunkSize += (riffChunkSize & 0x1); @@ -23384,6 +23925,13 @@ private: output->write (smplChunk.getData(), (int) smplChunk.getSize()); } + if (instChunk.getSize() > 0) + { + output->writeInt (chunkName ("inst")); + output->writeInt (7); + output->write (instChunk.getData(), (int) instChunk.getSize()); + } + if (cueChunk.getSize() > 0) { output->writeInt (chunkName ("cue ")); @@ -23391,6 +23939,14 @@ private: output->write (cueChunk.getData(), (int) cueChunk.getSize()); } + if (listChunk.getSize() > 0) + { + output->writeInt (chunkName ("LIST")); + output->writeInt ((int) listChunk.getSize() + 4); + output->writeInt (chunkName ("adtl")); + output->write (listChunk.getData(), (int) listChunk.getSize()); + } + output->writeInt (chunkName ("data")); output->writeInt (isRF64 ? -1 : (int) (lengthInSamples * bytesPerFrame)); @@ -35893,616 +36449,320 @@ BEGIN_JUCE_NAMESPACE const int AudioProcessorGraph::midiChannelIndex = 0x1000; -AudioProcessorGraph::Node::Node (const uint32 id_, AudioProcessor* const processor_) - : id (id_), - processor (processor_), - isPrepared (false) +namespace GraphRenderingOps { - jassert (processor_ != nullptr); -} -void AudioProcessorGraph::Node::prepare (const double sampleRate, const int blockSize, - AudioProcessorGraph* const graph) +class AudioGraphRenderingOp { - if (! 
isPrepared) - { - isPrepared = true; - - AudioProcessorGraph::AudioGraphIOProcessor* const ioProc - = dynamic_cast (static_cast (processor)); - - if (ioProc != nullptr) - ioProc->setParentGraph (graph); +public: + AudioGraphRenderingOp() {} + virtual ~AudioGraphRenderingOp() {} - processor->setPlayConfigDetails (processor->getNumInputChannels(), - processor->getNumOutputChannels(), - sampleRate, blockSize); + virtual void perform (AudioSampleBuffer& sharedBufferChans, + const OwnedArray & sharedMidiBuffers, + const int numSamples) = 0; - processor->prepareToPlay (sampleRate, blockSize); - } -} + JUCE_LEAK_DETECTOR (AudioGraphRenderingOp); +}; -void AudioProcessorGraph::Node::unprepare() +class ClearChannelOp : public AudioGraphRenderingOp { - if (isPrepared) +public: + ClearChannelOp (const int channelNum_) + : channelNum (channelNum_) + {} + + void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray &, const int numSamples) { - isPrepared = false; - processor->releaseResources(); + sharedBufferChans.clear (channelNum, 0, numSamples); } -} -AudioProcessorGraph::AudioProcessorGraph() - : lastNodeId (0), - renderingBuffers (1, 1), - currentAudioOutputBuffer (1, 1) -{ -} +private: + const int channelNum; -AudioProcessorGraph::~AudioProcessorGraph() -{ - clearRenderingSequence(); - clear(); -} + JUCE_DECLARE_NON_COPYABLE (ClearChannelOp); +}; -const String AudioProcessorGraph::getName() const +class CopyChannelOp : public AudioGraphRenderingOp { - return "Audio Graph"; -} +public: + CopyChannelOp (const int srcChannelNum_, const int dstChannelNum_) + : srcChannelNum (srcChannelNum_), + dstChannelNum (dstChannelNum_) + {} -void AudioProcessorGraph::clear() -{ - nodes.clear(); - connections.clear(); - triggerAsyncUpdate(); -} + void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray &, const int numSamples) + { + sharedBufferChans.copyFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples); + } -AudioProcessorGraph::Node* AudioProcessorGraph::getNodeForId (const uint32 nodeId) const -{ - for (int i = nodes.size(); --i >= 0;) - if (nodes.getUnchecked(i)->id == nodeId) - return nodes.getUnchecked(i); +private: + const int srcChannelNum, dstChannelNum; - return nullptr; -} + JUCE_DECLARE_NON_COPYABLE (CopyChannelOp); +}; -AudioProcessorGraph::Node* AudioProcessorGraph::addNode (AudioProcessor* const newProcessor, - uint32 nodeId) +class AddChannelOp : public AudioGraphRenderingOp { - if (newProcessor == nullptr) - { - jassertfalse; - return nullptr; - } +public: + AddChannelOp (const int srcChannelNum_, const int dstChannelNum_) + : srcChannelNum (srcChannelNum_), + dstChannelNum (dstChannelNum_) + {} - if (nodeId == 0) - { - nodeId = ++lastNodeId; - } - else + void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray &, const int numSamples) { - // you can't add a node with an id that already exists in the graph.. 
- jassert (getNodeForId (nodeId) == nullptr); - removeNode (nodeId); + sharedBufferChans.addFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples); } - lastNodeId = nodeId; +private: + const int srcChannelNum, dstChannelNum; - Node* const n = new Node (nodeId, newProcessor); - nodes.add (n); - triggerAsyncUpdate(); + JUCE_DECLARE_NON_COPYABLE (AddChannelOp); +}; - AudioProcessorGraph::AudioGraphIOProcessor* const ioProc - = dynamic_cast (static_cast (n->processor)); +class ClearMidiBufferOp : public AudioGraphRenderingOp +{ +public: + ClearMidiBufferOp (const int bufferNum_) + : bufferNum (bufferNum_) + {} - if (ioProc != nullptr) - ioProc->setParentGraph (this); + void perform (AudioSampleBuffer&, const OwnedArray & sharedMidiBuffers, const int) + { + sharedMidiBuffers.getUnchecked (bufferNum)->clear(); + } - return n; -} +private: + const int bufferNum; -bool AudioProcessorGraph::removeNode (const uint32 nodeId) + JUCE_DECLARE_NON_COPYABLE (ClearMidiBufferOp); +}; + +class CopyMidiBufferOp : public AudioGraphRenderingOp { - disconnectNode (nodeId); +public: + CopyMidiBufferOp (const int srcBufferNum_, const int dstBufferNum_) + : srcBufferNum (srcBufferNum_), + dstBufferNum (dstBufferNum_) + {} - for (int i = nodes.size(); --i >= 0;) + void perform (AudioSampleBuffer&, const OwnedArray & sharedMidiBuffers, const int) { - if (nodes.getUnchecked(i)->id == nodeId) - { - AudioProcessorGraph::AudioGraphIOProcessor* const ioProc - = dynamic_cast (static_cast (nodes.getUnchecked(i)->processor)); + *sharedMidiBuffers.getUnchecked (dstBufferNum) = *sharedMidiBuffers.getUnchecked (srcBufferNum); + } - if (ioProc != nullptr) - ioProc->setParentGraph (nullptr); +private: + const int srcBufferNum, dstBufferNum; - nodes.remove (i); - triggerAsyncUpdate(); + JUCE_DECLARE_NON_COPYABLE (CopyMidiBufferOp); +}; - return true; - } +class AddMidiBufferOp : public AudioGraphRenderingOp +{ +public: + AddMidiBufferOp (const int srcBufferNum_, const int dstBufferNum_) + : srcBufferNum (srcBufferNum_), + dstBufferNum (dstBufferNum_) + {} + + void perform (AudioSampleBuffer&, const OwnedArray & sharedMidiBuffers, const int numSamples) + { + sharedMidiBuffers.getUnchecked (dstBufferNum) + ->addEvents (*sharedMidiBuffers.getUnchecked (srcBufferNum), 0, numSamples, 0); } - return false; -} +private: + const int srcBufferNum, dstBufferNum; -const AudioProcessorGraph::Connection* AudioProcessorGraph::getConnectionBetween (const uint32 sourceNodeId, - const int sourceChannelIndex, - const uint32 destNodeId, - const int destChannelIndex) const + JUCE_DECLARE_NON_COPYABLE (AddMidiBufferOp); +}; + +class ProcessBufferOp : public AudioGraphRenderingOp { - for (int i = connections.size(); --i >= 0;) +public: + ProcessBufferOp (const AudioProcessorGraph::Node::Ptr& node_, + const Array & audioChannelsToUse_, + const int totalChans_, + const int midiBufferToUse_) + : node (node_), + processor (node_->getProcessor()), + audioChannelsToUse (audioChannelsToUse_), + totalChans (jmax (1, totalChans_)), + midiBufferToUse (midiBufferToUse_) { - const Connection* const c = connections.getUnchecked(i); + channels.calloc (totalChans); - if (c->sourceNodeId == sourceNodeId - && c->destNodeId == destNodeId - && c->sourceChannelIndex == sourceChannelIndex - && c->destChannelIndex == destChannelIndex) - { - return c; - } + while (audioChannelsToUse.size() < totalChans) + audioChannelsToUse.add (0); } - return nullptr; -} - -bool AudioProcessorGraph::isConnected (const uint32 possibleSourceNodeId, - const uint32 
possibleDestNodeId) const -{ - for (int i = connections.size(); --i >= 0;) + void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray & sharedMidiBuffers, const int numSamples) { - const Connection* const c = connections.getUnchecked(i); + for (int i = totalChans; --i >= 0;) + channels[i] = sharedBufferChans.getSampleData (audioChannelsToUse.getUnchecked (i), 0); - if (c->sourceNodeId == possibleSourceNodeId - && c->destNodeId == possibleDestNodeId) - { - return true; - } + AudioSampleBuffer buffer (channels, totalChans, numSamples); + + processor->processBlock (buffer, *sharedMidiBuffers.getUnchecked (midiBufferToUse)); } - return false; -} + const AudioProcessorGraph::Node::Ptr node; + AudioProcessor* const processor; -bool AudioProcessorGraph::canConnect (const uint32 sourceNodeId, - const int sourceChannelIndex, - const uint32 destNodeId, - const int destChannelIndex) const -{ - if (sourceChannelIndex < 0 - || destChannelIndex < 0 - || sourceNodeId == destNodeId - || (destChannelIndex == midiChannelIndex) != (sourceChannelIndex == midiChannelIndex)) - return false; +private: + Array audioChannelsToUse; + HeapBlock channels; + int totalChans; + int midiBufferToUse; - const Node* const source = getNodeForId (sourceNodeId); - - if (source == nullptr - || (sourceChannelIndex != midiChannelIndex && sourceChannelIndex >= source->processor->getNumOutputChannels()) - || (sourceChannelIndex == midiChannelIndex && ! source->processor->producesMidi())) - return false; - - const Node* const dest = getNodeForId (destNodeId); - - if (dest == nullptr - || (destChannelIndex != midiChannelIndex && destChannelIndex >= dest->processor->getNumInputChannels()) - || (destChannelIndex == midiChannelIndex && ! dest->processor->acceptsMidi())) - return false; - - return getConnectionBetween (sourceNodeId, sourceChannelIndex, - destNodeId, destChannelIndex) == nullptr; -} - -bool AudioProcessorGraph::addConnection (const uint32 sourceNodeId, - const int sourceChannelIndex, - const uint32 destNodeId, - const int destChannelIndex) -{ - if (! canConnect (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex)) - return false; - - Connection* const c = new Connection(); - c->sourceNodeId = sourceNodeId; - c->sourceChannelIndex = sourceChannelIndex; - c->destNodeId = destNodeId; - c->destChannelIndex = destChannelIndex; - - connections.add (c); - triggerAsyncUpdate(); - - return true; -} - -void AudioProcessorGraph::removeConnection (const int index) -{ - connections.remove (index); - triggerAsyncUpdate(); -} + JUCE_DECLARE_NON_COPYABLE (ProcessBufferOp); +}; -bool AudioProcessorGraph::removeConnection (const uint32 sourceNodeId, const int sourceChannelIndex, - const uint32 destNodeId, const int destChannelIndex) +/** Used to calculate the correct sequence of rendering ops needed, based on + the best re-use of shared buffers at each stage. 
+*/ +class RenderingOpSequenceCalculator { - bool doneAnything = false; +public: - for (int i = connections.size(); --i >= 0;) + RenderingOpSequenceCalculator (AudioProcessorGraph& graph_, + const Array& orderedNodes_, + Array& renderingOps) + : graph (graph_), + orderedNodes (orderedNodes_) { - const Connection* const c = connections.getUnchecked(i); + nodeIds.add ((uint32) zeroNodeID); // first buffer is read-only zeros + channels.add (0); - if (c->sourceNodeId == sourceNodeId - && c->destNodeId == destNodeId - && c->sourceChannelIndex == sourceChannelIndex - && c->destChannelIndex == destChannelIndex) + midiNodeIds.add ((uint32) zeroNodeID); + + for (int i = 0; i < orderedNodes.size(); ++i) { - removeConnection (i); - doneAnything = true; - triggerAsyncUpdate(); + createRenderingOpsForNode ((AudioProcessorGraph::Node*) orderedNodes.getUnchecked(i), + renderingOps, i); + + markAnyUnusedBuffersAsFree (i); } } - return doneAnything; -} - -bool AudioProcessorGraph::disconnectNode (const uint32 nodeId) -{ - bool doneAnything = false; + int getNumBuffersNeeded() const { return nodeIds.size(); } + int getNumMidiBuffersNeeded() const { return midiNodeIds.size(); } - for (int i = connections.size(); --i >= 0;) - { - const Connection* const c = connections.getUnchecked(i); +private: - if (c->sourceNodeId == nodeId || c->destNodeId == nodeId) - { - removeConnection (i); - doneAnything = true; - triggerAsyncUpdate(); - } - } + AudioProcessorGraph& graph; + const Array& orderedNodes; + Array channels; + Array nodeIds, midiNodeIds; - return doneAnything; -} + enum { freeNodeID = 0xffffffff, zeroNodeID = 0xfffffffe }; -bool AudioProcessorGraph::removeIllegalConnections() -{ - bool doneAnything = false; + static bool isNodeBusy (uint32 nodeID) noexcept { return nodeID != freeNodeID && nodeID != zeroNodeID; } - for (int i = connections.size(); --i >= 0;) + void createRenderingOpsForNode (AudioProcessorGraph::Node* const node, + Array& renderingOps, + const int ourRenderingIndex) { - const Connection* const c = connections.getUnchecked(i); + const int numIns = node->getProcessor()->getNumInputChannels(); + const int numOuts = node->getProcessor()->getNumOutputChannels(); + const int totalChans = jmax (numIns, numOuts); - const Node* const source = getNodeForId (c->sourceNodeId); - const Node* const dest = getNodeForId (c->destNodeId); + Array audioChannelsToUse; + int midiBufferToUse = -1; - if (source == nullptr || dest == nullptr - || (c->sourceChannelIndex != midiChannelIndex - && ! isPositiveAndBelow (c->sourceChannelIndex, source->processor->getNumOutputChannels())) - || (c->sourceChannelIndex == midiChannelIndex - && ! source->processor->producesMidi()) - || (c->destChannelIndex != midiChannelIndex - && ! isPositiveAndBelow (c->destChannelIndex, dest->processor->getNumInputChannels())) - || (c->destChannelIndex == midiChannelIndex - && ! 
dest->processor->acceptsMidi())) + for (int inputChan = 0; inputChan < numIns; ++inputChan) { - removeConnection (i); - doneAnything = true; - triggerAsyncUpdate(); - } - } + // get a list of all the inputs to this node + Array sourceNodes, sourceOutputChans; - return doneAnything; -} + for (int i = graph.getNumConnections(); --i >= 0;) + { + const AudioProcessorGraph::Connection* const c = graph.getConnection (i); -namespace GraphRenderingOps -{ + if (c->destNodeId == node->nodeId && c->destChannelIndex == inputChan) + { + sourceNodes.add (c->sourceNodeId); + sourceOutputChans.add (c->sourceChannelIndex); + } + } -class AudioGraphRenderingOp -{ -public: - AudioGraphRenderingOp() {} - virtual ~AudioGraphRenderingOp() {} + int bufIndex = -1; - virtual void perform (AudioSampleBuffer& sharedBufferChans, - const OwnedArray & sharedMidiBuffers, - const int numSamples) = 0; + if (sourceNodes.size() == 0) + { + // unconnected input channel - JUCE_LEAK_DETECTOR (AudioGraphRenderingOp); -}; + if (inputChan >= numOuts) + { + bufIndex = getReadOnlyEmptyBuffer(); + jassert (bufIndex >= 0); + } + else + { + bufIndex = getFreeBuffer (false); + renderingOps.add (new ClearChannelOp (bufIndex)); + } + } + else if (sourceNodes.size() == 1) + { + // channel with a straightforward single input.. + const int srcNode = sourceNodes.getUnchecked(0); + const int srcChan = sourceOutputChans.getUnchecked(0); -class ClearChannelOp : public AudioGraphRenderingOp -{ -public: - ClearChannelOp (const int channelNum_) - : channelNum (channelNum_) - {} + bufIndex = getBufferContaining (srcNode, srcChan); - void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray &, const int numSamples) - { - sharedBufferChans.clear (channelNum, 0, numSamples); - } + if (bufIndex < 0) + { + // if not found, this is probably a feedback loop + bufIndex = getReadOnlyEmptyBuffer(); + jassert (bufIndex >= 0); + } -private: - const int channelNum; + if (inputChan < numOuts + && isBufferNeededLater (ourRenderingIndex, + inputChan, + srcNode, srcChan)) + { + // can't mess up this channel because it's needed later by another node, so we + // need to use a copy of it.. + const int newFreeBuffer = getFreeBuffer (false); - JUCE_DECLARE_NON_COPYABLE (ClearChannelOp); -}; + renderingOps.add (new CopyChannelOp (bufIndex, newFreeBuffer)); -class CopyChannelOp : public AudioGraphRenderingOp -{ -public: - CopyChannelOp (const int srcChannelNum_, const int dstChannelNum_) - : srcChannelNum (srcChannelNum_), - dstChannelNum (dstChannelNum_) - {} + bufIndex = newFreeBuffer; + } + } + else + { + // channel with a mix of several inputs.. - void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray &, const int numSamples) - { - sharedBufferChans.copyFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples); - } + // try to find a re-usable channel from our inputs.. + int reusableInputIndex = -1; -private: - const int srcChannelNum, dstChannelNum; + for (int i = 0; i < sourceNodes.size(); ++i) + { + const int sourceBufIndex = getBufferContaining (sourceNodes.getUnchecked(i), + sourceOutputChans.getUnchecked(i)); - JUCE_DECLARE_NON_COPYABLE (CopyChannelOp); -}; + if (sourceBufIndex >= 0 + && ! isBufferNeededLater (ourRenderingIndex, + inputChan, + sourceNodes.getUnchecked(i), + sourceOutputChans.getUnchecked(i))) + { + // we've found one of our input chans that can be re-used.. 
+ reusableInputIndex = i; + bufIndex = sourceBufIndex; + break; + } + } -class AddChannelOp : public AudioGraphRenderingOp -{ -public: - AddChannelOp (const int srcChannelNum_, const int dstChannelNum_) - : srcChannelNum (srcChannelNum_), - dstChannelNum (dstChannelNum_) - {} - - void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray &, const int numSamples) - { - sharedBufferChans.addFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples); - } - -private: - const int srcChannelNum, dstChannelNum; - - JUCE_DECLARE_NON_COPYABLE (AddChannelOp); -}; - -class ClearMidiBufferOp : public AudioGraphRenderingOp -{ -public: - ClearMidiBufferOp (const int bufferNum_) - : bufferNum (bufferNum_) - {} - - void perform (AudioSampleBuffer&, const OwnedArray & sharedMidiBuffers, const int) - { - sharedMidiBuffers.getUnchecked (bufferNum)->clear(); - } - -private: - const int bufferNum; - - JUCE_DECLARE_NON_COPYABLE (ClearMidiBufferOp); -}; - -class CopyMidiBufferOp : public AudioGraphRenderingOp -{ -public: - CopyMidiBufferOp (const int srcBufferNum_, const int dstBufferNum_) - : srcBufferNum (srcBufferNum_), - dstBufferNum (dstBufferNum_) - {} - - void perform (AudioSampleBuffer&, const OwnedArray & sharedMidiBuffers, const int) - { - *sharedMidiBuffers.getUnchecked (dstBufferNum) = *sharedMidiBuffers.getUnchecked (srcBufferNum); - } - -private: - const int srcBufferNum, dstBufferNum; - - JUCE_DECLARE_NON_COPYABLE (CopyMidiBufferOp); -}; - -class AddMidiBufferOp : public AudioGraphRenderingOp -{ -public: - AddMidiBufferOp (const int srcBufferNum_, const int dstBufferNum_) - : srcBufferNum (srcBufferNum_), - dstBufferNum (dstBufferNum_) - {} - - void perform (AudioSampleBuffer&, const OwnedArray & sharedMidiBuffers, const int numSamples) - { - sharedMidiBuffers.getUnchecked (dstBufferNum) - ->addEvents (*sharedMidiBuffers.getUnchecked (srcBufferNum), 0, numSamples, 0); - } - -private: - const int srcBufferNum, dstBufferNum; - - JUCE_DECLARE_NON_COPYABLE (AddMidiBufferOp); -}; - -class ProcessBufferOp : public AudioGraphRenderingOp -{ -public: - ProcessBufferOp (const AudioProcessorGraph::Node::Ptr& node_, - const Array & audioChannelsToUse_, - const int totalChans_, - const int midiBufferToUse_) - : node (node_), - processor (node_->getProcessor()), - audioChannelsToUse (audioChannelsToUse_), - totalChans (jmax (1, totalChans_)), - midiBufferToUse (midiBufferToUse_) - { - channels.calloc (totalChans); - - while (audioChannelsToUse.size() < totalChans) - audioChannelsToUse.add (0); - } - - void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray & sharedMidiBuffers, const int numSamples) - { - for (int i = totalChans; --i >= 0;) - channels[i] = sharedBufferChans.getSampleData (audioChannelsToUse.getUnchecked (i), 0); - - AudioSampleBuffer buffer (channels, totalChans, numSamples); - - processor->processBlock (buffer, *sharedMidiBuffers.getUnchecked (midiBufferToUse)); - } - - const AudioProcessorGraph::Node::Ptr node; - AudioProcessor* const processor; - -private: - Array audioChannelsToUse; - HeapBlock channels; - int totalChans; - int midiBufferToUse; - - JUCE_DECLARE_NON_COPYABLE (ProcessBufferOp); -}; - -/** Used to calculate the correct sequence of rendering ops needed, based on - the best re-use of shared buffers at each stage. 
-*/ -class RenderingOpSequenceCalculator -{ -public: - - RenderingOpSequenceCalculator (AudioProcessorGraph& graph_, - const Array& orderedNodes_, - Array& renderingOps) - : graph (graph_), - orderedNodes (orderedNodes_) - { - nodeIds.add ((uint32) zeroNodeID); // first buffer is read-only zeros - channels.add (0); - - midiNodeIds.add ((uint32) zeroNodeID); - - for (int i = 0; i < orderedNodes.size(); ++i) - { - createRenderingOpsForNode ((AudioProcessorGraph::Node*) orderedNodes.getUnchecked(i), - renderingOps, i); - - markAnyUnusedBuffersAsFree (i); - } - } - - int getNumBuffersNeeded() const { return nodeIds.size(); } - int getNumMidiBuffersNeeded() const { return midiNodeIds.size(); } - -private: - - AudioProcessorGraph& graph; - const Array& orderedNodes; - Array channels; - Array nodeIds, midiNodeIds; - - enum { freeNodeID = 0xffffffff, zeroNodeID = 0xfffffffe }; - - static bool isNodeBusy (uint32 nodeID) noexcept { return nodeID != freeNodeID && nodeID != zeroNodeID; } - - void createRenderingOpsForNode (AudioProcessorGraph::Node* const node, - Array& renderingOps, - const int ourRenderingIndex) - { - const int numIns = node->getProcessor()->getNumInputChannels(); - const int numOuts = node->getProcessor()->getNumOutputChannels(); - const int totalChans = jmax (numIns, numOuts); - - Array audioChannelsToUse; - int midiBufferToUse = -1; - - for (int inputChan = 0; inputChan < numIns; ++inputChan) - { - // get a list of all the inputs to this node - Array sourceNodes, sourceOutputChans; - - for (int i = graph.getNumConnections(); --i >= 0;) - { - const AudioProcessorGraph::Connection* const c = graph.getConnection (i); - - if (c->destNodeId == node->id && c->destChannelIndex == inputChan) - { - sourceNodes.add (c->sourceNodeId); - sourceOutputChans.add (c->sourceChannelIndex); - } - } - - int bufIndex = -1; - - if (sourceNodes.size() == 0) - { - // unconnected input channel - - if (inputChan >= numOuts) - { - bufIndex = getReadOnlyEmptyBuffer(); - jassert (bufIndex >= 0); - } - else - { - bufIndex = getFreeBuffer (false); - renderingOps.add (new ClearChannelOp (bufIndex)); - } - } - else if (sourceNodes.size() == 1) - { - // channel with a straightforward single input.. - const int srcNode = sourceNodes.getUnchecked(0); - const int srcChan = sourceOutputChans.getUnchecked(0); - - bufIndex = getBufferContaining (srcNode, srcChan); - - if (bufIndex < 0) - { - // if not found, this is probably a feedback loop - bufIndex = getReadOnlyEmptyBuffer(); - jassert (bufIndex >= 0); - } - - if (inputChan < numOuts - && isBufferNeededLater (ourRenderingIndex, - inputChan, - srcNode, srcChan)) - { - // can't mess up this channel because it's needed later by another node, so we - // need to use a copy of it.. - const int newFreeBuffer = getFreeBuffer (false); - - renderingOps.add (new CopyChannelOp (bufIndex, newFreeBuffer)); - - bufIndex = newFreeBuffer; - } - } - else - { - // channel with a mix of several inputs.. - - // try to find a re-usable channel from our inputs.. - int reusableInputIndex = -1; - - for (int i = 0; i < sourceNodes.size(); ++i) - { - const int sourceBufIndex = getBufferContaining (sourceNodes.getUnchecked(i), - sourceOutputChans.getUnchecked(i)); - - if (sourceBufIndex >= 0 - && ! isBufferNeededLater (ourRenderingIndex, - inputChan, - sourceNodes.getUnchecked(i), - sourceOutputChans.getUnchecked(i))) - { - // we've found one of our input chans that can be re-used.. 
- reusableInputIndex = i; - bufIndex = sourceBufIndex; - break; - } - } - - if (reusableInputIndex < 0) - { - // can't re-use any of our input chans, so get a new one and copy everything into it.. - bufIndex = getFreeBuffer (false); - jassert (bufIndex != 0); + if (reusableInputIndex < 0) + { + // can't re-use any of our input chans, so get a new one and copy everything into it.. + bufIndex = getFreeBuffer (false); + jassert (bufIndex != 0); const int srcIndex = getBufferContaining (sourceNodes.getUnchecked (0), sourceOutputChans.getUnchecked (0)); @@ -36535,7 +36795,7 @@ private: audioChannelsToUse.add (bufIndex); if (inputChan < numOuts) - markBufferAsContaining (bufIndex, node->id, inputChan); + markBufferAsContaining (bufIndex, node->nodeId, inputChan); } for (int outputChan = numIns; outputChan < numOuts; ++outputChan) @@ -36544,7 +36804,7 @@ private: jassert (bufIndex != 0); audioChannelsToUse.add (bufIndex); - markBufferAsContaining (bufIndex, node->id, outputChan); + markBufferAsContaining (bufIndex, node->nodeId, outputChan); } // Now the same thing for midi.. @@ -36554,7 +36814,7 @@ private: { const AudioProcessorGraph::Connection* const c = graph.getConnection (i); - if (c->destNodeId == node->id && c->destChannelIndex == AudioProcessorGraph::midiChannelIndex) + if (c->destNodeId == node->nodeId && c->destChannelIndex == AudioProcessorGraph::midiChannelIndex) midiSourceNodes.add (c->sourceNodeId); } @@ -36597,185 +36857,591 @@ private: // More than one midi input being mixed.. int reusableInputIndex = -1; - for (int i = 0; i < midiSourceNodes.size(); ++i) - { - const int sourceBufIndex = getBufferContaining (midiSourceNodes.getUnchecked(i), - AudioProcessorGraph::midiChannelIndex); + for (int i = 0; i < midiSourceNodes.size(); ++i) + { + const int sourceBufIndex = getBufferContaining (midiSourceNodes.getUnchecked(i), + AudioProcessorGraph::midiChannelIndex); + + if (sourceBufIndex >= 0 + && ! isBufferNeededLater (ourRenderingIndex, + AudioProcessorGraph::midiChannelIndex, + midiSourceNodes.getUnchecked(i), + AudioProcessorGraph::midiChannelIndex)) + { + // we've found one of our input buffers that can be re-used.. + reusableInputIndex = i; + midiBufferToUse = sourceBufIndex; + break; + } + } + + if (reusableInputIndex < 0) + { + // can't re-use any of our input buffers, so get a new one and copy everything into it.. 
+ midiBufferToUse = getFreeBuffer (true); + jassert (midiBufferToUse >= 0); + + const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(0), + AudioProcessorGraph::midiChannelIndex); + if (srcIndex >= 0) + renderingOps.add (new CopyMidiBufferOp (srcIndex, midiBufferToUse)); + else + renderingOps.add (new ClearMidiBufferOp (midiBufferToUse)); + + reusableInputIndex = 0; + } + + for (int j = 0; j < midiSourceNodes.size(); ++j) + { + if (j != reusableInputIndex) + { + const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(j), + AudioProcessorGraph::midiChannelIndex); + if (srcIndex >= 0) + renderingOps.add (new AddMidiBufferOp (srcIndex, midiBufferToUse)); + } + } + } + + if (node->getProcessor()->producesMidi()) + markBufferAsContaining (midiBufferToUse, node->nodeId, + AudioProcessorGraph::midiChannelIndex); + + renderingOps.add (new ProcessBufferOp (node, audioChannelsToUse, + totalChans, midiBufferToUse)); + } + + int getFreeBuffer (const bool forMidi) + { + if (forMidi) + { + for (int i = 1; i < midiNodeIds.size(); ++i) + if (midiNodeIds.getUnchecked(i) == freeNodeID) + return i; + + midiNodeIds.add ((uint32) freeNodeID); + return midiNodeIds.size() - 1; + } + else + { + for (int i = 1; i < nodeIds.size(); ++i) + if (nodeIds.getUnchecked(i) == freeNodeID) + return i; + + nodeIds.add ((uint32) freeNodeID); + channels.add (0); + return nodeIds.size() - 1; + } + } + + int getReadOnlyEmptyBuffer() const noexcept + { + return 0; + } + + int getBufferContaining (const uint32 nodeId, const int outputChannel) const noexcept + { + if (outputChannel == AudioProcessorGraph::midiChannelIndex) + { + for (int i = midiNodeIds.size(); --i >= 0;) + if (midiNodeIds.getUnchecked(i) == nodeId) + return i; + } + else + { + for (int i = nodeIds.size(); --i >= 0;) + if (nodeIds.getUnchecked(i) == nodeId + && channels.getUnchecked(i) == outputChannel) + return i; + } + + return -1; + } + + void markAnyUnusedBuffersAsFree (const int stepIndex) + { + int i; + for (i = 0; i < nodeIds.size(); ++i) + { + if (isNodeBusy (nodeIds.getUnchecked(i)) + && ! isBufferNeededLater (stepIndex, -1, + nodeIds.getUnchecked(i), + channels.getUnchecked(i))) + { + nodeIds.set (i, (uint32) freeNodeID); + } + } + + for (i = 0; i < midiNodeIds.size(); ++i) + { + if (isNodeBusy (midiNodeIds.getUnchecked(i)) + && ! 
isBufferNeededLater (stepIndex, -1, + midiNodeIds.getUnchecked(i), + AudioProcessorGraph::midiChannelIndex)) + { + midiNodeIds.set (i, (uint32) freeNodeID); + } + } + } + + bool isBufferNeededLater (int stepIndexToSearchFrom, + int inputChannelOfIndexToIgnore, + const uint32 nodeId, + const int outputChanIndex) const + { + while (stepIndexToSearchFrom < orderedNodes.size()) + { + const AudioProcessorGraph::Node* const node = (const AudioProcessorGraph::Node*) orderedNodes.getUnchecked (stepIndexToSearchFrom); + + if (outputChanIndex == AudioProcessorGraph::midiChannelIndex) + { + if (inputChannelOfIndexToIgnore != AudioProcessorGraph::midiChannelIndex + && graph.getConnectionBetween (nodeId, AudioProcessorGraph::midiChannelIndex, + node->nodeId, AudioProcessorGraph::midiChannelIndex) != nullptr) + return true; + } + else + { + for (int i = 0; i < node->getProcessor()->getNumInputChannels(); ++i) + if (i != inputChannelOfIndexToIgnore + && graph.getConnectionBetween (nodeId, outputChanIndex, + node->nodeId, i) != nullptr) + return true; + } + + inputChannelOfIndexToIgnore = -1; + ++stepIndexToSearchFrom; + } + + return false; + } + + void markBufferAsContaining (int bufferNum, uint32 nodeId, int outputIndex) + { + if (outputIndex == AudioProcessorGraph::midiChannelIndex) + { + jassert (bufferNum > 0 && bufferNum < midiNodeIds.size()); + + midiNodeIds.set (bufferNum, nodeId); + } + else + { + jassert (bufferNum >= 0 && bufferNum < nodeIds.size()); + + nodeIds.set (bufferNum, nodeId); + channels.set (bufferNum, outputIndex); + } + } + + JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (RenderingOpSequenceCalculator); +}; + +// Holds a fast lookup table for checking which nodes are inputs to others. +class ConnectionLookupTable +{ +public: + explicit ConnectionLookupTable (const OwnedArray& connections) + { + for (int i = 0; i < connections.size(); ++i) + { + const AudioProcessorGraph::Connection* const c = connections.getUnchecked(i); + + int index; + Entry* entry = findEntry (c->destNodeId, index); + + if (entry == nullptr) + { + entry = new Entry (c->destNodeId); + entries.insert (index, entry); + } + + entry->srcNodes.add (c->sourceNodeId); + } + } + + bool isAnInputTo (const uint32 possibleInputId, + const uint32 possibleDestinationId) const noexcept + { + return isAnInputToRecursive (possibleInputId, possibleDestinationId, entries.size()); + } + +private: + + struct Entry + { + explicit Entry (const uint32 destNodeId_) noexcept : destNodeId (destNodeId_) {} + + const uint32 destNodeId; + SortedSet srcNodes; + + JUCE_DECLARE_NON_COPYABLE (Entry); + }; + + OwnedArray entries; + + bool isAnInputToRecursive (const uint32 possibleInputId, + const uint32 possibleDestinationId, + int recursionCheck) const noexcept + { + int index; + const Entry* const entry = findEntry (possibleDestinationId, index); + + if (entry != nullptr) + { + const SortedSet& srcNodes = entry->srcNodes; + + if (srcNodes.contains (possibleInputId)) + return true; + + if (--recursionCheck >= 0) + { + for (int i = 0; i < srcNodes.size(); ++i) + if (isAnInputToRecursive (possibleInputId, srcNodes.getUnchecked(i), recursionCheck)) + return true; + } + } + + return false; + } + + Entry* findEntry (const uint32 destNodeId, int& insertIndex) const noexcept + { + Entry* result = nullptr; + int firstElement = 0, lastElement = entries.size(); + + while (firstElement < lastElement) + { + Entry* const firstEntry = entries.getUnchecked (firstElement); + if (destNodeId == firstEntry->destNodeId) + { + result = firstEntry; + break; + } + + 
const int halfway = (firstElement + lastElement) / 2; + + if (halfway <= firstElement) + break; + + if (destNodeId >= entries.getUnchecked (halfway)->destNodeId) + firstElement = halfway; + else + lastElement = halfway; + } + + insertIndex = firstElement; + return result; + } + + JUCE_DECLARE_NON_COPYABLE (ConnectionLookupTable); +}; + +struct ConnectionSorter +{ + static int compareElements (const AudioProcessorGraph::Connection* const first, + const AudioProcessorGraph::Connection* const second) noexcept + { + if (first->sourceNodeId < second->sourceNodeId) return -1; + else if (first->sourceNodeId > second->sourceNodeId) return 1; + else if (first->destNodeId < second->destNodeId) return -1; + else if (first->destNodeId > second->destNodeId) return 1; + else if (first->sourceChannelIndex < second->sourceChannelIndex) return -1; + else if (first->sourceChannelIndex > second->sourceChannelIndex) return 1; + else if (first->destChannelIndex < second->destChannelIndex) return -1; + else if (first->destChannelIndex > second->destChannelIndex) return 1; + + return 0; + } +}; + +} + +AudioProcessorGraph::Connection::Connection (const uint32 sourceNodeId_, const int sourceChannelIndex_, + const uint32 destNodeId_, const int destChannelIndex_) noexcept + : sourceNodeId (sourceNodeId_), sourceChannelIndex (sourceChannelIndex_), + destNodeId (destNodeId_), destChannelIndex (destChannelIndex_) +{ +} + +AudioProcessorGraph::Node::Node (const uint32 nodeId_, AudioProcessor* const processor_) noexcept + : nodeId (nodeId_), + processor (processor_), + isPrepared (false) +{ + jassert (processor_ != nullptr); +} + +void AudioProcessorGraph::Node::prepare (const double sampleRate, const int blockSize, + AudioProcessorGraph* const graph) +{ + if (! isPrepared) + { + isPrepared = true; + + AudioProcessorGraph::AudioGraphIOProcessor* const ioProc + = dynamic_cast (static_cast (processor)); + + if (ioProc != nullptr) + ioProc->setParentGraph (graph); + + processor->setPlayConfigDetails (processor->getNumInputChannels(), + processor->getNumOutputChannels(), + sampleRate, blockSize); + + processor->prepareToPlay (sampleRate, blockSize); + } +} + +void AudioProcessorGraph::Node::unprepare() +{ + if (isPrepared) + { + isPrepared = false; + processor->releaseResources(); + } +} + +AudioProcessorGraph::AudioProcessorGraph() + : lastNodeId (0), + renderingBuffers (1, 1), + currentAudioOutputBuffer (1, 1) +{ +} + +AudioProcessorGraph::~AudioProcessorGraph() +{ + clearRenderingSequence(); + clear(); +} + +const String AudioProcessorGraph::getName() const +{ + return "Audio Graph"; +} + +void AudioProcessorGraph::clear() +{ + nodes.clear(); + connections.clear(); + triggerAsyncUpdate(); +} + +AudioProcessorGraph::Node* AudioProcessorGraph::getNodeForId (const uint32 nodeId) const +{ + for (int i = nodes.size(); --i >= 0;) + if (nodes.getUnchecked(i)->nodeId == nodeId) + return nodes.getUnchecked(i); + + return nullptr; +} + +AudioProcessorGraph::Node* AudioProcessorGraph::addNode (AudioProcessor* const newProcessor, + uint32 nodeId) +{ + if (newProcessor == nullptr) + { + jassertfalse; + return nullptr; + } + + if (nodeId == 0) + { + nodeId = ++lastNodeId; + } + else + { + // you can't add a node with an id that already exists in the graph.. 
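+            // (Passing a nodeId of 0 is the usual way to let the graph pick the next free id.
+            //  A minimal, purely illustrative host-side sketch, where MySynthProcessor stands in
+            //  for any AudioProcessor subclass:
+            //
+            //      AudioProcessorGraph graph;
+            //      AudioProcessorGraph::Node* out   = graph.addNode (new AudioProcessorGraph::AudioGraphIOProcessor (
+            //                                                            AudioProcessorGraph::AudioGraphIOProcessor::audioOutputNode), 0);
+            //      AudioProcessorGraph::Node* synth = graph.addNode (new MySynthProcessor(), 0);
+            //      graph.addConnection (synth->nodeId, 0, out->nodeId, 0);    // left channel
+            //      graph.addConnection (synth->nodeId, 1, out->nodeId, 1);    // right channel
+            //  )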
+ jassert (getNodeForId (nodeId) == nullptr); + removeNode (nodeId); + } + + lastNodeId = nodeId; + + Node* const n = new Node (nodeId, newProcessor); + nodes.add (n); + triggerAsyncUpdate(); + + AudioProcessorGraph::AudioGraphIOProcessor* const ioProc + = dynamic_cast (static_cast (n->processor)); + + if (ioProc != nullptr) + ioProc->setParentGraph (this); + + return n; +} + +bool AudioProcessorGraph::removeNode (const uint32 nodeId) +{ + disconnectNode (nodeId); + + for (int i = nodes.size(); --i >= 0;) + { + if (nodes.getUnchecked(i)->nodeId == nodeId) + { + AudioProcessorGraph::AudioGraphIOProcessor* const ioProc + = dynamic_cast (static_cast (nodes.getUnchecked(i)->processor)); + + if (ioProc != nullptr) + ioProc->setParentGraph (nullptr); + + nodes.remove (i); + triggerAsyncUpdate(); + + return true; + } + } + + return false; +} + +const AudioProcessorGraph::Connection* AudioProcessorGraph::getConnectionBetween (const uint32 sourceNodeId, + const int sourceChannelIndex, + const uint32 destNodeId, + const int destChannelIndex) const +{ + const Connection c (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex); + GraphRenderingOps::ConnectionSorter sorter; + return connections [connections.indexOfSorted (sorter, &c)]; +} + +bool AudioProcessorGraph::isConnected (const uint32 possibleSourceNodeId, + const uint32 possibleDestNodeId) const +{ + for (int i = connections.size(); --i >= 0;) + { + const Connection* const c = connections.getUnchecked(i); + + if (c->sourceNodeId == possibleSourceNodeId + && c->destNodeId == possibleDestNodeId) + { + return true; + } + } + + return false; +} + +bool AudioProcessorGraph::canConnect (const uint32 sourceNodeId, + const int sourceChannelIndex, + const uint32 destNodeId, + const int destChannelIndex) const +{ + if (sourceChannelIndex < 0 + || destChannelIndex < 0 + || sourceNodeId == destNodeId + || (destChannelIndex == midiChannelIndex) != (sourceChannelIndex == midiChannelIndex)) + return false; + + const Node* const source = getNodeForId (sourceNodeId); + + if (source == nullptr + || (sourceChannelIndex != midiChannelIndex && sourceChannelIndex >= source->processor->getNumOutputChannels()) + || (sourceChannelIndex == midiChannelIndex && ! source->processor->producesMidi())) + return false; + + const Node* const dest = getNodeForId (destNodeId); - if (sourceBufIndex >= 0 - && ! isBufferNeededLater (ourRenderingIndex, - AudioProcessorGraph::midiChannelIndex, - midiSourceNodes.getUnchecked(i), - AudioProcessorGraph::midiChannelIndex)) - { - // we've found one of our input buffers that can be re-used.. - reusableInputIndex = i; - midiBufferToUse = sourceBufIndex; - break; - } - } + if (dest == nullptr + || (destChannelIndex != midiChannelIndex && destChannelIndex >= dest->processor->getNumInputChannels()) + || (destChannelIndex == midiChannelIndex && ! dest->processor->acceptsMidi())) + return false; - if (reusableInputIndex < 0) - { - // can't re-use any of our input buffers, so get a new one and copy everything into it.. 
- midiBufferToUse = getFreeBuffer (true); - jassert (midiBufferToUse >= 0); + return getConnectionBetween (sourceNodeId, sourceChannelIndex, + destNodeId, destChannelIndex) == nullptr; +} - const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(0), - AudioProcessorGraph::midiChannelIndex); - if (srcIndex >= 0) - renderingOps.add (new CopyMidiBufferOp (srcIndex, midiBufferToUse)); - else - renderingOps.add (new ClearMidiBufferOp (midiBufferToUse)); +bool AudioProcessorGraph::addConnection (const uint32 sourceNodeId, + const int sourceChannelIndex, + const uint32 destNodeId, + const int destChannelIndex) +{ + if (! canConnect (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex)) + return false; - reusableInputIndex = 0; - } + GraphRenderingOps::ConnectionSorter sorter; + connections.addSorted (sorter, new Connection (sourceNodeId, sourceChannelIndex, + destNodeId, destChannelIndex)); + triggerAsyncUpdate(); - for (int j = 0; j < midiSourceNodes.size(); ++j) - { - if (j != reusableInputIndex) - { - const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(j), - AudioProcessorGraph::midiChannelIndex); - if (srcIndex >= 0) - renderingOps.add (new AddMidiBufferOp (srcIndex, midiBufferToUse)); - } - } - } + return true; +} - if (node->getProcessor()->producesMidi()) - markBufferAsContaining (midiBufferToUse, node->id, - AudioProcessorGraph::midiChannelIndex); +void AudioProcessorGraph::removeConnection (const int index) +{ + connections.remove (index); + triggerAsyncUpdate(); +} - renderingOps.add (new ProcessBufferOp (node, audioChannelsToUse, - totalChans, midiBufferToUse)); - } +bool AudioProcessorGraph::removeConnection (const uint32 sourceNodeId, const int sourceChannelIndex, + const uint32 destNodeId, const int destChannelIndex) +{ + bool doneAnything = false; - int getFreeBuffer (const bool forMidi) + for (int i = connections.size(); --i >= 0;) { - if (forMidi) - { - for (int i = 1; i < midiNodeIds.size(); ++i) - if (midiNodeIds.getUnchecked(i) == freeNodeID) - return i; + const Connection* const c = connections.getUnchecked(i); - midiNodeIds.add ((uint32) freeNodeID); - return midiNodeIds.size() - 1; - } - else + if (c->sourceNodeId == sourceNodeId + && c->destNodeId == destNodeId + && c->sourceChannelIndex == sourceChannelIndex + && c->destChannelIndex == destChannelIndex) { - for (int i = 1; i < nodeIds.size(); ++i) - if (nodeIds.getUnchecked(i) == freeNodeID) - return i; - - nodeIds.add ((uint32) freeNodeID); - channels.add (0); - return nodeIds.size() - 1; + removeConnection (i); + doneAnything = true; + triggerAsyncUpdate(); } } - int getReadOnlyEmptyBuffer() const - { - return 0; - } - - int getBufferContaining (const uint32 nodeId, const int outputChannel) const - { - if (outputChannel == AudioProcessorGraph::midiChannelIndex) - { - for (int i = midiNodeIds.size(); --i >= 0;) - if (midiNodeIds.getUnchecked(i) == nodeId) - return i; - } - else - { - for (int i = nodeIds.size(); --i >= 0;) - if (nodeIds.getUnchecked(i) == nodeId - && channels.getUnchecked(i) == outputChannel) - return i; - } + return doneAnything; +} - return -1; - } +bool AudioProcessorGraph::disconnectNode (const uint32 nodeId) +{ + bool doneAnything = false; - void markAnyUnusedBuffersAsFree (const int stepIndex) + for (int i = connections.size(); --i >= 0;) { - int i; - for (i = 0; i < nodeIds.size(); ++i) - { - if (isNodeBusy (nodeIds.getUnchecked(i)) - && ! 
isBufferNeededLater (stepIndex, -1, - nodeIds.getUnchecked(i), - channels.getUnchecked(i))) - { - nodeIds.set (i, (uint32) freeNodeID); - } - } + const Connection* const c = connections.getUnchecked(i); - for (i = 0; i < midiNodeIds.size(); ++i) + if (c->sourceNodeId == nodeId || c->destNodeId == nodeId) { - if (isNodeBusy (midiNodeIds.getUnchecked(i)) - && ! isBufferNeededLater (stepIndex, -1, - midiNodeIds.getUnchecked(i), - AudioProcessorGraph::midiChannelIndex)) - { - midiNodeIds.set (i, (uint32) freeNodeID); - } + removeConnection (i); + doneAnything = true; + triggerAsyncUpdate(); } } - bool isBufferNeededLater (int stepIndexToSearchFrom, - int inputChannelOfIndexToIgnore, - const uint32 nodeId, - const int outputChanIndex) const - { - while (stepIndexToSearchFrom < orderedNodes.size()) - { - const AudioProcessorGraph::Node* const node = (const AudioProcessorGraph::Node*) orderedNodes.getUnchecked (stepIndexToSearchFrom); - - if (outputChanIndex == AudioProcessorGraph::midiChannelIndex) - { - if (inputChannelOfIndexToIgnore != AudioProcessorGraph::midiChannelIndex - && graph.getConnectionBetween (nodeId, AudioProcessorGraph::midiChannelIndex, - node->id, AudioProcessorGraph::midiChannelIndex) != nullptr) - return true; - } - else - { - for (int i = 0; i < node->getProcessor()->getNumInputChannels(); ++i) - if (i != inputChannelOfIndexToIgnore - && graph.getConnectionBetween (nodeId, outputChanIndex, - node->id, i) != nullptr) - return true; - } - - inputChannelOfIndexToIgnore = -1; - ++stepIndexToSearchFrom; - } + return doneAnything; +} - return false; - } +bool AudioProcessorGraph::removeIllegalConnections() +{ + bool doneAnything = false; - void markBufferAsContaining (int bufferNum, uint32 nodeId, int outputIndex) + for (int i = connections.size(); --i >= 0;) { - if (outputIndex == AudioProcessorGraph::midiChannelIndex) - { - jassert (bufferNum > 0 && bufferNum < midiNodeIds.size()); + const Connection* const c = connections.getUnchecked(i); - midiNodeIds.set (bufferNum, nodeId); - } - else - { - jassert (bufferNum >= 0 && bufferNum < nodeIds.size()); + const Node* const source = getNodeForId (c->sourceNodeId); + const Node* const dest = getNodeForId (c->destNodeId); - nodeIds.set (bufferNum, nodeId); - channels.set (bufferNum, outputIndex); + if (source == nullptr || dest == nullptr + || (c->sourceChannelIndex != midiChannelIndex + && ! isPositiveAndBelow (c->sourceChannelIndex, source->processor->getNumOutputChannels())) + || (c->sourceChannelIndex == midiChannelIndex + && ! source->processor->producesMidi()) + || (c->destChannelIndex != midiChannelIndex + && ! isPositiveAndBelow (c->destChannelIndex, dest->processor->getNumInputChannels())) + || (c->destChannelIndex == midiChannelIndex + && ! 
dest->processor->acceptsMidi())) + { + removeConnection (i); + doneAnything = true; + triggerAsyncUpdate(); } } - JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (RenderingOpSequenceCalculator); -}; - + return doneAnything; } void AudioProcessorGraph::clearRenderingSequence() @@ -36823,21 +37489,22 @@ void AudioProcessorGraph::buildRenderingSequence() Array orderedNodes; - int i; - for (i = 0; i < nodes.size(); ++i) { - Node* const node = nodes.getUnchecked(i); + const GraphRenderingOps::ConnectionLookupTable table (connections); + + for (int i = 0; i < nodes.size(); ++i) + { + Node* const node = nodes.getUnchecked(i); - node->prepare (getSampleRate(), getBlockSize(), this); + node->prepare (getSampleRate(), getBlockSize(), this); - int j = 0; - for (; j < orderedNodes.size(); ++j) - if (isAnInputTo (node->id, - ((Node*) orderedNodes.getUnchecked (j))->id, - nodes.size() + 1)) - break; + int j = 0; + for (; j < orderedNodes.size(); ++j) + if (table.isAnInputTo (node->nodeId, ((Node*) orderedNodes.getUnchecked(j))->nodeId)) + break; - orderedNodes.insert (j, node); + orderedNodes.insert (j, node); + } } GraphRenderingOps::RenderingOpSequenceCalculator calculator (*this, orderedNodes, newRenderingOps); @@ -37002,38 +37669,38 @@ void AudioProcessorGraph::AudioGraphIOProcessor::processBlock (AudioSampleBuffer switch (type) { - case audioOutputNode: - { - for (int i = jmin (graph->currentAudioOutputBuffer.getNumChannels(), - buffer.getNumChannels()); --i >= 0;) + case audioOutputNode: { - graph->currentAudioOutputBuffer.addFrom (i, 0, buffer, i, 0, buffer.getNumSamples()); - } + for (int i = jmin (graph->currentAudioOutputBuffer.getNumChannels(), + buffer.getNumChannels()); --i >= 0;) + { + graph->currentAudioOutputBuffer.addFrom (i, 0, buffer, i, 0, buffer.getNumSamples()); + } - break; - } + break; + } - case audioInputNode: - { - for (int i = jmin (graph->currentAudioInputBuffer->getNumChannels(), - buffer.getNumChannels()); --i >= 0;) + case audioInputNode: { - buffer.copyFrom (i, 0, *graph->currentAudioInputBuffer, i, 0, buffer.getNumSamples()); - } + for (int i = jmin (graph->currentAudioInputBuffer->getNumChannels(), + buffer.getNumChannels()); --i >= 0;) + { + buffer.copyFrom (i, 0, *graph->currentAudioInputBuffer, i, 0, buffer.getNumSamples()); + } - break; - } + break; + } - case midiOutputNode: - graph->currentMidiOutputBuffer.addEvents (midiMessages, 0, buffer.getNumSamples(), 0); - break; + case midiOutputNode: + graph->currentMidiOutputBuffer.addEvents (midiMessages, 0, buffer.getNumSamples(), 0); + break; - case midiInputNode: - midiMessages.addEvents (*graph->currentMidiInputBuffer, 0, buffer.getNumSamples(), 0); - break; + case midiInputNode: + midiMessages.addEvents (*graph->currentMidiInputBuffer, 0, buffer.getNumSamples(), 0); + break; - default: - break; + default: + break; } } @@ -37081,15 +37748,8 @@ bool AudioProcessorGraph::AudioGraphIOProcessor::isOutputChannelStereoPair (int return isInputChannelStereoPair (index); } -bool AudioProcessorGraph::AudioGraphIOProcessor::isInput() const -{ - return type == audioInputNode || type == midiInputNode; -} - -bool AudioProcessorGraph::AudioGraphIOProcessor::isOutput() const -{ - return type == audioOutputNode || type == midiOutputNode; -} +bool AudioProcessorGraph::AudioGraphIOProcessor::isInput() const { return type == audioInputNode || type == midiInputNode; } +bool AudioProcessorGraph::AudioGraphIOProcessor::isOutput() const { return type == audioOutputNode || type == midiOutputNode; } bool 
AudioProcessorGraph::AudioGraphIOProcessor::hasEditor() const { return false; } AudioProcessorEditor* AudioProcessorGraph::AudioGraphIOProcessor::createEditor() { return nullptr; } @@ -37106,15 +37766,10 @@ int AudioProcessorGraph::AudioGraphIOProcessor::getCurrentProgram() { return 0 void AudioProcessorGraph::AudioGraphIOProcessor::setCurrentProgram (int) { } const String AudioProcessorGraph::AudioGraphIOProcessor::getProgramName (int) { return String::empty; } -void AudioProcessorGraph::AudioGraphIOProcessor::changeProgramName (int, const String&) { } - -void AudioProcessorGraph::AudioGraphIOProcessor::getStateInformation (JUCE_NAMESPACE::MemoryBlock&) -{ -} +void AudioProcessorGraph::AudioGraphIOProcessor::changeProgramName (int, const String&) {} -void AudioProcessorGraph::AudioGraphIOProcessor::setStateInformation (const void*, int) -{ -} +void AudioProcessorGraph::AudioGraphIOProcessor::getStateInformation (JUCE_NAMESPACE::MemoryBlock&) {} +void AudioProcessorGraph::AudioGraphIOProcessor::setStateInformation (const void*, int) {} void AudioProcessorGraph::AudioGraphIOProcessor::setParentGraph (AudioProcessorGraph* const newGraph) { @@ -38693,10 +39348,9 @@ static const int quitMessageId = 0xfffff321; MessageManager::MessageManager() noexcept : quitMessagePosted (false), quitMessageReceived (false), + messageThreadId (Thread::getCurrentThreadId()), threadWithLock (0) { - messageThreadId = Thread::getCurrentThreadId(); - if (JUCEApplication::isStandaloneApp()) Thread::setCurrentThreadName ("Juce Message Thread"); } diff --git a/juce_amalgamated.h b/juce_amalgamated.h index 1783cc4133..fa6c31acb8 100644 --- a/juce_amalgamated.h +++ b/juce_amalgamated.h @@ -73,7 +73,7 @@ namespace JuceDummyNamespace {} */ #define JUCE_MAJOR_VERSION 1 #define JUCE_MINOR_VERSION 53 -#define JUCE_BUILDNUMBER 73 +#define JUCE_BUILDNUMBER 74 /** Current Juce version number. @@ -23112,8 +23112,9 @@ public: /** Returns one of the registered clients. */ TimeSliceClient* getClient (int index) const; - /** @internal */ + #ifndef DOXYGEN void run(); + #endif private: CriticalSection callbackLock, listLock; @@ -33344,8 +33345,7 @@ public: @see MessageManager, DeletedAtShutdown */ -class JUCE_API JUCEApplication : public ApplicationCommandTarget, - private ActionListener +class JUCE_API JUCEApplication : public ApplicationCommandTarget { protected: @@ -33497,18 +33497,9 @@ public: */ const String getCommandLineParameters() const noexcept { return commandLineParameters; } - // These are used by the START_JUCE_APPLICATION() macro and aren't for public use. - - /** @internal */ - static int main (const String& commandLine); - /** @internal */ - static int main (int argc, const char* argv[]); - /** @internal */ - static void sendUnhandledException (const std::exception* e, const char* sourceFile, int lineNumber); - /** Returns true if this executable is running as an app (as opposed to being a plugin or other kind of shared library. */ - static inline bool isStandaloneApp() noexcept { return createInstance != 0; } + static inline bool isStandaloneApp() noexcept { return createInstance != 0; } /** @internal */ ApplicationCommandTarget* getNextCommandTarget(); @@ -33518,26 +33509,28 @@ public: void getAllCommands (Array & commands); /** @internal */ bool perform (const InvocationInfo& info); - /** @internal */ - void actionListenerCallback (const String& message); - /** @internal */ + + #ifndef DOXYGEN + // The following methods are internal calls - not for public use. 
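+        // (These are reached via the START_JUCE_APPLICATION() macro rather than called directly:
+        //  a typical application translation unit simply ends with, for illustration,
+        //
+        //      START_JUCE_APPLICATION (MyApplication)   // MyApplication: a hypothetical JUCEApplication subclass
+        //
+        //  which defines the platform's real entry point and forwards to the JUCEApplication::main()
+        //  overloads declared just below.)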
+ static int main (const String& commandLine); + static int main (int argc, const char* argv[]); + static void sendUnhandledException (const std::exception* e, const char* sourceFile, int lineNumber); bool initialiseApp (const String& commandLine); - /** @internal */ int shutdownApp(); - /** @internal */ static void appWillTerminateByForce(); - /** @internal */ typedef JUCEApplication* (*CreateInstanceFunction)(); - /** @internal */ static CreateInstanceFunction createInstance; + #endif private: + static JUCEApplication* appInstance; + String commandLineParameters; + ScopedPointer appLock; + ScopedPointer broadcastCallback; int appReturnValue; bool stillInitialising; - ScopedPointer appLock; - static JUCEApplication* appInstance; JUCE_DECLARE_NON_COPYABLE (JUCEApplication); }; @@ -36550,9 +36543,10 @@ public: const Array getPossibleBitDepths(); bool canDoStereo(); bool canDoMono(); -#if JUCE_MAC + + #if JUCE_MAC bool canHandleFile (const File& fileToTest); -#endif + #endif AudioFormatReader* createReaderFor (InputStream* sourceStream, bool deleteStreamIfOpeningFails); @@ -37224,12 +37218,19 @@ public: */ float getApproximatePeak() const; + /** Reads the approximate min and max levels from a section of the thumbnail. + The lowest and highest samples are returned in minValue and maxValue, but obviously + because the thumb only stores low-resolution data, these numbers will only be a rough + approximation of the true values. + */ + void getApproximateMinMax (double startTime, double endTime, int channelIndex, + float& minValue, float& maxValue) const noexcept; + /** Returns the hash code that was set by setSource() or setReader(). */ int64 getHashCode() const; #ifndef DOXYGEN - // (this is only public to avoid a VC6 bug) - class LevelDataSource; + class LevelDataSource; // (this is only public to avoid a VC6 bug) #endif private: @@ -47098,10 +47099,9 @@ public: public: /** The ID number assigned to this node. - This is assigned by the graph that owns it, and can't be changed. */ - const uint32 id; + const uint32 nodeId; /** The actual processor object that this node represents. */ AudioProcessor* getProcessor() const noexcept { return processor; } @@ -47125,7 +47125,7 @@ public: const ScopedPointer processor; bool isPrepared; - Node (uint32 id, AudioProcessor* processor); + Node (uint32 nodeId, AudioProcessor* processor) noexcept; void prepare (double sampleRate, int blockSize, AudioProcessorGraph* graph); void unprepare(); @@ -47141,6 +47141,9 @@ public: { public: + Connection (uint32 sourceNodeId, int sourceChannelIndex, + uint32 destNodeId, int destChannelIndex) noexcept; + /** The ID number of the node which is the input source for this connection. @see AudioProcessorGraph::getNodeForId */ @@ -48766,8 +48769,7 @@ public: @returns the value that the callback function returns. @see MessageManagerLock */ - void* callFunctionOnMessageThread (MessageCallbackFunction* callback, - void* userData); + void* callFunctionOnMessageThread (MessageCallbackFunction* callback, void* userData); /** Returns true if the caller-thread is the message thread. */ bool isThisTheMessageThread() const noexcept; @@ -48815,12 +48817,12 @@ public: /** Deregisters a broadcast listener. */ void deregisterBroadcastListener (ActionListener* listener); - /** @internal */ + #ifndef DOXYGEN + // Internal methods - do not use! 
void deliverMessage (Message*); - /** @internal */ void deliverBroadcastMessage (const String&); - /** @internal */ ~MessageManager() noexcept; + #endif private: @@ -48946,7 +48948,6 @@ public: ~MessageManagerLock() noexcept; /** Returns true if the lock was successfully acquired. - (See the constructor that takes a Thread for more info). */ bool lockWasGained() const noexcept { return locked; } @@ -56566,7 +56567,7 @@ protected: /** @internal */ int getDesktopWindowStyleFlags() const; -#if JUCE_DEBUG + #if JUCE_DEBUG /** Overridden to warn people about adding components directly to this component instead of using setContentOwned(). @@ -56581,7 +56582,7 @@ protected: a base-class method call to Component::addAndMakeVisible(), to side-step this warning. */ void addAndMakeVisible (Component* child, int zOrder = -1); -#endif + #endif ScopedPointer resizableCorner; ScopedPointer resizableBorder; @@ -59438,6 +59439,7 @@ public: and feel class how this is used. */ }; + #ifndef DOXYGEN /** @internal */ void paint (Graphics& g); /** @internal */ @@ -59460,6 +59462,7 @@ public: void parentHierarchyChanged(); /** @internal */ const Rectangle getTitleBarArea(); + #endif private: @@ -66942,8 +66945,9 @@ public: void removeListener (Listener* listenerToRemove); protected: - /** @internal */ + #ifndef DOXYGEN CameraDevice (const String& name, int index); + #endif private: void* internal; diff --git a/src/application/juce_Application.cpp b/src/application/juce_Application.cpp index dca0c570cf..92c923c519 100644 --- a/src/application/juce_Application.cpp +++ b/src/application/juce_Application.cpp @@ -38,6 +38,22 @@ BEGIN_JUCE_NAMESPACE extern void juce_initialiseMacMainMenu(); #endif +//============================================================================== +class AppBroadcastCallback : public ActionListener +{ +public: + AppBroadcastCallback() { MessageManager::getInstance()->registerBroadcastListener (this); } + ~AppBroadcastCallback() { MessageManager::getInstance()->deregisterBroadcastListener (this); } + + void actionListenerCallback (const String& message) + { + JUCEApplication* const app = JUCEApplication::getInstance(); + + if (app != 0 && message.startsWith (app->getApplicationName() + "/")) + app->anotherInstanceStarted (message.substring (app->getApplicationName().length() + 1)); + } +}; + //============================================================================== JUCEApplication::JUCEApplication() : appReturnValue (0), @@ -87,12 +103,6 @@ void JUCEApplication::setApplicationReturnValue (const int newReturnValue) noexc appReturnValue = newReturnValue; } -void JUCEApplication::actionListenerCallback (const String& message) -{ - if (message.startsWith (getApplicationName() + "/")) - anotherInstanceStarted (message.substring (getApplicationName().length() + 1)); -} - //============================================================================== void JUCEApplication::unhandledException (const std::exception*, const String&, @@ -149,7 +159,7 @@ bool JUCEApplication::initialiseApp (const String& commandLine) { commandLineParameters = commandLine.trim(); -#if ! JUCE_IOS + #if ! JUCE_IOS jassert (appLock == nullptr); // initialiseApp must only be called once! if (! moreThanOneInstanceAllowed()) @@ -165,17 +175,18 @@ bool JUCEApplication::initialiseApp (const String& commandLine) return false; } } -#endif + #endif // let the app do its setting-up.. 
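    // (When moreThanOneInstanceAllowed() returns false, a second instance never reaches this point:
    //  it broadcasts its command line, prefixed with the application name, and returns early, and the
    //  AppBroadcastCallback registered a few lines further down passes that string to the running
    //  instance's anotherInstanceStarted().  An app that wants to handle this typically just overrides
    //  it, purely illustrative, with openFile() being a hypothetical helper:
    //
    //      void anotherInstanceStarted (const String& commandLine)    { openFile (commandLine); }
    //  )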
initialise (commandLineParameters); -#if JUCE_MAC + #if JUCE_MAC juce_initialiseMacMainMenu(); // needs to be called after the app object has created, to get its name -#endif + #endif - // register for broadcast new app messages - MessageManager::getInstance()->registerBroadcastListener (this); + #if ! JUCE_IOS + broadcastCallback = new AppBroadcastCallback(); + #endif stillInitialising = false; return true; @@ -185,7 +196,7 @@ int JUCEApplication::shutdownApp() { jassert (appInstance == this); - MessageManager::getInstance()->deregisterBroadcastListener (this); + broadcastCallback = nullptr; JUCE_TRY { @@ -249,20 +260,20 @@ int JUCEApplication::main (int argc, const char* argv[]) { JUCE_AUTORELEASEPOOL - #if ! JUCE_WINDOWS + #if ! JUCE_WINDOWS jassert (createInstance != nullptr); juce_Argv0 = argv[0]; - #endif + #endif - #if JUCE_IOS + #if JUCE_IOS return juce_iOSMain (argc, argv); - #else + #else String cmd; for (int i = 1; i < argc; ++i) cmd << argv[i] << ' '; return JUCEApplication::main (cmd); - #endif + #endif } #endif diff --git a/src/application/juce_Application.h b/src/application/juce_Application.h index a5fad93dfc..f1c8acb8d9 100644 --- a/src/application/juce_Application.h +++ b/src/application/juce_Application.h @@ -87,8 +87,7 @@ @see MessageManager, DeletedAtShutdown */ -class JUCE_API JUCEApplication : public ApplicationCommandTarget, - private ActionListener +class JUCE_API JUCEApplication : public ApplicationCommandTarget { protected: //============================================================================== @@ -244,20 +243,11 @@ public: */ const String getCommandLineParameters() const noexcept { return commandLineParameters; } - //============================================================================== - // These are used by the START_JUCE_APPLICATION() macro and aren't for public use. - - /** @internal */ - static int main (const String& commandLine); - /** @internal */ - static int main (int argc, const char* argv[]); - /** @internal */ - static void sendUnhandledException (const std::exception* e, const char* sourceFile, int lineNumber); - /** Returns true if this executable is running as an app (as opposed to being a plugin or other kind of shared library. */ - static inline bool isStandaloneApp() noexcept { return createInstance != 0; } + static inline bool isStandaloneApp() noexcept { return createInstance != 0; } + //============================================================================== /** @internal */ ApplicationCommandTarget* getNextCommandTarget(); /** @internal */ @@ -266,26 +256,29 @@ public: void getAllCommands (Array & commands); /** @internal */ bool perform (const InvocationInfo& info); - /** @internal */ - void actionListenerCallback (const String& message); - /** @internal */ + + //============================================================================== + #ifndef DOXYGEN + // The following methods are internal calls - not for public use. 
+ static int main (const String& commandLine); + static int main (int argc, const char* argv[]); + static void sendUnhandledException (const std::exception* e, const char* sourceFile, int lineNumber); bool initialiseApp (const String& commandLine); - /** @internal */ int shutdownApp(); - /** @internal */ static void appWillTerminateByForce(); - /** @internal */ typedef JUCEApplication* (*CreateInstanceFunction)(); - /** @internal */ static CreateInstanceFunction createInstance; + #endif private: //============================================================================== + static JUCEApplication* appInstance; + String commandLineParameters; + ScopedPointer appLock; + ScopedPointer broadcastCallback; int appReturnValue; bool stillInitialising; - ScopedPointer appLock; - static JUCEApplication* appInstance; JUCE_DECLARE_NON_COPYABLE (JUCEApplication); }; diff --git a/src/audio/audio_file_formats/juce_AiffAudioFormat.cpp b/src/audio/audio_file_formats/juce_AiffAudioFormat.cpp index f6eadee2bc..3ba1986e12 100644 --- a/src/audio/audio_file_formats/juce_AiffAudioFormat.cpp +++ b/src/audio/audio_file_formats/juce_AiffAudioFormat.cpp @@ -29,6 +29,7 @@ BEGIN_JUCE_NAMESPACE #include "juce_AiffAudioFormat.h" #include "../../io/streams/juce_BufferedInputStream.h" +#include "../../io/streams/juce_MemoryOutputStream.h" #include "../../core/juce_PlatformUtilities.h" #include "../../text/juce_LocalisedStrings.h" @@ -37,6 +38,215 @@ BEGIN_JUCE_NAMESPACE static const char* const aiffFormatName = "AIFF file"; static const char* const aiffExtensions[] = { ".aiff", ".aif", 0 }; +//============================================================================== +namespace AiffFileHelpers +{ + inline int chunkName (const char* const name) { return (int) ByteOrder::littleEndianInt (name); } + +#if JUCE_MSVC + #pragma pack (push, 1) + #define PACKED +#elif JUCE_GCC + #define PACKED __attribute__((packed)) +#else + #define PACKED +#endif + +//============================================================================== +struct InstChunk +{ + struct Loop + { + uint16 type; // these are different in AIFF and WAV + uint16 startIdentifier; + uint16 endIdentifier; + } PACKED; + + int8 baseNote; + int8 detune; + int8 lowNote; + int8 highNote; + int8 lowVelocity; + int8 highVelocity; + int16 gain; + Loop sustainLoop; + Loop releaseLoop; + + void copyTo (StringPairArray& values) const + { + values.set ("MidiUnityNote", String (baseNote)); + values.set ("Detune", String (detune)); + + values.set ("LowNote", String (lowNote)); + values.set ("HighNote", String (highNote)); + values.set ("LowVelocity", String (lowVelocity)); + values.set ("HighVelocity", String (highVelocity)); + + values.set ("Gain", String ((int16) ByteOrder::swapIfLittleEndian ((uint16) gain))); + + values.set ("NumSampleLoops", String (2)); // always 2 with AIFF, WAV can have more + values.set ("Loop0Type", String (ByteOrder::swapIfLittleEndian (sustainLoop.type))); + values.set ("Loop0StartIdentifier", String (ByteOrder::swapIfLittleEndian (sustainLoop.startIdentifier))); + values.set ("Loop0EndIdentifier", String (ByteOrder::swapIfLittleEndian (sustainLoop.endIdentifier))); + values.set ("Loop1Type", String (ByteOrder::swapIfLittleEndian (releaseLoop.type))); + values.set ("Loop1StartIdentifier", String (ByteOrder::swapIfLittleEndian (releaseLoop.startIdentifier))); + values.set ("Loop1EndIdentifier", String (ByteOrder::swapIfLittleEndian (releaseLoop.endIdentifier))); + } + + static void create (MemoryBlock& block, const StringPairArray& values) + { 
+ if (values.getAllKeys().contains ("MidiUnityNote", true)) + { + block.setSize ((sizeof (InstChunk) + 3) & ~3, true); + InstChunk* const inst = static_cast (block.getData()); + + inst->baseNote = (int8) values.getValue ("MidiUnityNote", "60").getIntValue(); + inst->detune = (int8) values.getValue ("Detune", "0").getIntValue(); + inst->lowNote = (int8) values.getValue ("LowNote", "0").getIntValue(); + inst->highNote = (int8) values.getValue ("HighNote", "127").getIntValue(); + inst->lowVelocity = (int8) values.getValue ("LowVelocity", "1").getIntValue(); + inst->highVelocity = (int8) values.getValue ("HighVelocity", "127").getIntValue(); + inst->gain = (int16) ByteOrder::swapIfLittleEndian ((uint16) values.getValue ("Gain", "0").getIntValue()); + + inst->sustainLoop.type = ByteOrder::swapIfLittleEndian ((uint16) values.getValue ("Loop0Type", "0").getIntValue()); + inst->sustainLoop.startIdentifier = ByteOrder::swapIfLittleEndian ((uint16) values.getValue ("Loop0StartIdentifier", "0").getIntValue()); + inst->sustainLoop.endIdentifier = ByteOrder::swapIfLittleEndian ((uint16) values.getValue ("Loop0EndIdentifier", "0").getIntValue()); + inst->releaseLoop.type = ByteOrder::swapIfLittleEndian ((uint16) values.getValue ("Loop1Type", "0").getIntValue()); + inst->releaseLoop.startIdentifier = ByteOrder::swapIfLittleEndian ((uint16) values.getValue ("Loop1StartIdentifier", "0").getIntValue()); + inst->releaseLoop.endIdentifier = ByteOrder::swapIfLittleEndian ((uint16) values.getValue ("Loop1EndIdentifier", "0").getIntValue()); + } + } + +} PACKED; + +#if JUCE_MSVC + #pragma pack (pop) +#endif + +#undef PACKED + +//============================================================================== +namespace MarkChunk +{ + bool metaDataContainsZeroIdentifiers (const StringPairArray& values) + { + // (zero cue identifiers are valid for WAV but not for AIFF) + const String cueString ("Cue"); + const String noteString ("CueNote"); + const String identifierString ("Identifier"); + + const StringArray& keys = values.getAllKeys(); + + for (int i = 0; i < keys.size(); ++i) + { + const String key (keys[i]); + + if (key.startsWith (noteString)) + continue; // zero identifier IS valid in a COMT chunk + + if (key.startsWith (cueString) && key.contains (identifierString)) + { + const int value = values.getValue (key, "-1").getIntValue(); + + if (value == 0) + return true; + } + } + + return false; + } + + void create (MemoryBlock& block, const StringPairArray& values) + { + const int numCues = values.getValue ("NumCuePoints", "0").getIntValue(); + + if (numCues > 0) + { + MemoryOutputStream out (block, false); + + out.writeShortBigEndian ((short) numCues); + + const int numCueLabels = values.getValue ("NumCueLabels", "0").getIntValue(); + const int idOffset = metaDataContainsZeroIdentifiers (values) ? 1 : 0; // can't have zero IDs in AIFF + + #if JUCE_DEBUG + Array identifiers; + #endif + + for (int i = 0; i < numCues; ++i) + { + const String prefixCue ("Cue" + String (i)); + const String prefixLabel ("CueLabel" + String (i)); + + const int identifier = idOffset + values.getValue (prefixCue + "Identifier", "1").getIntValue(); + + #if JUCE_DEBUG + jassert (! 
identifiers.contains (identifier)); + identifiers.add (identifier); + #endif + + const int offset = values.getValue (prefixCue + "Offset", "0").getIntValue(); + + String label (prefixLabel); + + for (int labelIndex = 0; labelIndex < numCueLabels; ++labelIndex) + { + const String prefixLabel ("CueLabel" + String (labelIndex)); + const int labelIdentifier = idOffset + values.getValue (prefixLabel + "Identifier", "1").getIntValue(); + + if (labelIdentifier == identifier) + { + label = values.getValue (prefixLabel + "Text", label); + break; + } + } + + out.writeShortBigEndian ((short) identifier); + out.writeIntBigEndian (offset); + + const int labelLength = jmin (254, label.getNumBytesAsUTF8()); // seems to need null terminator even though it's a pstring + out.writeByte ((char) labelLength + 1); + out.write (label.toUTF8(), labelLength); + out.writeByte (0); + } + + if ((out.getDataSize() & 1) != 0) + out.writeByte (0); + } + } +} + +//============================================================================== +namespace COMTChunk +{ + void create (MemoryBlock& block, const StringPairArray& values) + { + const int numNotes = values.getValue ("NumCueNotes", "0").getIntValue(); + + if (numNotes > 0) + { + MemoryOutputStream out (block, false); + out.writeShortBigEndian ((short) numNotes); + + for (int i = 0; i < numNotes; ++i) + { + const String prefix ("CueNote" + String (i)); + + out.writeIntBigEndian (values.getValue (prefix + "TimeStamp", "0").getIntValue()); + out.writeShortBigEndian ((short) values.getValue (prefix + "Identifier", "0").getIntValue()); + + const String comment (values.getValue (prefix + "Text", String::empty)); + out.write (comment.toUTF8(), jmin (comment.getNumBytesAsUTF8(), 65534)); + out.writeByte (0); + + if ((out.getDataSize() & 1) != 0) + out.writeByte (0); + } + } + } +} + +} //============================================================================== class AiffAudioFormatReader : public AudioFormatReader @@ -50,6 +260,8 @@ public: AiffAudioFormatReader (InputStream* in) : AudioFormatReader (in, TRANS (aiffFormatName)) { + using namespace AiffFileHelpers; + if (input->readInt() == chunkName ("FORM")) { const int len = input->readIntBigEndian(); @@ -131,6 +343,67 @@ public: dataChunkStart = input->getPosition() + 4 + offset; lengthInSamples = (bytesPerFrame > 0) ? 
jmin (lengthInSamples, (int64) (length / bytesPerFrame)) : 0; } + else if (type == chunkName ("MARK")) + { + const uint16 numCues = (uint16) input->readShortBigEndian(); + + // these two are always the same for AIFF-read files + metadataValues.set ("NumCuePoints", String (numCues)); + metadataValues.set ("NumCueLabels", String (numCues)); + + for (uint16 i = 0; i < numCues; ++i) + { + uint16 identifier = (uint16) input->readShortBigEndian(); + uint32 offset = (uint32) input->readIntBigEndian(); + uint8 stringLength = (uint8) input->readByte(); + MemoryBlock textBlock; + input->readIntoMemoryBlock (textBlock, stringLength); + + // if the stringLength is even then read one more byte as the + // string needs to be an even number of bytes INCLUDING the + // leading length character in the pascal string + if ((stringLength & 1) == 0) + input->readByte(); + + const String text = String::fromUTF8 ((const char*)textBlock.getData(), stringLength); + + const String prefixCue ("Cue" + String (i)); + metadataValues.set (prefixCue + "Identifier", String (identifier)); + metadataValues.set (prefixCue + "Offset", String (offset)); + + const String prefixLabel ("CueLabel" + String (i)); + metadataValues.set (prefixLabel + "Identifier", String (identifier)); + metadataValues.set (prefixLabel + "Text", text); + } + } + else if (type == chunkName ("COMT")) + { + const uint16 numNotes = (uint16) input->readShortBigEndian(); + metadataValues.set ("NumCueNotes", String (numNotes)); + + for (uint16 i = 0; i < numNotes; ++i) + { + uint32 timestamp = (uint32) input->readIntBigEndian(); + uint16 identifier = (uint16) input->readShortBigEndian(); // may be zero in this case + uint16 stringLength = (uint16) input->readShortBigEndian(); + + MemoryBlock textBlock; + input->readIntoMemoryBlock (textBlock, stringLength + (stringLength & 1)); + const String text = String::fromUTF8 ((const char*)textBlock.getData(), stringLength); + + const String prefix ("CueNote" + String (i)); + metadataValues.set (prefix + "TimeStamp", String (timestamp)); + metadataValues.set (prefix + "Identifier", String (identifier)); + metadataValues.set (prefix + "Text", text); + } + } + else if (type == chunkName ("INST")) + { + HeapBlock inst; + inst.calloc (jmax ((size_t) length + 1, sizeof (InstChunk)), 1); + input->read (inst, length); + inst->copyTo (metadataValues); + } else if ((hasGotVer && hasGotData && hasGotType) || chunkEnd < input->getPosition() || input->isExhausted()) @@ -142,6 +415,9 @@ public: } } } + + if (metadataValues.size() > 0) + metadataValues.set ("MetaDataSource", "AIFF"); } //============================================================================== @@ -211,8 +487,6 @@ public: } private: - static inline int chunkName (const char* const name) { return (int) ByteOrder::littleEndianInt (name); } - JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AiffAudioFormatReader); }; @@ -221,12 +495,28 @@ class AiffAudioFormatWriter : public AudioFormatWriter { public: //============================================================================== - AiffAudioFormatWriter (OutputStream* out, double sampleRate_, unsigned int numChans, int bits) + AiffAudioFormatWriter (OutputStream* out, double sampleRate_, + unsigned int numChans, int bits, + const StringPairArray& metadataValues) : AudioFormatWriter (out, TRANS (aiffFormatName), sampleRate_, numChans, bits), lengthInSamples (0), bytesWritten (0), writeFailed (false) { + using namespace AiffFileHelpers; + + if (metadataValues.size() > 0) + { + // The meta data should have been 
santised for the AIFF format. + // If it was originally sourced from a WAV file the MetaDataSource + // key should be removed (or set to "AIFF") once this has been done + jassert (metadataValues.getValue ("MetaDataSource", "None") != "WAV"); + + MarkChunk::create (markChunk, metadataValues); + COMTChunk::create (comtChunk, metadataValues); + InstChunk::create (instChunk, metadataValues); + } + headerPosition = out->getPosition(); writeHeader(); } @@ -279,15 +569,15 @@ public: } private: - MemoryBlock tempBlock; + MemoryBlock tempBlock, markChunk, comtChunk, instChunk; uint32 lengthInSamples, bytesWritten; int64 headerPosition; bool writeFailed; - static inline int chunkName (const char* const name) { return (int) ByteOrder::littleEndianInt (name); } - void writeHeader() { + using namespace AiffFileHelpers; + const bool couldSeekOk = output->setPosition (headerPosition); (void) couldSeekOk; @@ -295,7 +585,9 @@ private: // to be able to seek back to write the header jassert (couldSeekOk); - const int headerLen = 54; + const int headerLen = 54 + (markChunk.getSize() > 0 ? markChunk.getSize() + 8 : 0) + + (comtChunk.getSize() > 0 ? comtChunk.getSize() + 8 : 0) + + (instChunk.getSize() > 0 ? instChunk.getSize() + 8 : 0); int audioBytes = lengthInSamples * ((bitsPerSample * numChannels) / 8); audioBytes += (audioBytes & 1); @@ -351,6 +643,27 @@ private: output->write (sampleRateBytes, 10); + if (markChunk.getSize() > 0) + { + output->writeInt (chunkName ("MARK")); + output->writeIntBigEndian ((int) markChunk.getSize()); + output->write (markChunk.getData(), (int) markChunk.getSize()); + } + + if (comtChunk.getSize() > 0) + { + output->writeInt (chunkName ("COMT")); + output->writeIntBigEndian ((int) comtChunk.getSize()); + output->write (comtChunk.getData(), (int) comtChunk.getSize()); + } + + if (instChunk.getSize() > 0) + { + output->writeInt (chunkName ("INST")); + output->writeIntBigEndian ((int) instChunk.getSize()); + output->write (instChunk.getData(), (int) instChunk.getSize()); + } + output->writeInt (chunkName ("SSND")); output->writeIntBigEndian (audioBytes + 8); output->writeInt (0); @@ -416,11 +729,11 @@ AudioFormatWriter* AiffAudioFormat::createWriterFor (OutputStream* out, double sampleRate, unsigned int numberOfChannels, int bitsPerSample, - const StringPairArray& /*metadataValues*/, + const StringPairArray& metadataValues, int /*qualityOptionIndex*/) { if (getPossibleBitDepths().contains (bitsPerSample)) - return new AiffAudioFormatWriter (out, sampleRate, numberOfChannels, bitsPerSample); + return new AiffAudioFormatWriter (out, sampleRate, numberOfChannels, bitsPerSample, metadataValues); return nullptr; } diff --git a/src/audio/audio_file_formats/juce_AiffAudioFormat.h b/src/audio/audio_file_formats/juce_AiffAudioFormat.h index 67eed61562..b5659e2761 100644 --- a/src/audio/audio_file_formats/juce_AiffAudioFormat.h +++ b/src/audio/audio_file_formats/juce_AiffAudioFormat.h @@ -50,9 +50,10 @@ public: const Array getPossibleBitDepths(); bool canDoStereo(); bool canDoMono(); -#if JUCE_MAC + + #if JUCE_MAC bool canHandleFile (const File& fileToTest); -#endif + #endif //============================================================================== AudioFormatReader* createReaderFor (InputStream* sourceStream, diff --git a/src/audio/audio_file_formats/juce_AudioThumbnail.cpp b/src/audio/audio_file_formats/juce_AudioThumbnail.cpp index 0f1ec39ded..fc055ad564 100644 --- a/src/audio/audio_file_formats/juce_AudioThumbnail.cpp +++ b/src/audio/audio_file_formats/juce_AudioThumbnail.cpp @@ 
-38,7 +38,7 @@ struct AudioThumbnail::MinMaxValue char minValue; char maxValue; - MinMaxValue() : minValue (0), maxValue (0) + MinMaxValue() noexcept : minValue (0), maxValue (0) { } @@ -281,7 +281,7 @@ public: return data.size(); } - void getMinMax (int startSample, int endSample, MinMaxValue& result) noexcept + void getMinMax (int startSample, int endSample, MinMaxValue& result) const noexcept { if (startSample >= 0) { @@ -323,12 +323,12 @@ public: dest[i] = source[i]; } - void resetPeak() + void resetPeak() noexcept { peakLevel = -1; } - int getPeak() + int getPeak() noexcept { if (peakLevel < 0) { @@ -743,6 +743,24 @@ float AudioThumbnail::getApproximatePeak() const return jlimit (0, 127, peak) / 127.0f; } +void AudioThumbnail::getApproximateMinMax (const double startTime, const double endTime, const int channelIndex, + float& minValue, float& maxValue) const noexcept +{ + MinMaxValue result; + const ThumbData* const data = channels [channelIndex]; + + if (data != nullptr && sampleRate > 0) + { + const int firstThumbIndex = (int) ((startTime * sampleRate) / samplesPerThumbSample); + const int lastThumbIndex = (int) (((endTime * sampleRate) + samplesPerThumbSample - 1) / samplesPerThumbSample); + + data->getMinMax (jmax (0, firstThumbIndex), lastThumbIndex, result); + } + + minValue = result.minValue / 128.0f; + maxValue = result.maxValue / 128.0f; +} + void AudioThumbnail::drawChannel (Graphics& g, const Rectangle& area, double startTime, double endTime, int channelNum, float verticalZoomFactor) { diff --git a/src/audio/audio_file_formats/juce_AudioThumbnail.h b/src/audio/audio_file_formats/juce_AudioThumbnail.h index 120eb9bdb8..f9f84ccd22 100644 --- a/src/audio/audio_file_formats/juce_AudioThumbnail.h +++ b/src/audio/audio_file_formats/juce_AudioThumbnail.h @@ -186,12 +186,19 @@ public: */ float getApproximatePeak() const; + /** Reads the approximate min and max levels from a section of the thumbnail. + The lowest and highest samples are returned in minValue and maxValue, but obviously + because the thumb only stores low-resolution data, these numbers will only be a rough + approximation of the true values. + */ + void getApproximateMinMax (double startTime, double endTime, int channelIndex, + float& minValue, float& maxValue) const noexcept; + /** Returns the hash code that was set by setSource() or setReader(). 
*/ int64 getHashCode() const; #ifndef DOXYGEN - // (this is only public to avoid a VC6 bug) - class LevelDataSource; + class LevelDataSource; // (this is only public to avoid a VC6 bug) #endif private: diff --git a/src/audio/audio_file_formats/juce_WavAudioFormat.cpp b/src/audio/audio_file_formats/juce_WavAudioFormat.cpp index c06154ab6c..48e0eee3a5 100644 --- a/src/audio/audio_file_formats/juce_WavAudioFormat.cpp +++ b/src/audio/audio_file_formats/juce_WavAudioFormat.cpp @@ -29,6 +29,7 @@ BEGIN_JUCE_NAMESPACE #include "juce_WavAudioFormat.h" #include "../../io/streams/juce_BufferedInputStream.h" +#include "../../io/streams/juce_MemoryOutputStream.h" #include "../../text/juce_LocalisedStrings.h" #include "../../io/files/juce_FileInputStream.h" #include "../../io/files/juce_TemporaryFile.h" @@ -156,7 +157,7 @@ struct SMPLChunk struct SampleLoop { uint32 identifier; - uint32 type; + uint32 type; // these are different in AIFF and WAV uint32 start; uint32 end; uint32 fraction; @@ -214,8 +215,6 @@ struct SMPLChunk SMPLChunk* const s = static_cast (data.getData()); - // Allow these calls to overwrite an extra byte at the end, which is fine as long - // as they get called in the right order.. s->manufacturer = ByteOrder::swapIfBigEndian ((uint32) values.getValue ("Manufacturer", "0").getIntValue()); s->product = ByteOrder::swapIfBigEndian ((uint32) values.getValue ("Product", "0").getIntValue()); s->samplePeriod = ByteOrder::swapIfBigEndian ((uint32) values.getValue ("SamplePeriod", "0").getIntValue()); @@ -241,6 +240,52 @@ struct SMPLChunk } } PACKED; +//============================================================================== +struct InstChunk +{ + int8 baseNote; + int8 detune; + int8 gain; + int8 lowNote; + int8 highNote; + int8 lowVelocity; + int8 highVelocity; + + void copyTo (StringPairArray& values) const + { + values.set ("MidiUnityNote", String (baseNote)); + values.set ("Detune", String (detune)); + values.set ("Gain", String (gain)); + values.set ("LowNote", String (lowNote)); + values.set ("HighNote", String (highNote)); + values.set ("LowVelocity", String (lowVelocity)); + values.set ("HighVelocity", String (highVelocity)); + } + + static MemoryBlock createFrom (const StringPairArray& values) + { + const StringArray& keys = values.getAllKeys(); + + if (! 
(keys.contains ("LowNote", true) && keys.contains ("HighNote", true))) + return MemoryBlock(); + + MemoryBlock data (8); + data.fillWith (0); + + InstChunk* const inst = static_cast (data.getData()); + + inst->baseNote = (int8) values.getValue ("MidiUnityNote", "60").getIntValue(); + inst->detune = (int8) values.getValue ("Detune", "0").getIntValue(); + inst->gain = (int8) values.getValue ("Gain", "0").getIntValue(); + inst->lowNote = (int8) values.getValue ("LowNote", "0").getIntValue(); + inst->highNote = (int8) values.getValue ("HighNote", "127").getIntValue(); + inst->lowVelocity = (int8) values.getValue ("LowVelocity", "1").getIntValue(); + inst->highVelocity = (int8) values.getValue ("HighVelocity", "127").getIntValue(); + + return data; + } +} PACKED; + //============================================================================== struct CueChunk { @@ -276,39 +321,118 @@ struct CueChunk } } - static MemoryBlock createFrom (const StringPairArray& values) + static void create (MemoryBlock& data, const StringPairArray& values) { const int numCues = values.getValue ("NumCuePoints", "0").getIntValue(); - if (numCues <= 0) - return MemoryBlock(); + if (numCues > 0) + { + const size_t sizeNeeded = sizeof (CueChunk) + (numCues - 1) * sizeof (Cue); + data.setSize ((sizeNeeded + 3) & ~3, true); - const size_t sizeNeeded = sizeof (CueChunk) + (numCues - 1) * sizeof (Cue); - MemoryBlock data ((sizeNeeded + 3) & ~3); - data.fillWith (0); + CueChunk* const c = static_cast (data.getData()); - CueChunk* const c = static_cast (data.getData()); + c->numCues = ByteOrder::swapIfBigEndian ((uint32) numCues); - c->numCues = ByteOrder::swapIfBigEndian ((uint32) numCues); + const String dataChunkID (chunkName ("data")); - const String dataChunkID (chunkName ("data")); + int nextOrder = 0; - for (int i = 0; i < numCues; ++i) - { - const String prefix ("Cue" + String(i)); - c->cues[i].identifier = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "Identifier", "0").getIntValue()); - c->cues[i].order = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "Order", "0").getIntValue()); - c->cues[i].chunkID = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "ChunkID", dataChunkID).getIntValue()); - c->cues[i].chunkStart = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "ChunkStart", "0").getIntValue()); - c->cues[i].blockStart = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "BlockStart", "0").getIntValue()); - c->cues[i].offset = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "Offset", "0").getIntValue()); - } + #if JUCE_DEBUG + Array identifiers; + #endif - return data; + for (int i = 0; i < numCues; ++i) + { + const String prefix ("Cue" + String (i)); + + uint32 identifier = values.getValue (prefix + "Identifier", "0").getIntValue(); + + #if JUCE_DEBUG + jassert (! 
identifiers.contains (identifier)); + identifiers.add (identifier); + #endif + + c->cues[i].identifier = ByteOrder::swapIfBigEndian ((uint32) identifier); + + const int order = values.getValue (prefix + "Order", String (nextOrder)).getIntValue(); + nextOrder = jmax (nextOrder, order) + 1; + + c->cues[i].order = ByteOrder::swapIfBigEndian ((uint32) order); + c->cues[i].chunkID = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "ChunkID", dataChunkID).getIntValue()); + c->cues[i].chunkStart = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "ChunkStart", "0").getIntValue()); + c->cues[i].blockStart = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "BlockStart", "0").getIntValue()); + c->cues[i].offset = ByteOrder::swapIfBigEndian ((uint32) values.getValue (prefix + "Offset", "0").getIntValue()); + } + } } } PACKED; +//============================================================================== +namespace ListChunk +{ + void appendLabelOrNoteChunk (const StringPairArray& values, const String& prefix, + const int chunkType, MemoryOutputStream& out) + { + const String label (values.getValue (prefix + "Text", prefix)); + const int labelLength = label.getNumBytesAsUTF8() + 1; + const int chunkLength = 4 + labelLength + (labelLength & 1); + + out.writeInt (chunkType); + out.writeInt (chunkLength); + out.writeInt (values.getValue (prefix + "Identifier", "0").getIntValue()); + out.write (label.toUTF8(), labelLength); + + if ((out.getDataSize() & 1) != 0) + out.writeByte (0); + } + + void appendExtraChunk (const StringPairArray& values, const String& prefix, MemoryOutputStream& out) + { + const String text (values.getValue (prefix + "Text", prefix)); + + const int textLength = text.getNumBytesAsUTF8() + 1; // include null terminator + uint32 chunkLength = textLength + 20 + (textLength & 1); + + out.writeInt (chunkName ("ltxt")); + out.writeInt (chunkLength); + out.writeInt (values.getValue (prefix + "Identifier", "0").getIntValue()); + out.writeInt (values.getValue (prefix + "SampleLength", "0").getIntValue()); + out.writeInt (values.getValue (prefix + "Purpose", "0").getIntValue()); + out.writeShort ((short) values.getValue (prefix + "Country", "0").getIntValue()); + out.writeShort ((short) values.getValue (prefix + "Language", "0").getIntValue()); + out.writeShort ((short) values.getValue (prefix + "Dialect", "0").getIntValue()); + out.writeShort ((short) values.getValue (prefix + "CodePage", "0").getIntValue()); + out.write (text.toUTF8(), textLength); + + if ((out.getDataSize() & 1) != 0) + out.writeByte (0); + } + + void create (MemoryBlock& block, const StringPairArray& values) + { + const int numCueLabels = values.getValue ("NumCueLabels", "0").getIntValue(); + const int numCueNotes = values.getValue ("NumCueNotes", "0").getIntValue(); + const int numCueRegions = values.getValue ("NumCueRegions", "0").getIntValue(); + + if (numCueLabels > 0 || numCueNotes > 0 || numCueRegions > 0) + { + MemoryOutputStream out (block, false); + + int i; + for (i = 0; i < numCueLabels; ++i) + appendLabelOrNoteChunk (values, "CueLabel" + String (i), chunkName ("labl"), out); + + for (i = 0; i < numCueNotes; ++i) + appendLabelOrNoteChunk (values, "CueNote" + String (i), chunkName ("note"), out); + + for (i = 0; i < numCueRegions; ++i) + appendExtraChunk (values, "CueRegion" + String (i), out); + } + } +} + //============================================================================== struct ExtensibleWavSubFormat { @@ -355,6 +479,9 @@ public: int64 end = 0; bool 
hasGotType = false; bool hasGotData = false; + int cueNoteIndex = 0; + int cueLabelIndex = 0; + int cueRegionIndex = 0; const int firstChunkType = input->readInt(); @@ -479,6 +606,13 @@ public: input->read (smpl, length); smpl->copyTo (metadataValues, length); } + else if (chunkType == chunkName ("inst") || chunkType == chunkName ("INST")) // need to check which... + { + HeapBlock inst; + inst.calloc (jmax ((size_t) length + 1, sizeof (InstChunk)), 1); + input->read (inst, length); + inst->copyTo (metadataValues); + } else if (chunkType == chunkName ("cue ")) { HeapBlock cue; @@ -486,6 +620,65 @@ public: input->read (cue, length); cue->copyTo (metadataValues, length); } + else if (chunkType == chunkName ("LIST")) + { + if (input->readInt() == chunkName ("adtl")) + { + while (input->getPosition() < chunkEnd) + { + const int adtlChunkType = input->readInt(); + const uint32 adtlLength = (uint32) input->readInt(); + const int64 adtlChunkEnd = input->getPosition() + (adtlLength + (adtlLength & 1)); + + if (adtlChunkType == chunkName ("labl") || adtlChunkType == chunkName ("note")) + { + String prefix; + + if (adtlChunkType == chunkName ("labl")) + prefix << "CueLabel" << cueLabelIndex++; + else if (adtlChunkType == chunkName ("note")) + prefix << "CueNote" << cueNoteIndex++; + + const uint32 identifier = (uint32) input->readInt(); + const uint32 stringLength = adtlLength - 4; + + MemoryBlock textBlock; + input->readIntoMemoryBlock (textBlock, stringLength); + const String text (String::fromUTF8 (static_cast (textBlock.getData()), textBlock.getSize())); + + metadataValues.set (prefix + "Identifier", String (identifier)); + metadataValues.set (prefix + "Text", text); + } + else if (adtlChunkType == chunkName ("ltxt")) + { + const String prefix ("CueRegion" + String (cueRegionIndex++)); + const uint32 identifier = (uint32) input->readInt(); + const uint32 sampleLength = (uint32) input->readInt(); + const uint32 purpose = (uint32) input->readInt(); + const uint16 country = (uint16) input->readInt(); + const uint16 language = (uint16) input->readInt(); + const uint16 dialect = (uint16) input->readInt(); + const uint16 codePage = (uint16) input->readInt(); + const uint32 stringLength = adtlLength - 20; + + MemoryBlock textBlock; + input->readIntoMemoryBlock (textBlock, stringLength); + const String text = String::fromUTF8 ((const char*)textBlock.getData(), textBlock.getSize()); + + metadataValues.set (prefix + "Identifier", String (identifier)); + metadataValues.set (prefix + "SampleLength", String (sampleLength)); + metadataValues.set (prefix + "Purpose", String (purpose)); + metadataValues.set (prefix + "Country", String (country)); + metadataValues.set (prefix + "Language", String (language)); + metadataValues.set (prefix + "Dialect", String (dialect)); + metadataValues.set (prefix + "CodePage", String (codePage)); + metadataValues.set (prefix + "Text", text); + } + + input->setPosition (adtlChunkEnd); + } + } + } else if (chunkEnd <= input->getPosition()) { break; @@ -494,6 +687,11 @@ public: input->setPosition (chunkEnd); } } + + if (cueLabelIndex > 0) metadataValues.set ("NumCueLabels", String (cueLabelIndex)); + if (cueNoteIndex > 0) metadataValues.set ("NumCueNotes", String (cueNoteIndex)); + if (cueRegionIndex > 0) metadataValues.set ("NumCueRegions", String (cueRegionIndex)); + if (metadataValues.size() > 0) metadataValues.set ("MetaDataSource", "WAV"); } //============================================================================== @@ -576,9 +774,16 @@ public: if (metadataValues.size() > 
0)
         {
+            // The meta data should have been sanitised for the WAV format.
+            // If it was originally sourced from an AIFF file the MetaDataSource
+            // key should be removed (or set to "WAV") once this has been done
+            jassert (metadataValues.getValue ("MetaDataSource", "None") != "AIFF");
+
             bwavChunk = BWAVChunk::createFrom (metadataValues);
             smplChunk = SMPLChunk::createFrom (metadataValues);
-            cueChunk = CueChunk ::createFrom (metadataValues);
+            instChunk = InstChunk::createFrom (metadataValues);
+            CueChunk ::create (cueChunk, metadataValues);
+            ListChunk::create (listChunk, metadataValues);
         }

         headerPosition = out->getPosition();
@@ -636,7 +841,7 @@ public:

 private:
     ScopedPointer converter;
-    MemoryBlock tempBlock, bwavChunk, smplChunk, cueChunk;
+    MemoryBlock tempBlock, bwavChunk, smplChunk, instChunk, cueChunk, listChunk;
     uint64 lengthInSamples, bytesWritten;
     int64 headerPosition;
     bool writeFailed;
@@ -675,7 +880,9 @@ private:
                              + 8 + audioDataSize + (audioDataSize & 1)
                              + (bwavChunk.getSize() > 0 ? (8 + bwavChunk.getSize()) : 0)
                              + (smplChunk.getSize() > 0 ? (8 + smplChunk.getSize()) : 0)
+                             + (instChunk.getSize() > 0 ? (8 + instChunk.getSize()) : 0)
                              + (cueChunk .getSize() > 0 ? (8 + cueChunk .getSize()) : 0)
+                             + (listChunk.getSize() > 0 ? (12 + listChunk.getSize()) : 0)
                              + (8 + 28); // (ds64 chunk)

         riffChunkSize += (riffChunkSize & 0x1);
@@ -754,6 +961,13 @@ private:
             output->write (smplChunk.getData(), (int) smplChunk.getSize());
         }

+        if (instChunk.getSize() > 0)
+        {
+            output->writeInt (chunkName ("inst"));
+            output->writeInt (7);
+            output->write (instChunk.getData(), (int) instChunk.getSize());
+        }
+
         if (cueChunk.getSize() > 0)
         {
             output->writeInt (chunkName ("cue "));
@@ -761,6 +975,14 @@ private:
             output->write (cueChunk.getData(), (int) cueChunk.getSize());
         }

+        if (listChunk.getSize() > 0)
+        {
+            output->writeInt (chunkName ("LIST"));
+            output->writeInt ((int) listChunk.getSize() + 4);
+            output->writeInt (chunkName ("adtl"));
+            output->write (listChunk.getData(), (int) listChunk.getSize());
+        }
+
         output->writeInt (chunkName ("data"));
         output->writeInt (isRF64 ? -1 : (int) (lengthInSamples * bytesPerFrame));
diff --git a/src/audio/plugin_client/juce_PluginHeaders.h b/src/audio/plugin_client/juce_PluginHeaders.h
index efa729c660..8b7bec9109 100644
--- a/src/audio/plugin_client/juce_PluginHeaders.h
+++ b/src/audio/plugin_client/juce_PluginHeaders.h
@@ -24,6 +24,9 @@
 */

 #include "juce_IncludeCharacteristics.h"
+
+#define DONT_AUTOLINK_TO_JUCE_LIBRARY 1
+
 #include "../../../juce.h"

 #ifndef __JUCE_PLUGINHEADERS_JUCEHEADER__
diff --git a/src/audio/processors/juce_AudioProcessorGraph.cpp b/src/audio/processors/juce_AudioProcessorGraph.cpp
index f82da1ecd9..fded460da3 100644
--- a/src/audio/processors/juce_AudioProcessorGraph.cpp
+++ b/src/audio/processors/juce_AudioProcessorGraph.cpp
@@ -30,908 +30,1020 @@ BEGIN_JUCE_NAMESPACE
 #include "juce_AudioProcessorGraph.h"
 #include "../../events/juce_MessageManager.h"
-
 const int AudioProcessorGraph::midiChannelIndex = 0x1000;

 //==============================================================================
-AudioProcessorGraph::Node::Node (const uint32 id_, AudioProcessor* const processor_)
-    : id (id_),
-      processor (processor_),
-      isPrepared (false)
+namespace GraphRenderingOps
 {
-    jassert (processor_ != nullptr);
-}

-void AudioProcessorGraph::Node::prepare (const double sampleRate, const int blockSize,
-                                         AudioProcessorGraph* const graph)
+//==============================================================================
+class AudioGraphRenderingOp
 {
-    if (! 
isPrepared) - { - isPrepared = true; +public: + AudioGraphRenderingOp() {} + virtual ~AudioGraphRenderingOp() {} - AudioProcessorGraph::AudioGraphIOProcessor* const ioProc - = dynamic_cast (static_cast (processor)); + virtual void perform (AudioSampleBuffer& sharedBufferChans, + const OwnedArray & sharedMidiBuffers, + const int numSamples) = 0; - if (ioProc != nullptr) - ioProc->setParentGraph (graph); + JUCE_LEAK_DETECTOR (AudioGraphRenderingOp); +}; - processor->setPlayConfigDetails (processor->getNumInputChannels(), - processor->getNumOutputChannels(), - sampleRate, blockSize); +//============================================================================== +class ClearChannelOp : public AudioGraphRenderingOp +{ +public: + ClearChannelOp (const int channelNum_) + : channelNum (channelNum_) + {} - processor->prepareToPlay (sampleRate, blockSize); + void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray &, const int numSamples) + { + sharedBufferChans.clear (channelNum, 0, numSamples); } -} -void AudioProcessorGraph::Node::unprepare() +private: + const int channelNum; + + JUCE_DECLARE_NON_COPYABLE (ClearChannelOp); +}; + +//============================================================================== +class CopyChannelOp : public AudioGraphRenderingOp { - if (isPrepared) +public: + CopyChannelOp (const int srcChannelNum_, const int dstChannelNum_) + : srcChannelNum (srcChannelNum_), + dstChannelNum (dstChannelNum_) + {} + + void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray &, const int numSamples) { - isPrepared = false; - processor->releaseResources(); + sharedBufferChans.copyFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples); } -} + +private: + const int srcChannelNum, dstChannelNum; + + JUCE_DECLARE_NON_COPYABLE (CopyChannelOp); +}; //============================================================================== -AudioProcessorGraph::AudioProcessorGraph() - : lastNodeId (0), - renderingBuffers (1, 1), - currentAudioOutputBuffer (1, 1) +class AddChannelOp : public AudioGraphRenderingOp { -} +public: + AddChannelOp (const int srcChannelNum_, const int dstChannelNum_) + : srcChannelNum (srcChannelNum_), + dstChannelNum (dstChannelNum_) + {} -AudioProcessorGraph::~AudioProcessorGraph() -{ - clearRenderingSequence(); - clear(); -} + void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray &, const int numSamples) + { + sharedBufferChans.addFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples); + } -const String AudioProcessorGraph::getName() const -{ - return "Audio Graph"; -} +private: + const int srcChannelNum, dstChannelNum; + + JUCE_DECLARE_NON_COPYABLE (AddChannelOp); +}; //============================================================================== -void AudioProcessorGraph::clear() +class ClearMidiBufferOp : public AudioGraphRenderingOp { - nodes.clear(); - connections.clear(); - triggerAsyncUpdate(); -} +public: + ClearMidiBufferOp (const int bufferNum_) + : bufferNum (bufferNum_) + {} -AudioProcessorGraph::Node* AudioProcessorGraph::getNodeForId (const uint32 nodeId) const -{ - for (int i = nodes.size(); --i >= 0;) - if (nodes.getUnchecked(i)->id == nodeId) - return nodes.getUnchecked(i); + void perform (AudioSampleBuffer&, const OwnedArray & sharedMidiBuffers, const int) + { + sharedMidiBuffers.getUnchecked (bufferNum)->clear(); + } - return nullptr; -} +private: + const int bufferNum; -AudioProcessorGraph::Node* AudioProcessorGraph::addNode (AudioProcessor* const newProcessor, - uint32 
nodeId) + JUCE_DECLARE_NON_COPYABLE (ClearMidiBufferOp); +}; + +//============================================================================== +class CopyMidiBufferOp : public AudioGraphRenderingOp { - if (newProcessor == nullptr) - { - jassertfalse; - return nullptr; - } +public: + CopyMidiBufferOp (const int srcBufferNum_, const int dstBufferNum_) + : srcBufferNum (srcBufferNum_), + dstBufferNum (dstBufferNum_) + {} - if (nodeId == 0) - { - nodeId = ++lastNodeId; - } - else + void perform (AudioSampleBuffer&, const OwnedArray & sharedMidiBuffers, const int) { - // you can't add a node with an id that already exists in the graph.. - jassert (getNodeForId (nodeId) == nullptr); - removeNode (nodeId); + *sharedMidiBuffers.getUnchecked (dstBufferNum) = *sharedMidiBuffers.getUnchecked (srcBufferNum); } - lastNodeId = nodeId; +private: + const int srcBufferNum, dstBufferNum; - Node* const n = new Node (nodeId, newProcessor); - nodes.add (n); - triggerAsyncUpdate(); + JUCE_DECLARE_NON_COPYABLE (CopyMidiBufferOp); +}; - AudioProcessorGraph::AudioGraphIOProcessor* const ioProc - = dynamic_cast (static_cast (n->processor)); +//============================================================================== +class AddMidiBufferOp : public AudioGraphRenderingOp +{ +public: + AddMidiBufferOp (const int srcBufferNum_, const int dstBufferNum_) + : srcBufferNum (srcBufferNum_), + dstBufferNum (dstBufferNum_) + {} - if (ioProc != nullptr) - ioProc->setParentGraph (this); + void perform (AudioSampleBuffer&, const OwnedArray & sharedMidiBuffers, const int numSamples) + { + sharedMidiBuffers.getUnchecked (dstBufferNum) + ->addEvents (*sharedMidiBuffers.getUnchecked (srcBufferNum), 0, numSamples, 0); + } - return n; -} +private: + const int srcBufferNum, dstBufferNum; -bool AudioProcessorGraph::removeNode (const uint32 nodeId) -{ - disconnectNode (nodeId); + JUCE_DECLARE_NON_COPYABLE (AddMidiBufferOp); +}; - for (int i = nodes.size(); --i >= 0;) +//============================================================================== +class ProcessBufferOp : public AudioGraphRenderingOp +{ +public: + ProcessBufferOp (const AudioProcessorGraph::Node::Ptr& node_, + const Array & audioChannelsToUse_, + const int totalChans_, + const int midiBufferToUse_) + : node (node_), + processor (node_->getProcessor()), + audioChannelsToUse (audioChannelsToUse_), + totalChans (jmax (1, totalChans_)), + midiBufferToUse (midiBufferToUse_) { - if (nodes.getUnchecked(i)->id == nodeId) - { - AudioProcessorGraph::AudioGraphIOProcessor* const ioProc - = dynamic_cast (static_cast (nodes.getUnchecked(i)->processor)); + channels.calloc (totalChans); - if (ioProc != nullptr) - ioProc->setParentGraph (nullptr); + while (audioChannelsToUse.size() < totalChans) + audioChannelsToUse.add (0); + } - nodes.remove (i); - triggerAsyncUpdate(); + void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray & sharedMidiBuffers, const int numSamples) + { + for (int i = totalChans; --i >= 0;) + channels[i] = sharedBufferChans.getSampleData (audioChannelsToUse.getUnchecked (i), 0); - return true; - } + AudioSampleBuffer buffer (channels, totalChans, numSamples); + + processor->processBlock (buffer, *sharedMidiBuffers.getUnchecked (midiBufferToUse)); } - return false; -} + const AudioProcessorGraph::Node::Ptr node; + AudioProcessor* const processor; + +private: + Array audioChannelsToUse; + HeapBlock channels; + int totalChans; + int midiBufferToUse; + + JUCE_DECLARE_NON_COPYABLE (ProcessBufferOp); +}; 
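// ---------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the patch): ProcessBufferOp
// works by building an AudioSampleBuffer that merely aliases channel pointers
// owned by the shared buffer pool, so each node processes "in place" with no
// per-block allocation or copying. The hypothetical helper below shows that
// aliasing idea on its own, using the same JUCE calls as the op above
// (HeapBlock::calloc, AudioSampleBuffer::getSampleData and the float** buffer
// constructor); the function name and parameters are invented for illustration.

void processWithSharedChannels (AudioProcessor& proc,
                                AudioSampleBuffer& sharedPool,
                                const Array<int>& channelIndices,
                                MidiBuffer& midi, const int numSamples)
{
    HeapBlock<float*> chans;
    chans.calloc (channelIndices.size());

    for (int i = 0; i < channelIndices.size(); ++i)    // alias the pool's channels, don't copy them
        chans[i] = sharedPool.getSampleData (channelIndices.getUnchecked (i), 0);

    AudioSampleBuffer aliased (chans, channelIndices.size(), numSamples);
    proc.processBlock (aliased, midi);                 // output lands straight back in the pool
}
// ---------------------------------------------------------------------------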
//============================================================================== -const AudioProcessorGraph::Connection* AudioProcessorGraph::getConnectionBetween (const uint32 sourceNodeId, - const int sourceChannelIndex, - const uint32 destNodeId, - const int destChannelIndex) const +/** Used to calculate the correct sequence of rendering ops needed, based on + the best re-use of shared buffers at each stage. +*/ +class RenderingOpSequenceCalculator { - for (int i = connections.size(); --i >= 0;) +public: + //============================================================================== + RenderingOpSequenceCalculator (AudioProcessorGraph& graph_, + const Array& orderedNodes_, + Array& renderingOps) + : graph (graph_), + orderedNodes (orderedNodes_) { - const Connection* const c = connections.getUnchecked(i); + nodeIds.add ((uint32) zeroNodeID); // first buffer is read-only zeros + channels.add (0); - if (c->sourceNodeId == sourceNodeId - && c->destNodeId == destNodeId - && c->sourceChannelIndex == sourceChannelIndex - && c->destChannelIndex == destChannelIndex) + midiNodeIds.add ((uint32) zeroNodeID); + + for (int i = 0; i < orderedNodes.size(); ++i) { - return c; + createRenderingOpsForNode ((AudioProcessorGraph::Node*) orderedNodes.getUnchecked(i), + renderingOps, i); + + markAnyUnusedBuffersAsFree (i); } } - return nullptr; -} + int getNumBuffersNeeded() const { return nodeIds.size(); } + int getNumMidiBuffersNeeded() const { return midiNodeIds.size(); } -bool AudioProcessorGraph::isConnected (const uint32 possibleSourceNodeId, - const uint32 possibleDestNodeId) const -{ - for (int i = connections.size(); --i >= 0;) +private: + //============================================================================== + AudioProcessorGraph& graph; + const Array& orderedNodes; + Array channels; + Array nodeIds, midiNodeIds; + + enum { freeNodeID = 0xffffffff, zeroNodeID = 0xfffffffe }; + + static bool isNodeBusy (uint32 nodeID) noexcept { return nodeID != freeNodeID && nodeID != zeroNodeID; } + + //============================================================================== + void createRenderingOpsForNode (AudioProcessorGraph::Node* const node, + Array& renderingOps, + const int ourRenderingIndex) { - const Connection* const c = connections.getUnchecked(i); + const int numIns = node->getProcessor()->getNumInputChannels(); + const int numOuts = node->getProcessor()->getNumOutputChannels(); + const int totalChans = jmax (numIns, numOuts); - if (c->sourceNodeId == possibleSourceNodeId - && c->destNodeId == possibleDestNodeId) + Array audioChannelsToUse; + int midiBufferToUse = -1; + + for (int inputChan = 0; inputChan < numIns; ++inputChan) { - return true; - } - } + // get a list of all the inputs to this node + Array sourceNodes, sourceOutputChans; - return false; -} + for (int i = graph.getNumConnections(); --i >= 0;) + { + const AudioProcessorGraph::Connection* const c = graph.getConnection (i); -bool AudioProcessorGraph::canConnect (const uint32 sourceNodeId, - const int sourceChannelIndex, - const uint32 destNodeId, - const int destChannelIndex) const -{ - if (sourceChannelIndex < 0 - || destChannelIndex < 0 - || sourceNodeId == destNodeId - || (destChannelIndex == midiChannelIndex) != (sourceChannelIndex == midiChannelIndex)) - return false; + if (c->destNodeId == node->nodeId && c->destChannelIndex == inputChan) + { + sourceNodes.add (c->sourceNodeId); + sourceOutputChans.add (c->sourceChannelIndex); + } + } - const Node* const source = getNodeForId (sourceNodeId); + int bufIndex = -1; - 
if (source == nullptr - || (sourceChannelIndex != midiChannelIndex && sourceChannelIndex >= source->processor->getNumOutputChannels()) - || (sourceChannelIndex == midiChannelIndex && ! source->processor->producesMidi())) - return false; + if (sourceNodes.size() == 0) + { + // unconnected input channel - const Node* const dest = getNodeForId (destNodeId); + if (inputChan >= numOuts) + { + bufIndex = getReadOnlyEmptyBuffer(); + jassert (bufIndex >= 0); + } + else + { + bufIndex = getFreeBuffer (false); + renderingOps.add (new ClearChannelOp (bufIndex)); + } + } + else if (sourceNodes.size() == 1) + { + // channel with a straightforward single input.. + const int srcNode = sourceNodes.getUnchecked(0); + const int srcChan = sourceOutputChans.getUnchecked(0); - if (dest == nullptr - || (destChannelIndex != midiChannelIndex && destChannelIndex >= dest->processor->getNumInputChannels()) - || (destChannelIndex == midiChannelIndex && ! dest->processor->acceptsMidi())) - return false; + bufIndex = getBufferContaining (srcNode, srcChan); - return getConnectionBetween (sourceNodeId, sourceChannelIndex, - destNodeId, destChannelIndex) == nullptr; -} + if (bufIndex < 0) + { + // if not found, this is probably a feedback loop + bufIndex = getReadOnlyEmptyBuffer(); + jassert (bufIndex >= 0); + } -bool AudioProcessorGraph::addConnection (const uint32 sourceNodeId, - const int sourceChannelIndex, - const uint32 destNodeId, - const int destChannelIndex) -{ - if (! canConnect (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex)) - return false; + if (inputChan < numOuts + && isBufferNeededLater (ourRenderingIndex, + inputChan, + srcNode, srcChan)) + { + // can't mess up this channel because it's needed later by another node, so we + // need to use a copy of it.. + const int newFreeBuffer = getFreeBuffer (false); - Connection* const c = new Connection(); - c->sourceNodeId = sourceNodeId; - c->sourceChannelIndex = sourceChannelIndex; - c->destNodeId = destNodeId; - c->destChannelIndex = destChannelIndex; + renderingOps.add (new CopyChannelOp (bufIndex, newFreeBuffer)); - connections.add (c); - triggerAsyncUpdate(); + bufIndex = newFreeBuffer; + } + } + else + { + // channel with a mix of several inputs.. - return true; -} + // try to find a re-usable channel from our inputs.. + int reusableInputIndex = -1; -void AudioProcessorGraph::removeConnection (const int index) -{ - connections.remove (index); - triggerAsyncUpdate(); -} + for (int i = 0; i < sourceNodes.size(); ++i) + { + const int sourceBufIndex = getBufferContaining (sourceNodes.getUnchecked(i), + sourceOutputChans.getUnchecked(i)); -bool AudioProcessorGraph::removeConnection (const uint32 sourceNodeId, const int sourceChannelIndex, - const uint32 destNodeId, const int destChannelIndex) -{ - bool doneAnything = false; + if (sourceBufIndex >= 0 + && ! isBufferNeededLater (ourRenderingIndex, + inputChan, + sourceNodes.getUnchecked(i), + sourceOutputChans.getUnchecked(i))) + { + // we've found one of our input chans that can be re-used.. + reusableInputIndex = i; + bufIndex = sourceBufIndex; + break; + } + } - for (int i = connections.size(); --i >= 0;) - { - const Connection* const c = connections.getUnchecked(i); + if (reusableInputIndex < 0) + { + // can't re-use any of our input chans, so get a new one and copy everything into it.. 
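// (editor's note, not part of the patch: "copy everything into it" happens in two
// steps below - the first source channel is copied into the freshly-claimed buffer,
// then each remaining source is mixed in with an AddChannelOp)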
+ bufIndex = getFreeBuffer (false); + jassert (bufIndex != 0); - if (c->sourceNodeId == sourceNodeId - && c->destNodeId == destNodeId - && c->sourceChannelIndex == sourceChannelIndex - && c->destChannelIndex == destChannelIndex) - { - removeConnection (i); - doneAnything = true; - triggerAsyncUpdate(); - } - } + const int srcIndex = getBufferContaining (sourceNodes.getUnchecked (0), + sourceOutputChans.getUnchecked (0)); + if (srcIndex < 0) + { + // if not found, this is probably a feedback loop + renderingOps.add (new ClearChannelOp (bufIndex)); + } + else + { + renderingOps.add (new CopyChannelOp (srcIndex, bufIndex)); + } - return doneAnything; -} + reusableInputIndex = 0; + } -bool AudioProcessorGraph::disconnectNode (const uint32 nodeId) -{ - bool doneAnything = false; + for (int j = 0; j < sourceNodes.size(); ++j) + { + if (j != reusableInputIndex) + { + const int srcIndex = getBufferContaining (sourceNodes.getUnchecked(j), + sourceOutputChans.getUnchecked(j)); + if (srcIndex >= 0) + renderingOps.add (new AddChannelOp (srcIndex, bufIndex)); + } + } + } - for (int i = connections.size(); --i >= 0;) - { - const Connection* const c = connections.getUnchecked(i); + jassert (bufIndex >= 0); + audioChannelsToUse.add (bufIndex); - if (c->sourceNodeId == nodeId || c->destNodeId == nodeId) - { - removeConnection (i); - doneAnything = true; - triggerAsyncUpdate(); + if (inputChan < numOuts) + markBufferAsContaining (bufIndex, node->nodeId, inputChan); } - } - - return doneAnything; -} -bool AudioProcessorGraph::removeIllegalConnections() -{ - bool doneAnything = false; + for (int outputChan = numIns; outputChan < numOuts; ++outputChan) + { + const int bufIndex = getFreeBuffer (false); + jassert (bufIndex != 0); + audioChannelsToUse.add (bufIndex); - for (int i = connections.size(); --i >= 0;) - { - const Connection* const c = connections.getUnchecked(i); + markBufferAsContaining (bufIndex, node->nodeId, outputChan); + } - const Node* const source = getNodeForId (c->sourceNodeId); - const Node* const dest = getNodeForId (c->destNodeId); + // Now the same thing for midi.. + Array midiSourceNodes; - if (source == nullptr || dest == nullptr - || (c->sourceChannelIndex != midiChannelIndex - && ! isPositiveAndBelow (c->sourceChannelIndex, source->processor->getNumOutputChannels())) - || (c->sourceChannelIndex == midiChannelIndex - && ! source->processor->producesMidi()) - || (c->destChannelIndex != midiChannelIndex - && ! isPositiveAndBelow (c->destChannelIndex, dest->processor->getNumInputChannels())) - || (c->destChannelIndex == midiChannelIndex - && ! dest->processor->acceptsMidi())) + for (int i = graph.getNumConnections(); --i >= 0;) { - removeConnection (i); - doneAnything = true; - triggerAsyncUpdate(); - } - } + const AudioProcessorGraph::Connection* const c = graph.getConnection (i); - return doneAnything; -} + if (c->destNodeId == node->nodeId && c->destChannelIndex == AudioProcessorGraph::midiChannelIndex) + midiSourceNodes.add (c->sourceNodeId); + } -//============================================================================== -namespace GraphRenderingOps -{ + if (midiSourceNodes.size() == 0) + { + // No midi inputs.. 
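// (editor's note, not part of the patch: a scratch MIDI buffer is still claimed here
// because ProcessBufferOp::perform unconditionally passes
// sharedMidiBuffers[midiBufferToUse] to AudioProcessor::processBlock, even for nodes
// that neither accept nor produce MIDI)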
+ midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi -//============================================================================== -class AudioGraphRenderingOp -{ -public: - AudioGraphRenderingOp() {} - virtual ~AudioGraphRenderingOp() {} - - virtual void perform (AudioSampleBuffer& sharedBufferChans, - const OwnedArray & sharedMidiBuffers, - const int numSamples) = 0; - - JUCE_LEAK_DETECTOR (AudioGraphRenderingOp); -}; - -//============================================================================== -class ClearChannelOp : public AudioGraphRenderingOp -{ -public: - ClearChannelOp (const int channelNum_) - : channelNum (channelNum_) - {} - - void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray &, const int numSamples) - { - sharedBufferChans.clear (channelNum, 0, numSamples); - } - -private: - const int channelNum; - - JUCE_DECLARE_NON_COPYABLE (ClearChannelOp); -}; - -//============================================================================== -class CopyChannelOp : public AudioGraphRenderingOp -{ -public: - CopyChannelOp (const int srcChannelNum_, const int dstChannelNum_) - : srcChannelNum (srcChannelNum_), - dstChannelNum (dstChannelNum_) - {} - - void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray &, const int numSamples) - { - sharedBufferChans.copyFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples); - } - -private: - const int srcChannelNum, dstChannelNum; - - JUCE_DECLARE_NON_COPYABLE (CopyChannelOp); -}; - -//============================================================================== -class AddChannelOp : public AudioGraphRenderingOp -{ -public: - AddChannelOp (const int srcChannelNum_, const int dstChannelNum_) - : srcChannelNum (srcChannelNum_), - dstChannelNum (dstChannelNum_) - {} + if (node->getProcessor()->acceptsMidi() || node->getProcessor()->producesMidi()) + renderingOps.add (new ClearMidiBufferOp (midiBufferToUse)); + } + else if (midiSourceNodes.size() == 1) + { + // One midi input.. + midiBufferToUse = getBufferContaining (midiSourceNodes.getUnchecked(0), + AudioProcessorGraph::midiChannelIndex); - void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray &, const int numSamples) - { - sharedBufferChans.addFrom (dstChannelNum, 0, sharedBufferChans, srcChannelNum, 0, numSamples); - } + if (midiBufferToUse >= 0) + { + if (isBufferNeededLater (ourRenderingIndex, + AudioProcessorGraph::midiChannelIndex, + midiSourceNodes.getUnchecked(0), + AudioProcessorGraph::midiChannelIndex)) + { + // can't mess up this channel because it's needed later by another node, so we + // need to use a copy of it.. + const int newFreeBuffer = getFreeBuffer (true); + renderingOps.add (new CopyMidiBufferOp (midiBufferToUse, newFreeBuffer)); + midiBufferToUse = newFreeBuffer; + } + } + else + { + // probably a feedback loop, so just use an empty one.. + midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi + } + } + else + { + // More than one midi input being mixed.. + int reusableInputIndex = -1; -private: - const int srcChannelNum, dstChannelNum; + for (int i = 0; i < midiSourceNodes.size(); ++i) + { + const int sourceBufIndex = getBufferContaining (midiSourceNodes.getUnchecked(i), + AudioProcessorGraph::midiChannelIndex); - JUCE_DECLARE_NON_COPYABLE (AddChannelOp); -}; + if (sourceBufIndex >= 0 + && ! 
isBufferNeededLater (ourRenderingIndex, + AudioProcessorGraph::midiChannelIndex, + midiSourceNodes.getUnchecked(i), + AudioProcessorGraph::midiChannelIndex)) + { + // we've found one of our input buffers that can be re-used.. + reusableInputIndex = i; + midiBufferToUse = sourceBufIndex; + break; + } + } -//============================================================================== -class ClearMidiBufferOp : public AudioGraphRenderingOp -{ -public: - ClearMidiBufferOp (const int bufferNum_) - : bufferNum (bufferNum_) - {} + if (reusableInputIndex < 0) + { + // can't re-use any of our input buffers, so get a new one and copy everything into it.. + midiBufferToUse = getFreeBuffer (true); + jassert (midiBufferToUse >= 0); - void perform (AudioSampleBuffer&, const OwnedArray & sharedMidiBuffers, const int) - { - sharedMidiBuffers.getUnchecked (bufferNum)->clear(); - } + const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(0), + AudioProcessorGraph::midiChannelIndex); + if (srcIndex >= 0) + renderingOps.add (new CopyMidiBufferOp (srcIndex, midiBufferToUse)); + else + renderingOps.add (new ClearMidiBufferOp (midiBufferToUse)); -private: - const int bufferNum; + reusableInputIndex = 0; + } - JUCE_DECLARE_NON_COPYABLE (ClearMidiBufferOp); -}; + for (int j = 0; j < midiSourceNodes.size(); ++j) + { + if (j != reusableInputIndex) + { + const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(j), + AudioProcessorGraph::midiChannelIndex); + if (srcIndex >= 0) + renderingOps.add (new AddMidiBufferOp (srcIndex, midiBufferToUse)); + } + } + } -//============================================================================== -class CopyMidiBufferOp : public AudioGraphRenderingOp -{ -public: - CopyMidiBufferOp (const int srcBufferNum_, const int dstBufferNum_) - : srcBufferNum (srcBufferNum_), - dstBufferNum (dstBufferNum_) - {} + if (node->getProcessor()->producesMidi()) + markBufferAsContaining (midiBufferToUse, node->nodeId, + AudioProcessorGraph::midiChannelIndex); - void perform (AudioSampleBuffer&, const OwnedArray & sharedMidiBuffers, const int) - { - *sharedMidiBuffers.getUnchecked (dstBufferNum) = *sharedMidiBuffers.getUnchecked (srcBufferNum); + renderingOps.add (new ProcessBufferOp (node, audioChannelsToUse, + totalChans, midiBufferToUse)); } -private: - const int srcBufferNum, dstBufferNum; - - JUCE_DECLARE_NON_COPYABLE (CopyMidiBufferOp); -}; - -//============================================================================== -class AddMidiBufferOp : public AudioGraphRenderingOp -{ -public: - AddMidiBufferOp (const int srcBufferNum_, const int dstBufferNum_) - : srcBufferNum (srcBufferNum_), - dstBufferNum (dstBufferNum_) - {} - - void perform (AudioSampleBuffer&, const OwnedArray & sharedMidiBuffers, const int numSamples) + //============================================================================== + int getFreeBuffer (const bool forMidi) { - sharedMidiBuffers.getUnchecked (dstBufferNum) - ->addEvents (*sharedMidiBuffers.getUnchecked (srcBufferNum), 0, numSamples, 0); - } - -private: - const int srcBufferNum, dstBufferNum; - - JUCE_DECLARE_NON_COPYABLE (AddMidiBufferOp); -}; + if (forMidi) + { + for (int i = 1; i < midiNodeIds.size(); ++i) + if (midiNodeIds.getUnchecked(i) == freeNodeID) + return i; -//============================================================================== -class ProcessBufferOp : public AudioGraphRenderingOp -{ -public: - ProcessBufferOp (const AudioProcessorGraph::Node::Ptr& node_, - const Array & audioChannelsToUse_, - const int 
totalChans_, - const int midiBufferToUse_) - : node (node_), - processor (node_->getProcessor()), - audioChannelsToUse (audioChannelsToUse_), - totalChans (jmax (1, totalChans_)), - midiBufferToUse (midiBufferToUse_) - { - channels.calloc (totalChans); + midiNodeIds.add ((uint32) freeNodeID); + return midiNodeIds.size() - 1; + } + else + { + for (int i = 1; i < nodeIds.size(); ++i) + if (nodeIds.getUnchecked(i) == freeNodeID) + return i; - while (audioChannelsToUse.size() < totalChans) - audioChannelsToUse.add (0); + nodeIds.add ((uint32) freeNodeID); + channels.add (0); + return nodeIds.size() - 1; + } } - void perform (AudioSampleBuffer& sharedBufferChans, const OwnedArray & sharedMidiBuffers, const int numSamples) + int getReadOnlyEmptyBuffer() const noexcept { - for (int i = totalChans; --i >= 0;) - channels[i] = sharedBufferChans.getSampleData (audioChannelsToUse.getUnchecked (i), 0); - - AudioSampleBuffer buffer (channels, totalChans, numSamples); - - processor->processBlock (buffer, *sharedMidiBuffers.getUnchecked (midiBufferToUse)); + return 0; } - const AudioProcessorGraph::Node::Ptr node; - AudioProcessor* const processor; - -private: - Array audioChannelsToUse; - HeapBlock channels; - int totalChans; - int midiBufferToUse; - - JUCE_DECLARE_NON_COPYABLE (ProcessBufferOp); -}; - -//============================================================================== -/** Used to calculate the correct sequence of rendering ops needed, based on - the best re-use of shared buffers at each stage. -*/ -class RenderingOpSequenceCalculator -{ -public: - //============================================================================== - RenderingOpSequenceCalculator (AudioProcessorGraph& graph_, - const Array& orderedNodes_, - Array& renderingOps) - : graph (graph_), - orderedNodes (orderedNodes_) + int getBufferContaining (const uint32 nodeId, const int outputChannel) const noexcept { - nodeIds.add ((uint32) zeroNodeID); // first buffer is read-only zeros - channels.add (0); - - midiNodeIds.add ((uint32) zeroNodeID); - - for (int i = 0; i < orderedNodes.size(); ++i) + if (outputChannel == AudioProcessorGraph::midiChannelIndex) { - createRenderingOpsForNode ((AudioProcessorGraph::Node*) orderedNodes.getUnchecked(i), - renderingOps, i); - - markAnyUnusedBuffersAsFree (i); + for (int i = midiNodeIds.size(); --i >= 0;) + if (midiNodeIds.getUnchecked(i) == nodeId) + return i; + } + else + { + for (int i = nodeIds.size(); --i >= 0;) + if (nodeIds.getUnchecked(i) == nodeId + && channels.getUnchecked(i) == outputChannel) + return i; } - } - - int getNumBuffersNeeded() const { return nodeIds.size(); } - int getNumMidiBuffersNeeded() const { return midiNodeIds.size(); } - -private: - //============================================================================== - AudioProcessorGraph& graph; - const Array& orderedNodes; - Array channels; - Array nodeIds, midiNodeIds; - - enum { freeNodeID = 0xffffffff, zeroNodeID = 0xfffffffe }; - static bool isNodeBusy (uint32 nodeID) noexcept { return nodeID != freeNodeID && nodeID != zeroNodeID; } + return -1; + } - //============================================================================== - void createRenderingOpsForNode (AudioProcessorGraph::Node* const node, - Array& renderingOps, - const int ourRenderingIndex) + void markAnyUnusedBuffersAsFree (const int stepIndex) { - const int numIns = node->getProcessor()->getNumInputChannels(); - const int numOuts = node->getProcessor()->getNumOutputChannels(); - const int totalChans = jmax (numIns, numOuts); - - Array 
audioChannelsToUse; - int midiBufferToUse = -1; - - for (int inputChan = 0; inputChan < numIns; ++inputChan) + int i; + for (i = 0; i < nodeIds.size(); ++i) { - // get a list of all the inputs to this node - Array sourceNodes, sourceOutputChans; - - for (int i = graph.getNumConnections(); --i >= 0;) + if (isNodeBusy (nodeIds.getUnchecked(i)) + && ! isBufferNeededLater (stepIndex, -1, + nodeIds.getUnchecked(i), + channels.getUnchecked(i))) { - const AudioProcessorGraph::Connection* const c = graph.getConnection (i); - - if (c->destNodeId == node->id && c->destChannelIndex == inputChan) - { - sourceNodes.add (c->sourceNodeId); - sourceOutputChans.add (c->sourceChannelIndex); - } + nodeIds.set (i, (uint32) freeNodeID); } + } - int bufIndex = -1; - - if (sourceNodes.size() == 0) + for (i = 0; i < midiNodeIds.size(); ++i) + { + if (isNodeBusy (midiNodeIds.getUnchecked(i)) + && ! isBufferNeededLater (stepIndex, -1, + midiNodeIds.getUnchecked(i), + AudioProcessorGraph::midiChannelIndex)) { - // unconnected input channel - - if (inputChan >= numOuts) - { - bufIndex = getReadOnlyEmptyBuffer(); - jassert (bufIndex >= 0); - } - else - { - bufIndex = getFreeBuffer (false); - renderingOps.add (new ClearChannelOp (bufIndex)); - } + midiNodeIds.set (i, (uint32) freeNodeID); } - else if (sourceNodes.size() == 1) - { - // channel with a straightforward single input.. - const int srcNode = sourceNodes.getUnchecked(0); - const int srcChan = sourceOutputChans.getUnchecked(0); - - bufIndex = getBufferContaining (srcNode, srcChan); - - if (bufIndex < 0) - { - // if not found, this is probably a feedback loop - bufIndex = getReadOnlyEmptyBuffer(); - jassert (bufIndex >= 0); - } - - if (inputChan < numOuts - && isBufferNeededLater (ourRenderingIndex, - inputChan, - srcNode, srcChan)) - { - // can't mess up this channel because it's needed later by another node, so we - // need to use a copy of it.. - const int newFreeBuffer = getFreeBuffer (false); + } + } - renderingOps.add (new CopyChannelOp (bufIndex, newFreeBuffer)); + bool isBufferNeededLater (int stepIndexToSearchFrom, + int inputChannelOfIndexToIgnore, + const uint32 nodeId, + const int outputChanIndex) const + { + while (stepIndexToSearchFrom < orderedNodes.size()) + { + const AudioProcessorGraph::Node* const node = (const AudioProcessorGraph::Node*) orderedNodes.getUnchecked (stepIndexToSearchFrom); - bufIndex = newFreeBuffer; - } + if (outputChanIndex == AudioProcessorGraph::midiChannelIndex) + { + if (inputChannelOfIndexToIgnore != AudioProcessorGraph::midiChannelIndex + && graph.getConnectionBetween (nodeId, AudioProcessorGraph::midiChannelIndex, + node->nodeId, AudioProcessorGraph::midiChannelIndex) != nullptr) + return true; } else { - // channel with a mix of several inputs.. - - // try to find a re-usable channel from our inputs.. - int reusableInputIndex = -1; - - for (int i = 0; i < sourceNodes.size(); ++i) - { - const int sourceBufIndex = getBufferContaining (sourceNodes.getUnchecked(i), - sourceOutputChans.getUnchecked(i)); - - if (sourceBufIndex >= 0 - && ! isBufferNeededLater (ourRenderingIndex, - inputChan, - sourceNodes.getUnchecked(i), - sourceOutputChans.getUnchecked(i))) - { - // we've found one of our input chans that can be re-used.. - reusableInputIndex = i; - bufIndex = sourceBufIndex; - break; - } - } - - if (reusableInputIndex < 0) - { - // can't re-use any of our input chans, so get a new one and copy everything into it.. 
- bufIndex = getFreeBuffer (false); - jassert (bufIndex != 0); - - const int srcIndex = getBufferContaining (sourceNodes.getUnchecked (0), - sourceOutputChans.getUnchecked (0)); - if (srcIndex < 0) - { - // if not found, this is probably a feedback loop - renderingOps.add (new ClearChannelOp (bufIndex)); - } - else - { - renderingOps.add (new CopyChannelOp (srcIndex, bufIndex)); - } - - reusableInputIndex = 0; - } - - for (int j = 0; j < sourceNodes.size(); ++j) - { - if (j != reusableInputIndex) - { - const int srcIndex = getBufferContaining (sourceNodes.getUnchecked(j), - sourceOutputChans.getUnchecked(j)); - if (srcIndex >= 0) - renderingOps.add (new AddChannelOp (srcIndex, bufIndex)); - } - } - } - - jassert (bufIndex >= 0); - audioChannelsToUse.add (bufIndex); + for (int i = 0; i < node->getProcessor()->getNumInputChannels(); ++i) + if (i != inputChannelOfIndexToIgnore + && graph.getConnectionBetween (nodeId, outputChanIndex, + node->nodeId, i) != nullptr) + return true; + } - if (inputChan < numOuts) - markBufferAsContaining (bufIndex, node->id, inputChan); + inputChannelOfIndexToIgnore = -1; + ++stepIndexToSearchFrom; } - for (int outputChan = numIns; outputChan < numOuts; ++outputChan) + return false; + } + + void markBufferAsContaining (int bufferNum, uint32 nodeId, int outputIndex) + { + if (outputIndex == AudioProcessorGraph::midiChannelIndex) { - const int bufIndex = getFreeBuffer (false); - jassert (bufIndex != 0); - audioChannelsToUse.add (bufIndex); + jassert (bufferNum > 0 && bufferNum < midiNodeIds.size()); - markBufferAsContaining (bufIndex, node->id, outputChan); + midiNodeIds.set (bufferNum, nodeId); } - - // Now the same thing for midi.. - Array midiSourceNodes; - - for (int i = graph.getNumConnections(); --i >= 0;) + else { - const AudioProcessorGraph::Connection* const c = graph.getConnection (i); + jassert (bufferNum >= 0 && bufferNum < nodeIds.size()); - if (c->destNodeId == node->id && c->destChannelIndex == AudioProcessorGraph::midiChannelIndex) - midiSourceNodes.add (c->sourceNodeId); + nodeIds.set (bufferNum, nodeId); + channels.set (bufferNum, outputIndex); } + } - if (midiSourceNodes.size() == 0) - { - // No midi inputs.. - midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi + JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (RenderingOpSequenceCalculator); +}; - if (node->getProcessor()->acceptsMidi() || node->getProcessor()->producesMidi()) - renderingOps.add (new ClearMidiBufferOp (midiBufferToUse)); - } - else if (midiSourceNodes.size() == 1) +//============================================================================== +// Holds a fast lookup table for checking which nodes are inputs to others. +class ConnectionLookupTable +{ +public: + explicit ConnectionLookupTable (const OwnedArray& connections) + { + for (int i = 0; i < connections.size(); ++i) { - // One midi input.. - midiBufferToUse = getBufferContaining (midiSourceNodes.getUnchecked(0), - AudioProcessorGraph::midiChannelIndex); + const AudioProcessorGraph::Connection* const c = connections.getUnchecked(i); - if (midiBufferToUse >= 0) - { - if (isBufferNeededLater (ourRenderingIndex, - AudioProcessorGraph::midiChannelIndex, - midiSourceNodes.getUnchecked(0), - AudioProcessorGraph::midiChannelIndex)) - { - // can't mess up this channel because it's needed later by another node, so we - // need to use a copy of it.. 
- const int newFreeBuffer = getFreeBuffer (true); - renderingOps.add (new CopyMidiBufferOp (midiBufferToUse, newFreeBuffer)); - midiBufferToUse = newFreeBuffer; - } - } - else + int index; + Entry* entry = findEntry (c->destNodeId, index); + + if (entry == nullptr) { - // probably a feedback loop, so just use an empty one.. - midiBufferToUse = getFreeBuffer (true); // need to pick a buffer even if the processor doesn't use midi + entry = new Entry (c->destNodeId); + entries.insert (index, entry); } + + entry->srcNodes.add (c->sourceNodeId); } - else - { - // More than one midi input being mixed.. - int reusableInputIndex = -1; + } - for (int i = 0; i < midiSourceNodes.size(); ++i) - { - const int sourceBufIndex = getBufferContaining (midiSourceNodes.getUnchecked(i), - AudioProcessorGraph::midiChannelIndex); + bool isAnInputTo (const uint32 possibleInputId, + const uint32 possibleDestinationId) const noexcept + { + return isAnInputToRecursive (possibleInputId, possibleDestinationId, entries.size()); + } - if (sourceBufIndex >= 0 - && ! isBufferNeededLater (ourRenderingIndex, - AudioProcessorGraph::midiChannelIndex, - midiSourceNodes.getUnchecked(i), - AudioProcessorGraph::midiChannelIndex)) - { - // we've found one of our input buffers that can be re-used.. - reusableInputIndex = i; - midiBufferToUse = sourceBufIndex; - break; - } - } +private: + //============================================================================== + struct Entry + { + explicit Entry (const uint32 destNodeId_) noexcept : destNodeId (destNodeId_) {} - if (reusableInputIndex < 0) - { - // can't re-use any of our input buffers, so get a new one and copy everything into it.. - midiBufferToUse = getFreeBuffer (true); - jassert (midiBufferToUse >= 0); + const uint32 destNodeId; + SortedSet srcNodes; - const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(0), - AudioProcessorGraph::midiChannelIndex); - if (srcIndex >= 0) - renderingOps.add (new CopyMidiBufferOp (srcIndex, midiBufferToUse)); - else - renderingOps.add (new ClearMidiBufferOp (midiBufferToUse)); + JUCE_DECLARE_NON_COPYABLE (Entry); + }; - reusableInputIndex = 0; + OwnedArray entries; + + bool isAnInputToRecursive (const uint32 possibleInputId, + const uint32 possibleDestinationId, + int recursionCheck) const noexcept + { + int index; + const Entry* const entry = findEntry (possibleDestinationId, index); + + if (entry != nullptr) + { + const SortedSet& srcNodes = entry->srcNodes; + + if (srcNodes.contains (possibleInputId)) + return true; + + if (--recursionCheck >= 0) + { + for (int i = 0; i < srcNodes.size(); ++i) + if (isAnInputToRecursive (possibleInputId, srcNodes.getUnchecked(i), recursionCheck)) + return true; } + } - for (int j = 0; j < midiSourceNodes.size(); ++j) + return false; + } + + Entry* findEntry (const uint32 destNodeId, int& insertIndex) const noexcept + { + Entry* result = nullptr; + int firstElement = 0, lastElement = entries.size(); + + while (firstElement < lastElement) + { + Entry* const firstEntry = entries.getUnchecked (firstElement); + if (destNodeId == firstEntry->destNodeId) { - if (j != reusableInputIndex) - { - const int srcIndex = getBufferContaining (midiSourceNodes.getUnchecked(j), - AudioProcessorGraph::midiChannelIndex); - if (srcIndex >= 0) - renderingOps.add (new AddMidiBufferOp (srcIndex, midiBufferToUse)); - } + result = firstEntry; + break; } + + const int halfway = (firstElement + lastElement) / 2; + + if (halfway <= firstElement) + break; + + if (destNodeId >= entries.getUnchecked 
(halfway)->destNodeId) + firstElement = halfway; + else + lastElement = halfway; } - if (node->getProcessor()->producesMidi()) - markBufferAsContaining (midiBufferToUse, node->id, - AudioProcessorGraph::midiChannelIndex); + insertIndex = firstElement; + return result; + } - renderingOps.add (new ProcessBufferOp (node, audioChannelsToUse, - totalChans, midiBufferToUse)); + JUCE_DECLARE_NON_COPYABLE (ConnectionLookupTable); +}; + +//============================================================================== +struct ConnectionSorter +{ + static int compareElements (const AudioProcessorGraph::Connection* const first, + const AudioProcessorGraph::Connection* const second) noexcept + { + if (first->sourceNodeId < second->sourceNodeId) return -1; + else if (first->sourceNodeId > second->sourceNodeId) return 1; + else if (first->destNodeId < second->destNodeId) return -1; + else if (first->destNodeId > second->destNodeId) return 1; + else if (first->sourceChannelIndex < second->sourceChannelIndex) return -1; + else if (first->sourceChannelIndex > second->sourceChannelIndex) return 1; + else if (first->destChannelIndex < second->destChannelIndex) return -1; + else if (first->destChannelIndex > second->destChannelIndex) return 1; + + return 0; + } +}; + +} + +//============================================================================== +AudioProcessorGraph::Connection::Connection (const uint32 sourceNodeId_, const int sourceChannelIndex_, + const uint32 destNodeId_, const int destChannelIndex_) noexcept + : sourceNodeId (sourceNodeId_), sourceChannelIndex (sourceChannelIndex_), + destNodeId (destNodeId_), destChannelIndex (destChannelIndex_) +{ +} + +//============================================================================== +AudioProcessorGraph::Node::Node (const uint32 nodeId_, AudioProcessor* const processor_) noexcept + : nodeId (nodeId_), + processor (processor_), + isPrepared (false) +{ + jassert (processor_ != nullptr); +} + +void AudioProcessorGraph::Node::prepare (const double sampleRate, const int blockSize, + AudioProcessorGraph* const graph) +{ + if (! 
isPrepared)
+    {
+        isPrepared = true;
+
+        AudioProcessorGraph::AudioGraphIOProcessor* const ioProc
+            = dynamic_cast <AudioProcessorGraph::AudioGraphIOProcessor*> (static_cast <AudioProcessor*> (processor));
+
+        if (ioProc != nullptr)
+            ioProc->setParentGraph (graph);
+
+        processor->setPlayConfigDetails (processor->getNumInputChannels(),
+                                         processor->getNumOutputChannels(),
+                                         sampleRate, blockSize);
+
+        processor->prepareToPlay (sampleRate, blockSize);
+    }
+}
+
+void AudioProcessorGraph::Node::unprepare()
+{
+    if (isPrepared)
+    {
+        isPrepared = false;
+        processor->releaseResources();
+    }
+}
+
+//==============================================================================
+AudioProcessorGraph::AudioProcessorGraph()
+    : lastNodeId (0),
+      renderingBuffers (1, 1),
+      currentAudioOutputBuffer (1, 1)
+{
+}
+
+AudioProcessorGraph::~AudioProcessorGraph()
+{
+    clearRenderingSequence();
+    clear();
+}
+
+const String AudioProcessorGraph::getName() const
+{
+    return "Audio Graph";
+}
+
+//==============================================================================
+void AudioProcessorGraph::clear()
+{
+    nodes.clear();
+    connections.clear();
+    triggerAsyncUpdate();
+}
+
+AudioProcessorGraph::Node* AudioProcessorGraph::getNodeForId (const uint32 nodeId) const
+{
+    for (int i = nodes.size(); --i >= 0;)
+        if (nodes.getUnchecked(i)->nodeId == nodeId)
+            return nodes.getUnchecked(i);
+
+    return nullptr;
+}
+
+AudioProcessorGraph::Node* AudioProcessorGraph::addNode (AudioProcessor* const newProcessor,
+                                                         uint32 nodeId)
+{
+    if (newProcessor == nullptr)
+    {
+        jassertfalse;
+        return nullptr;
+    }
+
+    if (nodeId == 0)
+    {
+        nodeId = ++lastNodeId;
+    }
+    else
+    {
+        // you can't add a node with an id that already exists in the graph..
+        jassert (getNodeForId (nodeId) == nullptr);
+        removeNode (nodeId);
     }

-    //==============================================================================
-    int getFreeBuffer (const bool forMidi)
+    lastNodeId = nodeId;
+
+    Node* const n = new Node (nodeId, newProcessor);
+    nodes.add (n);
+    triggerAsyncUpdate();
+
+    AudioProcessorGraph::AudioGraphIOProcessor* const ioProc
+        = dynamic_cast <AudioProcessorGraph::AudioGraphIOProcessor*> (static_cast <AudioProcessor*> (n->processor));
+
+    if (ioProc != nullptr)
+        ioProc->setParentGraph (this);
+
+    return n;
+}
+
+bool AudioProcessorGraph::removeNode (const uint32 nodeId)
+{
+    disconnectNode (nodeId);
+
+    for (int i = nodes.size(); --i >= 0;)
     {
-        if (forMidi)
+        if (nodes.getUnchecked(i)->nodeId == nodeId)
         {
-            for (int i = 1; i < midiNodeIds.size(); ++i)
-                if (midiNodeIds.getUnchecked(i) == freeNodeID)
-                    return i;
+            AudioProcessorGraph::AudioGraphIOProcessor* const ioProc
+                = dynamic_cast <AudioProcessorGraph::AudioGraphIOProcessor*> (static_cast <AudioProcessor*> (nodes.getUnchecked(i)->processor));

-            midiNodeIds.add ((uint32) freeNodeID);
-            return midiNodeIds.size() - 1;
-        }
-        else
-        {
-            for (int i = 1; i < nodeIds.size(); ++i)
-                if (nodeIds.getUnchecked(i) == freeNodeID)
-                    return i;
+            if (ioProc != nullptr)
+                ioProc->setParentGraph (nullptr);

-            nodeIds.add ((uint32) freeNodeID);
-            channels.add (0);
-            return nodeIds.size() - 1;
+            nodes.remove (i);
+            triggerAsyncUpdate();
+
+            return true;
         }
     }

-    int getReadOnlyEmptyBuffer() const
-    {
-        return 0;
-    }
+    return false;
+}
+
+//==============================================================================
+const AudioProcessorGraph::Connection* AudioProcessorGraph::getConnectionBetween (const uint32 sourceNodeId,
+                                                                                  const int sourceChannelIndex,
+                                                                                  const uint32 destNodeId,
+                                                                                  const int destChannelIndex) const
+{
+    const Connection c (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex);
+    GraphRenderingOps::ConnectionSorter sorter;
+    return connections [connections.indexOfSorted (sorter, &c)];
+}

-    int getBufferContaining (const uint32 nodeId, const int outputChannel) const
+bool AudioProcessorGraph::isConnected (const uint32 possibleSourceNodeId,
+                                       const uint32 possibleDestNodeId) const
+{
+    for (int i = connections.size(); --i >= 0;)
     {
-        if (outputChannel == AudioProcessorGraph::midiChannelIndex)
-        {
-            for (int i = midiNodeIds.size(); --i >= 0;)
-                if (midiNodeIds.getUnchecked(i) == nodeId)
-                    return i;
-        }
-        else
+        const Connection* const c = connections.getUnchecked(i);
+
+        if (c->sourceNodeId == possibleSourceNodeId
+             && c->destNodeId == possibleDestNodeId)
         {
-            for (int i = nodeIds.size(); --i >= 0;)
-                if (nodeIds.getUnchecked(i) == nodeId
-                     && channels.getUnchecked(i) == outputChannel)
-                    return i;
+            return true;
         }
-
-        return -1;
     }

-    void markAnyUnusedBuffersAsFree (const int stepIndex)
+    return false;
+}
+
+bool AudioProcessorGraph::canConnect (const uint32 sourceNodeId,
+                                      const int sourceChannelIndex,
+                                      const uint32 destNodeId,
+                                      const int destChannelIndex) const
+{
+    if (sourceChannelIndex < 0
+         || destChannelIndex < 0
+         || sourceNodeId == destNodeId
+         || (destChannelIndex == midiChannelIndex) != (sourceChannelIndex == midiChannelIndex))
+        return false;
+
+    const Node* const source = getNodeForId (sourceNodeId);
+
+    if (source == nullptr
+         || (sourceChannelIndex != midiChannelIndex && sourceChannelIndex >= source->processor->getNumOutputChannels())
+         || (sourceChannelIndex == midiChannelIndex && ! source->processor->producesMidi()))
+        return false;
+
+    const Node* const dest = getNodeForId (destNodeId);
+
+    if (dest == nullptr
+         || (destChannelIndex != midiChannelIndex && destChannelIndex >= dest->processor->getNumInputChannels())
+         || (destChannelIndex == midiChannelIndex && ! dest->processor->acceptsMidi()))
+        return false;
+
+    return getConnectionBetween (sourceNodeId, sourceChannelIndex,
+                                 destNodeId, destChannelIndex) == nullptr;
+}
+
+bool AudioProcessorGraph::addConnection (const uint32 sourceNodeId,
+                                         const int sourceChannelIndex,
+                                         const uint32 destNodeId,
+                                         const int destChannelIndex)
+{
+    if (! canConnect (sourceNodeId, sourceChannelIndex, destNodeId, destChannelIndex))
+        return false;
+
+    GraphRenderingOps::ConnectionSorter sorter;
+    connections.addSorted (sorter, new Connection (sourceNodeId, sourceChannelIndex,
+                                                   destNodeId, destChannelIndex));
+    triggerAsyncUpdate();
+
+    return true;
+}
+
+void AudioProcessorGraph::removeConnection (const int index)
+{
+    connections.remove (index);
+    triggerAsyncUpdate();
+}
+
+bool AudioProcessorGraph::removeConnection (const uint32 sourceNodeId, const int sourceChannelIndex,
+                                            const uint32 destNodeId, const int destChannelIndex)
+{
+    bool doneAnything = false;
+
+    for (int i = connections.size(); --i >= 0;)
     {
-        int i;
-        for (i = 0; i < nodeIds.size(); ++i)
-        {
-            if (isNodeBusy (nodeIds.getUnchecked(i))
-                 && ! isBufferNeededLater (stepIndex, -1,
-                                           nodeIds.getUnchecked(i),
-                                           channels.getUnchecked(i)))
-            {
-                nodeIds.set (i, (uint32) freeNodeID);
-            }
-        }
+        const Connection* const c = connections.getUnchecked(i);

-        for (i = 0; i < midiNodeIds.size(); ++i)
+        if (c->sourceNodeId == sourceNodeId
+             && c->destNodeId == destNodeId
+             && c->sourceChannelIndex == sourceChannelIndex
+             && c->destChannelIndex == destChannelIndex)
         {
-            if (isNodeBusy (midiNodeIds.getUnchecked(i))
-                 && ! isBufferNeededLater (stepIndex, -1,
-                                           midiNodeIds.getUnchecked(i),
-                                           AudioProcessorGraph::midiChannelIndex))
-            {
-                midiNodeIds.set (i, (uint32) freeNodeID);
-            }
+            removeConnection (i);
+            doneAnything = true;
+            triggerAsyncUpdate();
         }
     }

-    bool isBufferNeededLater (int stepIndexToSearchFrom,
-                              int inputChannelOfIndexToIgnore,
-                              const uint32 nodeId,
-                              const int outputChanIndex) const
-    {
-        while (stepIndexToSearchFrom < orderedNodes.size())
-        {
-            const AudioProcessorGraph::Node* const node = (const AudioProcessorGraph::Node*) orderedNodes.getUnchecked (stepIndexToSearchFrom);
+    return doneAnything;
+}

-            if (outputChanIndex == AudioProcessorGraph::midiChannelIndex)
-            {
-                if (inputChannelOfIndexToIgnore != AudioProcessorGraph::midiChannelIndex
-                     && graph.getConnectionBetween (nodeId, AudioProcessorGraph::midiChannelIndex,
-                                                    node->id, AudioProcessorGraph::midiChannelIndex) != nullptr)
-                    return true;
-            }
-            else
-            {
-                for (int i = 0; i < node->getProcessor()->getNumInputChannels(); ++i)
-                    if (i != inputChannelOfIndexToIgnore
-                         && graph.getConnectionBetween (nodeId, outputChanIndex,
-                                                        node->id, i) != nullptr)
-                        return true;
-            }
+bool AudioProcessorGraph::disconnectNode (const uint32 nodeId)
+{
+    bool doneAnything = false;

-            inputChannelOfIndexToIgnore = -1;
-            ++stepIndexToSearchFrom;
-        }
+    for (int i = connections.size(); --i >= 0;)
+    {
+        const Connection* const c = connections.getUnchecked(i);

-        return false;
+        if (c->sourceNodeId == nodeId || c->destNodeId == nodeId)
+        {
+            removeConnection (i);
+            doneAnything = true;
+            triggerAsyncUpdate();
+        }
     }

-    void markBufferAsContaining (int bufferNum, uint32 nodeId, int outputIndex)
+    return doneAnything;
+}
+
+bool AudioProcessorGraph::removeIllegalConnections()
+{
+    bool doneAnything = false;
+
+    for (int i = connections.size(); --i >= 0;)
     {
-        if (outputIndex == AudioProcessorGraph::midiChannelIndex)
-        {
-            jassert (bufferNum > 0 && bufferNum < midiNodeIds.size());
+        const Connection* const c = connections.getUnchecked(i);

-            midiNodeIds.set (bufferNum, nodeId);
-        }
-        else
-        {
-            jassert (bufferNum >= 0 && bufferNum < nodeIds.size());
+        const Node* const source = getNodeForId (c->sourceNodeId);
+        const Node* const dest = getNodeForId (c->destNodeId);

-            nodeIds.set (bufferNum, nodeId);
-            channels.set (bufferNum, outputIndex);
+        if (source == nullptr || dest == nullptr
+             || (c->sourceChannelIndex != midiChannelIndex
+                   && ! isPositiveAndBelow (c->sourceChannelIndex, source->processor->getNumOutputChannels()))
+             || (c->sourceChannelIndex == midiChannelIndex
+                   && ! source->processor->producesMidi())
+             || (c->destChannelIndex != midiChannelIndex
+                   && ! isPositiveAndBelow (c->destChannelIndex, dest->processor->getNumInputChannels()))
+             || (c->destChannelIndex == midiChannelIndex
+                   && ! dest->processor->acceptsMidi()))
+        {
+            removeConnection (i);
+            doneAnything = true;
+            triggerAsyncUpdate();
         }
     }

-    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (RenderingOpSequenceCalculator);
-};
-
+    return doneAnything;
 }

 //==============================================================================
@@ -980,21 +1092,22 @@ void AudioProcessorGraph::buildRenderingSequence()
     Array <void*> orderedNodes;

-    int i;
-    for (i = 0; i < nodes.size(); ++i)
     {
-        Node* const node = nodes.getUnchecked(i);
+        const GraphRenderingOps::ConnectionLookupTable table (connections);

-        node->prepare (getSampleRate(), getBlockSize(), this);
+        for (int i = 0; i < nodes.size(); ++i)
+        {
+            Node* const node = nodes.getUnchecked(i);

-        int j = 0;
-        for (; j < orderedNodes.size(); ++j)
-            if (isAnInputTo (node->id,
-                             ((Node*) orderedNodes.getUnchecked (j))->id,
-                             nodes.size() + 1))
-                break;
+            node->prepare (getSampleRate(), getBlockSize(), this);

-        orderedNodes.insert (j, node);
+            int j = 0;
+            for (; j < orderedNodes.size(); ++j)
+                if (table.isAnInputTo (node->nodeId, ((Node*) orderedNodes.getUnchecked(j))->nodeId))
+                    break;
+
+            orderedNodes.insert (j, node);
+        }
     }

     GraphRenderingOps::RenderingOpSequenceCalculator calculator (*this, orderedNodes, newRenderingOps);
@@ -1162,38 +1275,38 @@ void AudioProcessorGraph::AudioGraphIOProcessor::processBlock (AudioSampleBuffer
     switch (type)
     {
-    case audioOutputNode:
-    {
-        for (int i = jmin (graph->currentAudioOutputBuffer.getNumChannels(),
-                           buffer.getNumChannels()); --i >= 0;)
+        case audioOutputNode:
         {
-            graph->currentAudioOutputBuffer.addFrom (i, 0, buffer, i, 0, buffer.getNumSamples());
-        }
+            for (int i = jmin (graph->currentAudioOutputBuffer.getNumChannels(),
+                               buffer.getNumChannels()); --i >= 0;)
+            {
+                graph->currentAudioOutputBuffer.addFrom (i, 0, buffer, i, 0, buffer.getNumSamples());
+            }

-        break;
-    }
+            break;
+        }

-    case audioInputNode:
-    {
-        for (int i = jmin (graph->currentAudioInputBuffer->getNumChannels(),
-                           buffer.getNumChannels()); --i >= 0;)
+        case audioInputNode:
         {
-            buffer.copyFrom (i, 0, *graph->currentAudioInputBuffer, i, 0, buffer.getNumSamples());
-        }
+            for (int i = jmin (graph->currentAudioInputBuffer->getNumChannels(),
+                               buffer.getNumChannels()); --i >= 0;)
+            {
+                buffer.copyFrom (i, 0, *graph->currentAudioInputBuffer, i, 0, buffer.getNumSamples());
+            }

-        break;
-    }
+            break;
+        }

-    case midiOutputNode:
-        graph->currentMidiOutputBuffer.addEvents (midiMessages, 0, buffer.getNumSamples(), 0);
-        break;
+        case midiOutputNode:
+            graph->currentMidiOutputBuffer.addEvents (midiMessages, 0, buffer.getNumSamples(), 0);
+            break;

-    case midiInputNode:
-        midiMessages.addEvents (*graph->currentMidiInputBuffer, 0, buffer.getNumSamples(), 0);
-        break;
+        case midiInputNode:
+            midiMessages.addEvents (*graph->currentMidiInputBuffer, 0, buffer.getNumSamples(), 0);
+            break;

-    default:
-        break;
+        default:
+            break;
     }
 }
@@ -1241,15 +1354,8 @@ bool AudioProcessorGraph::AudioGraphIOProcessor::isOutputChannelStereoPair (int
     return isInputChannelStereoPair (index);
 }

-bool AudioProcessorGraph::AudioGraphIOProcessor::isInput() const
-{
-    return type == audioInputNode || type == midiInputNode;
-}
-
-bool AudioProcessorGraph::AudioGraphIOProcessor::isOutput() const
-{
-    return type == audioOutputNode || type == midiOutputNode;
-}
+bool AudioProcessorGraph::AudioGraphIOProcessor::isInput() const   { return type == audioInputNode  || type == midiInputNode; }
+bool AudioProcessorGraph::AudioGraphIOProcessor::isOutput() const  { return type == audioOutputNode || type == midiOutputNode; }

 bool AudioProcessorGraph::AudioGraphIOProcessor::hasEditor() const                  { return false; }
 AudioProcessorEditor* AudioProcessorGraph::AudioGraphIOProcessor::createEditor()    { return nullptr; }
@@ -1266,15 +1372,10 @@ int AudioProcessorGraph::AudioGraphIOProcessor::getCurrentProgram()
 void AudioProcessorGraph::AudioGraphIOProcessor::setCurrentProgram (int)            { }
 const String AudioProcessorGraph::AudioGraphIOProcessor::getProgramName (int)       { return String::empty; }

-void AudioProcessorGraph::AudioGraphIOProcessor::changeProgramName (int, const String&) { }
-
-void AudioProcessorGraph::AudioGraphIOProcessor::getStateInformation (JUCE_NAMESPACE::MemoryBlock&)
-{
-}
+void AudioProcessorGraph::AudioGraphIOProcessor::changeProgramName (int, const String&) {}

-void AudioProcessorGraph::AudioGraphIOProcessor::setStateInformation (const void*, int)
-{
-}
+void AudioProcessorGraph::AudioGraphIOProcessor::getStateInformation (JUCE_NAMESPACE::MemoryBlock&) {}
+void AudioProcessorGraph::AudioGraphIOProcessor::setStateInformation (const void*, int) {}

 void AudioProcessorGraph::AudioGraphIOProcessor::setParentGraph (AudioProcessorGraph* const newGraph)
 {
diff --git a/src/audio/processors/juce_AudioProcessorGraph.h b/src/audio/processors/juce_AudioProcessorGraph.h
index 94ff980b30..880b6aa6d0 100644
--- a/src/audio/processors/juce_AudioProcessorGraph.h
+++ b/src/audio/processors/juce_AudioProcessorGraph.h
@@ -71,10 +71,9 @@ public:
     public:
         //==============================================================================
        /** The ID number assigned to this node.
-
            This is assigned by the graph that owns it, and can't be changed.
        */
-        const uint32 id;
+        const uint32 nodeId;

        /** The actual processor object that this node represents. */
        AudioProcessor* getProcessor() const noexcept           { return processor; }
@@ -99,7 +98,7 @@ public:
        const ScopedPointer<AudioProcessor> processor;
        bool isPrepared;

-        Node (uint32 id, AudioProcessor* processor);
+        Node (uint32 nodeId, AudioProcessor* processor) noexcept;

        void prepare (double sampleRate, int blockSize, AudioProcessorGraph* graph);
        void unprepare();
@@ -115,6 +114,10 @@ public:
     struct JUCE_API  Connection
     {
     public:
+        //==============================================================================
+        Connection (uint32 sourceNodeId, int sourceChannelIndex,
+                    uint32 destNodeId, int destChannelIndex) noexcept;
+
        //==============================================================================
        /** The ID number of the node which is the input source for this connection.
            @see AudioProcessorGraph::getNodeForId
diff --git a/src/events/juce_MessageManager.cpp b/src/events/juce_MessageManager.cpp
index f47b73577a..55be9fe115 100644
--- a/src/events/juce_MessageManager.cpp
+++ b/src/events/juce_MessageManager.cpp
@@ -42,10 +42,9 @@ static const int quitMessageId = 0xfffff321;
 MessageManager::MessageManager() noexcept
   : quitMessagePosted (false),
     quitMessageReceived (false),
+    messageThreadId (Thread::getCurrentThreadId()),
     threadWithLock (0)
 {
-    messageThreadId = Thread::getCurrentThreadId();
-
     if (JUCEApplication::isStandaloneApp())
         Thread::setCurrentThreadName ("Juce Message Thread");
 }
diff --git a/src/events/juce_MessageManager.h b/src/events/juce_MessageManager.h
index 04bc5c0b77..306ee64fa4 100644
--- a/src/events/juce_MessageManager.h
+++ b/src/events/juce_MessageManager.h
@@ -106,8 +106,7 @@ public:
        @returns the value that the callback function returns.
        @see MessageManagerLock
    */
-    void* callFunctionOnMessageThread (MessageCallbackFunction* callback,
-                                       void* userData);
+    void* callFunctionOnMessageThread (MessageCallbackFunction* callback, void* userData);

    /** Returns true if the caller-thread is the message thread. */
    bool isThisTheMessageThread() const noexcept;
@@ -157,12 +156,12 @@ public:
    void deregisterBroadcastListener (ActionListener* listener);

    //==============================================================================
-    /** @internal */
+   #ifndef DOXYGEN
+    // Internal methods - do not use!
    void deliverMessage (Message*);
-    /** @internal */
    void deliverBroadcastMessage (const String&);
-    /** @internal */
    ~MessageManager() noexcept;
+   #endif

private:
    //==============================================================================
@@ -294,12 +293,10 @@ public:
    //==============================================================================
    /** Returns true if the lock was successfully acquired.
-
        (See the constructor that takes a Thread for more info).
    */
    bool lockWasGained() const noexcept     { return locked; }

-
private:
    class BlockingMessage;
    friend class ReferenceCountedObjectPtr<BlockingMessage>;
diff --git a/src/gui/components/windows/juce_DocumentWindow.h b/src/gui/components/windows/juce_DocumentWindow.h
index 1657da4897..57a5470eaa 100644
--- a/src/gui/components/windows/juce_DocumentWindow.h
+++ b/src/gui/components/windows/juce_DocumentWindow.h
@@ -229,6 +229,7 @@ public:
    };

    //==============================================================================
+   #ifndef DOXYGEN
    /** @internal */
    void paint (Graphics& g);
    /** @internal */
@@ -251,6 +252,7 @@ public:
    void parentHierarchyChanged();
    /** @internal */
    const Rectangle<int> getTitleBarArea();
+   #endif

private:
    //==============================================================================
diff --git a/src/gui/components/windows/juce_ResizableWindow.h b/src/gui/components/windows/juce_ResizableWindow.h
index 6c9d8abd98..54a5ba475d 100644
--- a/src/gui/components/windows/juce_ResizableWindow.h
+++ b/src/gui/components/windows/juce_ResizableWindow.h
@@ -340,7 +340,7 @@ protected:
    /** @internal */
    int getDesktopWindowStyleFlags() const;

-#if JUCE_DEBUG
+   #if JUCE_DEBUG
    /** Overridden to warn people about adding components directly to this component
        instead of using setContentOwned().

@@ -355,7 +355,7 @@ protected:
        a base-class method call to Component::addAndMakeVisible(), to side-step this
        warning.
    */
    void addAndMakeVisible (Component* child, int zOrder = -1);
-#endif
+   #endif

    ScopedPointer<ResizableCornerComponent> resizableCorner;
    ScopedPointer<ResizableBorderComponent> resizableBorder;
diff --git a/src/gui/graphics/imaging/juce_CameraDevice.h b/src/gui/graphics/imaging/juce_CameraDevice.h
index a4895377d2..bfb329e83c 100644
--- a/src/gui/graphics/imaging/juce_CameraDevice.h
+++ b/src/gui/graphics/imaging/juce_CameraDevice.h
@@ -140,8 +140,9 @@ public:

protected:
-    /** @internal */
+   #ifndef DOXYGEN
    CameraDevice (const String& name, int index);
+   #endif

private:
    void* internal;
diff --git a/src/threads/juce_TimeSliceThread.h b/src/threads/juce_TimeSliceThread.h
index 3252c5761a..606db33535 100644
--- a/src/threads/juce_TimeSliceThread.h
+++ b/src/threads/juce_TimeSliceThread.h
@@ -123,8 +123,9 @@ public:
    TimeSliceClient* getClient (int index) const;

    //==============================================================================
-    /** @internal */
+   #ifndef DOXYGEN
    void run();
+   #endif

    //==============================================================================
private:
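
Reviewer note, not part of the patch: the main client-visible change above is the rename of AudioProcessorGraph::Node::id to Node::nodeId. The sketch below is a minimal illustration of the affected API (addNode, addConnection, Node::nodeId), assuming this revision's headers; the include path, the helper name buildPassThroughGraph and the AudioGraphIOProcessor constructor taking an IODeviceType are assumptions, not something introduced by this patch.

// Minimal sketch, for illustration only: wires an audio-input node to an
// audio-output node and reads back the renamed Node::nodeId member.
#include "juce_amalgamated.h"   // assumption: amalgamated include for this revision

static void buildPassThroughGraph (AudioProcessorGraph& graph)
{
    typedef AudioProcessorGraph::AudioGraphIOProcessor IOProc;

    // Passing 0 asks the graph to assign the next free nodeId (see addNode() above).
    AudioProcessorGraph::Node* const in  = graph.addNode (new IOProc (IOProc::audioInputNode),  0);
    AudioProcessorGraph::Node* const out = graph.addNode (new IOProc (IOProc::audioOutputNode), 0);

    // Node::id is now Node::nodeId (see the juce_AudioProcessorGraph.h hunk).
    // addConnection() returns false when canConnect() rejects the pair, so real code should check it.
    graph.addConnection (in->nodeId, 0, out->nodeId, 0);
    graph.addConnection (in->nodeId, 1, out->nodeId, 1);
}

Any client code that still reads node->id, as the plugin-host FilterGraph and GraphEditorPanel did before this patch, will not compile until it is updated to nodeId.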