
Replace juce7 with submodule as well, update to 7.0.9

Signed-off-by: falkTX <falktx@falktx.com>
Branch: pull/128/head
Author: falkTX, 1 year ago
Commit: 5541dda8c8
Signed by: falkTX <falktx@falktx.com> (GPG Key ID: CDBAA37ABC74FBA0)
100 changed files with 29 additions and 29809 deletions
  1. +3 -0  .gitmodules
  2. +1 -1  README.md
  3. +16 -3  libs/juce-plugin/JucePluginMain.cpp
  4. +1 -1  libs/juce-plugin/JucePluginUtils.cpp
  5. +7 -3  libs/juce7/meson.build
  6. +1 -0  libs/juce7/source
  7. +0 -16  libs/juce7/source/README.md
  8. +0 -593  libs/juce7/source/modules/juce_audio_basics/audio_play_head/juce_AudioPlayHead.h
  9. +0 -741  libs/juce7/source/modules/juce_audio_basics/buffers/juce_AudioChannelSet.cpp
  10. +0 -525  libs/juce7/source/modules/juce_audio_basics/buffers/juce_AudioChannelSet.h
  11. +0 -635  libs/juce7/source/modules/juce_audio_basics/buffers/juce_AudioDataConverters.cpp
  12. +0 -857  libs/juce7/source/modules/juce_audio_basics/buffers/juce_AudioDataConverters.h
  13. +0 -99  libs/juce7/source/modules/juce_audio_basics/buffers/juce_AudioProcessLoadMeasurer.cpp
  14. +0 -109  libs/juce7/source/modules/juce_audio_basics/buffers/juce_AudioProcessLoadMeasurer.h
  15. +0 -1279  libs/juce7/source/modules/juce_audio_basics/buffers/juce_AudioSampleBuffer.h
  16. +0 -1712  libs/juce7/source/modules/juce_audio_basics/buffers/juce_FloatVectorOperations.cpp
  17. +0 -269  libs/juce7/source/modules/juce_audio_basics/buffers/juce_FloatVectorOperations.h
  18. +0 -99  libs/juce7/source/modules/juce_audio_basics/juce_audio_basics.cpp
  19. +0 -123  libs/juce7/source/modules/juce_audio_basics/juce_audio_basics.h
  20. +0 -23  libs/juce7/source/modules/juce_audio_basics/juce_audio_basics.mm
  21. +0 -320  libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiBuffer.cpp
  22. +0 -345  libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiBuffer.h
  23. +0 -188  libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiDataConcatenator.h
  24. +0 -794  libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiFile.cpp
  25. +0 -198  libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiFile.h
  26. +0 -173  libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiKeyboardState.cpp
  27. +0 -195  libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiKeyboardState.h
  28. +0 -1331  libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiMessage.cpp
  29. +0 -986  libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiMessage.h
  30. +0 -870  libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiMessageSequence.cpp
  31. +0 -315  libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiMessageSequence.h
  32. +0 -375  libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiRPN.cpp
  33. +0 -153  libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiRPN.h
  34. +0 -47  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMP.h
  35. +0 -330  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPConversion.h
  36. +0 -169  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPConverters.h
  37. +0 -202  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPDispatcher.h
  38. +0 -538  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPFactory.h
  39. +0 -130  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPIterator.h
  40. +0 -217  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPMidi1ToBytestreamTranslator.h
  41. +0 -195  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPMidi1ToMidi2DefaultTranslator.cpp
  42. +0 -191  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPMidi1ToMidi2DefaultTranslator.h
  43. +0 -48  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPProtocols.h
  44. +0 -46  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPReceiver.h
  45. +0 -53  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPSysEx7.cpp
  46. +0 -77  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPSysEx7.h
  47. +0 -59  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPUtils.cpp
  48. +0 -117  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPUtils.h
  49. +0 -35  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPView.cpp
  50. +0 -92  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPView.h
  51. +0 -1018  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMP_test.cpp
  52. +0 -193  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPacket.h
  53. +0 -96  libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPackets.h
  54. +0 -2352  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEInstrument.cpp
  55. +0 -426  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEInstrument.h
  56. +0 -238  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEMessages.cpp
  57. +0 -116  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEMessages.h
  58. +0 -127  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPENote.cpp
  59. +0 -184  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPENote.h
  60. +0 -341  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPESynthesiser.cpp
  61. +0 -311  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPESynthesiser.h
  62. +0 -375  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPESynthesiserBase.cpp
  63. +0 -216  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPESynthesiserBase.h
  64. +0 -50  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPESynthesiserVoice.cpp
  65. +0 -191  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPESynthesiserVoice.h
  66. +0 -555  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEUtils.cpp
  67. +0 -158  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEUtils.h
  68. +0 -193  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEValue.cpp
  69. +0 -103  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEValue.h
  70. +0 -396  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEZoneLayout.cpp
  71. +0 -241  libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEZoneLayout.h
  72. +0 -344  libs/juce7/source/modules/juce_audio_basics/native/juce_mac_CoreAudioLayouts.h
  73. +0 -80  libs/juce7/source/modules/juce_audio_basics/native/juce_mac_CoreAudioTimeConversions.h
  74. +0 -179  libs/juce7/source/modules/juce_audio_basics/sources/juce_AudioSource.h
  75. +0 -329  libs/juce7/source/modules/juce_audio_basics/sources/juce_BufferingAudioSource.cpp
  76. +0 -124  libs/juce7/source/modules/juce_audio_basics/sources/juce_BufferingAudioSource.h
  77. +0 -187  libs/juce7/source/modules/juce_audio_basics/sources/juce_ChannelRemappingAudioSource.cpp
  78. +0 -141  libs/juce7/source/modules/juce_audio_basics/sources/juce_ChannelRemappingAudioSource.h
  79. +0 -80  libs/juce7/source/modules/juce_audio_basics/sources/juce_IIRFilterAudioSource.cpp
  80. +0 -68  libs/juce7/source/modules/juce_audio_basics/sources/juce_IIRFilterAudioSource.h
  81. +0 -261  libs/juce7/source/modules/juce_audio_basics/sources/juce_MemoryAudioSource.cpp
  82. +0 -82  libs/juce7/source/modules/juce_audio_basics/sources/juce_MemoryAudioSource.h
  83. +0 -158  libs/juce7/source/modules/juce_audio_basics/sources/juce_MixerAudioSource.cpp
  84. +0 -99  libs/juce7/source/modules/juce_audio_basics/sources/juce_MixerAudioSource.h
  85. +0 -76  libs/juce7/source/modules/juce_audio_basics/sources/juce_PositionableAudioSource.h
  86. +0 -265  libs/juce7/source/modules/juce_audio_basics/sources/juce_ResamplingAudioSource.cpp
  87. +0 -106  libs/juce7/source/modules/juce_audio_basics/sources/juce_ResamplingAudioSource.h
  88. +0 -83  libs/juce7/source/modules/juce_audio_basics/sources/juce_ReverbAudioSource.cpp
  89. +0 -74  libs/juce7/source/modules/juce_audio_basics/sources/juce_ReverbAudioSource.h
  90. +0 -78  libs/juce7/source/modules/juce_audio_basics/sources/juce_ToneGeneratorAudioSource.cpp
  91. +0 -71  libs/juce7/source/modules/juce_audio_basics/sources/juce_ToneGeneratorAudioSource.h
  92. +0 -585  libs/juce7/source/modules/juce_audio_basics/synthesisers/juce_Synthesiser.cpp
  93. +0 -637  libs/juce7/source/modules/juce_audio_basics/synthesisers/juce_Synthesiser.h
  94. +0 -297  libs/juce7/source/modules/juce_audio_basics/utilities/juce_ADSR.h
  95. +0 -257  libs/juce7/source/modules/juce_audio_basics/utilities/juce_ADSR_test.cpp
  96. +0 -112  libs/juce7/source/modules/juce_audio_basics/utilities/juce_Decibels.h
  97. +0 -497  libs/juce7/source/modules/juce_audio_basics/utilities/juce_GenericInterpolator.h
  98. +0 -340  libs/juce7/source/modules/juce_audio_basics/utilities/juce_IIRFilter.cpp
  99. +0 -254  libs/juce7/source/modules/juce_audio_basics/utilities/juce_IIRFilter.h
  100. +0 -188  libs/juce7/source/modules/juce_audio_basics/utilities/juce_Interpolators.cpp

.gitmodules (+3, -0)

@@ -4,3 +4,6 @@
[submodule "libs/juce6.1/source"]
path = libs/juce6.1/source
url = https://github.com/DISTRHO/JUCE.git
[submodule "libs/juce7/source"]
path = libs/juce7/source
url = https://github.com/DISTRHO/JUCE.git
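
This is the entry that `git submodule add` records; the equivalent command for the new JUCE 7 submodule would be roughly the following (a sketch using standard git, not taken from the repo's docs):

```
# register the DISTRHO JUCE fork as the juce7 source tree
git submodule add https://github.com/DISTRHO/JUCE.git libs/juce7/source
```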

README.md (+1, -1)

@@ -27,7 +27,7 @@ GNU/Linux: (development versions of these)
In order to build and install the plugins, just run the usual steps for a meson project:

```
meson build --buildtype release
meson setup build --buildtype release
ninja -C build
ninja -C build install
```
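
With the juce7 sources now pulled in as a submodule, a fresh checkout also needs the submodules fetched before configuring. A minimal sketch of the full sequence, using only standard git and Meson commands beyond what the README shows:

```
git submodule update --init --recursive   # fetch the JUCE submodules
meson setup build --buildtype release
ninja -C build
ninja -C build install
```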

libs/juce-plugin/JucePluginMain.cpp (+16, -3)

@@ -19,23 +19,36 @@

#if JucePlugin_Build_AU
#include "modules/juce_audio_plugin_client/AU/juce_AU_Wrapper.mm"

#elif JucePlugin_Build_LV2
#if JUCE_MAJOR_VERSION >= 7
#include "modules/juce_audio_plugin_client/LV2/juce_LV2_Client.cpp"
#include "modules/juce_audio_plugin_client/juce_audio_plugin_client_LV2.cpp"
#else
#include "modules/juce_audio_plugin_client/LV2/juce_LV2_Wrapper.cpp"
#endif

#elif JucePlugin_Build_RTAS
#include "modules/juce_audio_plugin_client/RTAS/juce_RTAS_Wrapper.cpp"

#elif JucePlugin_Build_VST
// we need to include 'juce_VSTMidiEventList' before 'juce_VST_Wrapper'
namespace Vst2 {
#include "modules/juce_audio_processors/format_types/juce_VSTInterface.h"
}
#include "modules/juce_audio_processors/format_types/juce_VSTMidiEventList.h"
#include "modules/juce_audio_plugin_client/VST/juce_VST_Wrapper.cpp"
#if JUCE_MAJOR_VERSION >= 7
#include "modules/juce_audio_plugin_client/juce_audio_plugin_client_VST2.cpp"
#else
#include "modules/juce_audio_plugin_client/VST/juce_VST_Wrapper.cpp"
#endif

#elif JucePlugin_Build_VST3
#include "modules/juce_audio_plugin_client/VST3/juce_VST3_Wrapper.cpp"
#if JUCE_MAJOR_VERSION >= 7
#include "modules/juce_audio_plugin_client/juce_audio_plugin_client_VST3.cpp"
#else
#include "modules/juce_audio_plugin_client/VST3/juce_VST3_Wrapper.cpp"
#endif

#elif JucePlugin_Build_Standalone
#include "juce_StandaloneFilterApplication.cpp"
#else
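
The `JUCE_MAJOR_VERSION >= 7` branches above assume the macro is already defined by JUCE headers included earlier in this file (not shown in this hunk). A defensive check along these lines could catch a broken include path or an unfetched submodule at compile time (a sketch, not part of the commit):

```cpp
// If JUCE_MAJOR_VERSION were undefined, "#if JUCE_MAJOR_VERSION >= 7" would silently
// evaluate to 0 >= 7 and pick the pre-7 wrappers, so fail loudly instead.
#ifndef JUCE_MAJOR_VERSION
 #error "JUCE_MAJOR_VERSION is not defined - check the JUCE submodule checkout and include paths"
#endif
```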


libs/juce-plugin/JucePluginUtils.cpp (+1, -1)

@@ -23,7 +23,7 @@
#error Invalid configuration
#endif

#if ! JucePlugin_Build_Standalone
#if ! JucePlugin_Build_Standalone && JUCE_MAJOR_VERSION < 7
#include "modules/juce_audio_plugin_client/utility/juce_PluginUtilities.cpp"
#endif



libs/juce7/meson.build (+7, -3)

@@ -35,9 +35,10 @@ juce7_devices_srcs = [
]

juce7_extra_cpp_args = [
'-std=gnu++14',
'-std=gnu++17',
'-Wno-non-virtual-dtor',
]
juce7_extra_dependencies = []
juce7_extra_include_dirs = []

if os_windows
@@ -49,6 +50,9 @@ if os_windows
'-mpreferred-stack-boundary=2',
]
endif
juce7_extra_dependencies += [
cc.find_library('dxgi'),
]
endif

lib_juce7 = static_library('juce7',
@@ -61,7 +65,7 @@ lib_juce7 = static_library('juce7',
juce7_extra_include_dirs
],
cpp_args: build_flags_cpp + juce7_extra_cpp_args,
dependencies: dependencies,
dependencies: dependencies + juce7_extra_dependencies,
pic: true,
install: false,
)
@@ -75,7 +79,7 @@ lib_juce7_device = static_library('juce7-devices',
juce7_extra_include_dirs
],
cpp_args: build_flags_cpp + juce7_extra_cpp_args + ['-DJUCE_MODULE_AVAILABLE_juce_audio_devices=1'],
dependencies: dependencies_devices,
dependencies: dependencies_devices + juce7_extra_dependencies,
pic: true,
install: false,
)
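
With `juce7_extra_dependencies` threaded through both static libraries, downstream plugin targets only need to link the resulting libraries and pick up the transitive deps (e.g. dxgi on Windows). If the repo exposed this as a Meson dependency object, it might look roughly like the following (illustrative sketch; the variable and directory names are assumptions, not taken from this diff):

```
# Bundle the static library, its include path and transitive deps so that
# plugin targets can simply list this object in their own 'dependencies:'.
dep_juce7 = declare_dependency(
    link_with: lib_juce7,
    include_directories: include_directories('source/modules'),
    dependencies: dependencies + juce7_extra_dependencies,
)
```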


libs/juce7/source (+1, -0)

@@ -0,0 +1 @@
Subproject commit 3f54e12a6e1b04d4fa11e21a6af951717f83963a
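
This gitlink pins `libs/juce7/source` to one specific commit of the DISTRHO JUCE fork (the 7.0.9 update named in the commit message). Bumping it later follows the usual submodule workflow, sketched below with a placeholder ref since the fork's branch and tag names are not shown here:

```
cd libs/juce7/source
git fetch origin
git checkout <new-juce-commit-or-tag>   # placeholder: pick the desired JUCE revision
cd ../../..
git add libs/juce7/source
git commit -m "Update JUCE 7 submodule"
```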

libs/juce7/source/README.md (+0, -16)

@@ -1,16 +0,0 @@
# The JUCE 7 Library

**The JUCE fork provided by this repository uses GPLv3 license.
See [GNU General Public License v.3](https://www.gnu.org/licenses/gpl-3.0.en.html)
for more details.**

JUCE is an open-source cross-platform C++ application framework for creating high quality
desktop and mobile applications, including VST, VST3, AU, AUv3, AAX and LV2 audio plug-ins
and plug-in hosts.

JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING WARRANTY OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE, ARE DISCLAIMED.

The juce_audio_basics, juce_audio_devices, juce_core and juce_events modules
are permissively licensed under the terms of the [ISC license](http://www.isc.org/downloads/software-support-policy/isc-license).

libs/juce7/source/modules/juce_audio_basics/audio_play_head/juce_AudioPlayHead.h (+0, -593)

@@ -1,593 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
A subclass of AudioPlayHead can supply information about the position and
status of a moving play head during audio playback.
One of these can be supplied to an AudioProcessor object so that it can find
out about the position of the audio that it is rendering.
@see AudioProcessor::setPlayHead, AudioProcessor::getPlayHead
@tags{Audio}
*/
class JUCE_API AudioPlayHead
{
protected:
//==============================================================================
AudioPlayHead() = default;
public:
virtual ~AudioPlayHead() = default;
//==============================================================================
/** Frame rate types. */
enum FrameRateType
{
fps23976 = 0,
fps24 = 1,
fps25 = 2,
fps2997 = 3,
fps30 = 4,
fps2997drop = 5,
fps30drop = 6,
fps60 = 7,
fps60drop = 8,
fpsUnknown = 99
};
/** More descriptive frame rate type. */
class JUCE_API FrameRate
{
public:
/** Creates a frame rate with a base rate of 0. */
FrameRate() = default;
/** Creates a FrameRate instance from a FrameRateType. */
FrameRate (FrameRateType type) : FrameRate (fromType (type)) {}
/** Gets the FrameRateType that matches the state of this FrameRate.
Returns fpsUnknown if this FrameRate cannot be represented by any of the
other enum fields.
*/
FrameRateType getType() const
{
switch (base)
{
case 24: return pulldown ? fps23976 : fps24;
case 25: return fps25;
case 30: return pulldown ? (drop ? fps2997drop : fps2997)
: (drop ? fps30drop : fps30);
case 60: return drop ? fps60drop : fps60;
}
return fpsUnknown;
}
/** Returns the plain rate, without taking pulldown into account. */
int getBaseRate() const { return base; }
/** Returns true if drop-frame timecode is in use. */
bool isDrop() const { return drop; }
/** Returns true if the effective framerate is actually equal to the base rate divided by 1.001 */
bool isPullDown() const { return pulldown; }
/** Returns the actual rate described by this object, taking pulldown into account. */
double getEffectiveRate() const { return pulldown ? (double) base / 1.001 : (double) base; }
/** Returns a copy of this object with the specified base rate. */
JUCE_NODISCARD FrameRate withBaseRate (int x) const { return with (&FrameRate::base, x); }
/** Returns a copy of this object with drop frames enabled or disabled, as specified. */
JUCE_NODISCARD FrameRate withDrop (bool x = true) const { return with (&FrameRate::drop, x); }
/** Returns a copy of this object with pulldown enabled or disabled, as specified. */
JUCE_NODISCARD FrameRate withPullDown (bool x = true) const { return with (&FrameRate::pulldown, x); }
/** Returns true if this instance is equal to other. */
bool operator== (const FrameRate& other) const
{
const auto tie = [] (const FrameRate& x) { return std::tie (x.base, x.drop, x.pulldown); };
return tie (*this) == tie (other);
}
/** Returns true if this instance is not equal to other. */
bool operator!= (const FrameRate& other) const { return ! (*this == other); }
private:
static FrameRate fromType (FrameRateType type)
{
switch (type)
{
case fps23976: return FrameRate().withBaseRate (24).withPullDown();
case fps24: return FrameRate().withBaseRate (24);
case fps25: return FrameRate().withBaseRate (25);
case fps2997: return FrameRate().withBaseRate (30).withPullDown();
case fps30: return FrameRate().withBaseRate (30);
case fps2997drop: return FrameRate().withBaseRate (30).withDrop().withPullDown();
case fps30drop: return FrameRate().withBaseRate (30).withDrop();
case fps60: return FrameRate().withBaseRate (60);
case fps60drop: return FrameRate().withBaseRate (60).withDrop();
case fpsUnknown: break;
}
return {};
}
template <typename Member, typename Value>
FrameRate with (Member&& member, Value&& value) const
{
auto copy = *this;
copy.*member = std::forward<Value> (value);
return copy;
}
int base = 0;
bool drop = false, pulldown = false;
};
/** Describes a musical time signature.
@see PositionInfo::getTimeSignature() PositionInfo::setTimeSignature()
*/
struct JUCE_API TimeSignature
{
/** Time signature numerator, e.g. the 3 of a 3/4 time sig */
int numerator = 4;
/** Time signature denominator, e.g. the 4 of a 3/4 time sig */
int denominator = 4;
bool operator== (const TimeSignature& other) const
{
const auto tie = [] (auto& x) { return std::tie (x.numerator, x.denominator); };
return tie (*this) == tie (other);
}
bool operator!= (const TimeSignature& other) const
{
return ! operator== (other);
}
};
/** Holds the begin and end points of a looped region.
@see PositionInfo::getIsLooping() PositionInfo::setIsLooping() PositionInfo::getLoopPoints() PositionInfo::setLoopPoints()
*/
struct JUCE_API LoopPoints
{
/** The current cycle start position in units of quarter-notes. */
double ppqStart = 0;
/** The current cycle end position in units of quarter-notes. */
double ppqEnd = 0;
bool operator== (const LoopPoints& other) const
{
const auto tie = [] (auto& x) { return std::tie (x.ppqStart, x.ppqEnd); };
return tie (*this) == tie (other);
}
bool operator!= (const LoopPoints& other) const
{
return ! operator== (other);
}
};
//==============================================================================
/** This type is deprecated; prefer PositionInfo instead.
Some position info may be unavailable, depending on the host or plugin format.
Unfortunately, CurrentPositionInfo doesn't have any way of differentiating between
default values and values that have been set explicitly.
*/
struct JUCE_API CurrentPositionInfo
{
/** The tempo in BPM */
double bpm = 120.0;
/** Time signature numerator, e.g. the 3 of a 3/4 time sig */
int timeSigNumerator = 4;
/** Time signature denominator, e.g. the 4 of a 3/4 time sig */
int timeSigDenominator = 4;
/** The current play position, in samples from the start of the timeline. */
int64 timeInSamples = 0;
/** The current play position, in seconds from the start of the timeline. */
double timeInSeconds = 0;
/** For timecode, the position of the start of the timeline, in seconds from 00:00:00:00. */
double editOriginTime = 0;
/** The current play position, in units of quarter-notes. */
double ppqPosition = 0;
/** The position of the start of the last bar, in units of quarter-notes.
This is the time from the start of the timeline to the start of the current
bar, in ppq units.
Note - this value may be unavailable on some hosts, e.g. Pro-Tools. If
it's not available, the value will be 0.
*/
double ppqPositionOfLastBarStart = 0;
/** The video frame rate, if applicable. */
FrameRate frameRate = FrameRateType::fps23976;
/** True if the transport is currently playing. */
bool isPlaying = false;
/** True if the transport is currently recording.
(When isRecording is true, then isPlaying will also be true).
*/
bool isRecording = false;
/** The current cycle start position in units of quarter-notes.
Note that not all hosts or plugin formats may provide this value.
@see isLooping
*/
double ppqLoopStart = 0;
/** The current cycle end position in units of quarter-notes.
Note that not all hosts or plugin formats may provide this value.
@see isLooping
*/
double ppqLoopEnd = 0;
/** True if the transport is currently looping. */
bool isLooping = false;
//==============================================================================
bool operator== (const CurrentPositionInfo& other) const noexcept
{
const auto tie = [] (const CurrentPositionInfo& i)
{
return std::tie (i.timeInSamples,
i.ppqPosition,
i.editOriginTime,
i.ppqPositionOfLastBarStart,
i.frameRate,
i.isPlaying,
i.isRecording,
i.bpm,
i.timeSigNumerator,
i.timeSigDenominator,
i.ppqLoopStart,
i.ppqLoopEnd,
i.isLooping);
};
return tie (*this) == tie (other);
}
bool operator!= (const CurrentPositionInfo& other) const noexcept
{
return ! operator== (other);
}
void resetToDefault()
{
*this = CurrentPositionInfo{};
}
};
//==============================================================================
/**
Describes the time at the start of the current audio callback.
Not all hosts and plugin formats can provide all of the possible time
information, so most of the getter functions in this class return
an Optional that will only be engaged if the host provides the corresponding
information. As a plugin developer, you should code defensively so that
the plugin behaves sensibly even when the host fails to provide timing
information.
A default-constructed instance of this class will return nullopt from
all functions that return an Optional.
*/
class PositionInfo
{
public:
/** Returns the number of samples that have elapsed. */
Optional<int64_t> getTimeInSamples() const { return getOptional (flagTimeSamples, timeInSamples); }
/** @see getTimeInSamples() */
void setTimeInSamples (Optional<int64_t> timeInSamplesIn) { setOptional (flagTimeSamples, timeInSamples, timeInSamplesIn); }
/** Returns the number of seconds that have elapsed. */
Optional<double> getTimeInSeconds() const { return getOptional (flagTimeSeconds, timeInSeconds); }
/** @see getTimeInSamples() */
void setTimeInSeconds (Optional<double> timeInSecondsIn) { setOptional (flagTimeSeconds, timeInSeconds, timeInSecondsIn); }
/** Returns the bpm, if available. */
Optional<double> getBpm() const { return getOptional (flagTempo, tempoBpm); }
/** @see getBpm() */
void setBpm (Optional<double> bpmIn) { setOptional (flagTempo, tempoBpm, bpmIn); }
/** Returns the time signature, if available. */
Optional<TimeSignature> getTimeSignature() const { return getOptional (flagTimeSignature, timeSignature); }
/** @see getTimeSignature() */
void setTimeSignature (Optional<TimeSignature> timeSignatureIn) { setOptional (flagTimeSignature, timeSignature, timeSignatureIn); }
/** Returns host loop points, if available. */
Optional<LoopPoints> getLoopPoints() const { return getOptional (flagLoopPoints, loopPoints); }
/** @see getLoopPoints() */
void setLoopPoints (Optional<LoopPoints> loopPointsIn) { setOptional (flagLoopPoints, loopPoints, loopPointsIn); }
/** The number of bars since the beginning of the timeline.
This value isn't available in all hosts or in all plugin formats.
*/
Optional<int64_t> getBarCount() const { return getOptional (flagBarCount, barCount); }
/** @see getBarCount() */
void setBarCount (Optional<int64_t> barCountIn) { setOptional (flagBarCount, barCount, barCountIn); }
/** The position of the start of the last bar, in units of quarter-notes.
This is the time from the start of the timeline to the start of the current
bar, in ppq units.
Note - this value may be unavailable on some hosts, e.g. Pro-Tools.
*/
Optional<double> getPpqPositionOfLastBarStart() const { return getOptional (flagLastBarStartPpq, lastBarStartPpq); }
/** @see getPpqPositionOfLastBarStart() */
void setPpqPositionOfLastBarStart (Optional<double> positionIn) { setOptional (flagLastBarStartPpq, lastBarStartPpq, positionIn); }
/** The video frame rate, if available. */
Optional<FrameRate> getFrameRate() const { return getOptional (flagFrameRate, frame); }
/** @see getFrameRate() */
void setFrameRate (Optional<FrameRate> frameRateIn) { setOptional (flagFrameRate, frame, frameRateIn); }
/** The current play position, in units of quarter-notes. */
Optional<double> getPpqPosition() const { return getOptional (flagPpqPosition, positionPpq); }
/** @see getPpqPosition() */
void setPpqPosition (Optional<double> ppqPositionIn) { setOptional (flagPpqPosition, positionPpq, ppqPositionIn); }
/** For timecode, the position of the start of the timeline, in seconds from 00:00:00:00. */
Optional<double> getEditOriginTime() const { return getOptional (flagOriginTime, originTime); }
/** @see getEditOriginTime() */
void setEditOriginTime (Optional<double> editOriginTimeIn) { setOptional (flagOriginTime, originTime, editOriginTimeIn); }
/** Get the host's callback time in nanoseconds, if available. */
Optional<uint64_t> getHostTimeNs() const { return getOptional (flagHostTimeNs, hostTimeNs); }
/** @see getHostTimeNs() */
void setHostTimeNs (Optional<uint64_t> hostTimeNsIn) { setOptional (flagHostTimeNs, hostTimeNs, hostTimeNsIn); }
/** True if the transport is currently playing. */
bool getIsPlaying() const { return getFlag (flagIsPlaying); }
/** @see getIsPlaying() */
void setIsPlaying (bool isPlayingIn) { setFlag (flagIsPlaying, isPlayingIn); }
/** True if the transport is currently recording.
(When isRecording is true, then isPlaying will also be true).
*/
bool getIsRecording() const { return getFlag (flagIsRecording); }
/** @see getIsRecording() */
void setIsRecording (bool isRecordingIn) { setFlag (flagIsRecording, isRecordingIn); }
/** True if the transport is currently looping. */
bool getIsLooping() const { return getFlag (flagIsLooping); }
/** @see getIsLooping() */
void setIsLooping (bool isLoopingIn) { setFlag (flagIsLooping, isLoopingIn); }
bool operator== (const PositionInfo& other) const noexcept
{
const auto tie = [] (const PositionInfo& i)
{
return std::make_tuple (i.getTimeInSamples(),
i.getTimeInSeconds(),
i.getPpqPosition(),
i.getEditOriginTime(),
i.getPpqPositionOfLastBarStart(),
i.getFrameRate(),
i.getBarCount(),
i.getTimeSignature(),
i.getBpm(),
i.getLoopPoints(),
i.getHostTimeNs(),
i.getIsPlaying(),
i.getIsRecording(),
i.getIsLooping());
};
return tie (*this) == tie (other);
}
bool operator!= (const PositionInfo& other) const noexcept
{
return ! operator== (other);
}
private:
bool getFlag (int64_t flagToCheck) const
{
return (flagToCheck & flags) != 0;
}
void setFlag (int64_t flagToCheck, bool value)
{
flags = (value ? flags | flagToCheck : flags & ~flagToCheck);
}
template <typename Value>
Optional<Value> getOptional (int64_t flagToCheck, Value value) const
{
return getFlag (flagToCheck) ? makeOptional (std::move (value)) : nullopt;
}
template <typename Value>
void setOptional (int64_t flagToCheck, Value& value, Optional<Value> opt)
{
if (opt.hasValue())
value = *opt;
setFlag (flagToCheck, opt.hasValue());
}
enum
{
flagTimeSignature = 1 << 0,
flagLoopPoints = 1 << 1,
flagFrameRate = 1 << 2,
flagTimeSeconds = 1 << 3,
flagLastBarStartPpq = 1 << 4,
flagPpqPosition = 1 << 5,
flagOriginTime = 1 << 6,
flagTempo = 1 << 7,
flagTimeSamples = 1 << 8,
flagBarCount = 1 << 9,
flagHostTimeNs = 1 << 10,
flagIsPlaying = 1 << 11,
flagIsRecording = 1 << 12,
flagIsLooping = 1 << 13
};
TimeSignature timeSignature;
LoopPoints loopPoints;
FrameRate frame = FrameRateType::fps23976;
double timeInSeconds = 0.0;
double lastBarStartPpq = 0.0;
double positionPpq = 0.0;
double originTime = 0.0;
double tempoBpm = 0.0;
int64_t timeInSamples = 0;
int64_t barCount = 0;
uint64_t hostTimeNs = 0;
int64_t flags = 0;
};
//==============================================================================
/** Deprecated, use getPosition() instead.
Fills-in the given structure with details about the transport's
position at the start of the current processing block. If this method returns
false then the current play head position is not available and the given
structure will be undefined.
You can ONLY call this from your processBlock() method! Calling it at other
times will produce undefined behaviour, as the host may not have any context
in which a time would make sense, and some hosts will almost certainly have
multithreading issues if it's not called on the audio thread.
*/
[[deprecated ("Use getPosition instead. Not all hosts are able to provide all time position information; getPosition differentiates clearly between set and unset fields.")]]
bool getCurrentPosition (CurrentPositionInfo& result)
{
if (const auto pos = getPosition())
{
result.resetToDefault();
if (const auto sig = pos->getTimeSignature())
{
result.timeSigNumerator = sig->numerator;
result.timeSigDenominator = sig->denominator;
}
if (const auto loop = pos->getLoopPoints())
{
result.ppqLoopStart = loop->ppqStart;
result.ppqLoopEnd = loop->ppqEnd;
}
if (const auto frame = pos->getFrameRate())
result.frameRate = *frame;
if (const auto timeInSeconds = pos->getTimeInSeconds())
result.timeInSeconds = *timeInSeconds;
if (const auto lastBarStartPpq = pos->getPpqPositionOfLastBarStart())
result.ppqPositionOfLastBarStart = *lastBarStartPpq;
if (const auto ppqPosition = pos->getPpqPosition())
result.ppqPosition = *ppqPosition;
if (const auto originTime = pos->getEditOriginTime())
result.editOriginTime = *originTime;
if (const auto bpm = pos->getBpm())
result.bpm = *bpm;
if (const auto timeInSamples = pos->getTimeInSamples())
result.timeInSamples = *timeInSamples;
result.isPlaying = pos->getIsPlaying();
result.isRecording = pos->getIsRecording();
result.isLooping = pos->getIsLooping();
return true;
}
return false;
}
/** Fetches details about the transport's position at the start of the current
processing block. If this method returns nullopt then the current play head
position is not available.
A non-null return value just indicates that the host was able to provide
*some* relevant timing information. Individual PositionInfo getters may
still return nullopt.
You can ONLY call this from your processBlock() method! Calling it at other
times will produce undefined behaviour, as the host may not have any context
in which a time would make sense, and some hosts will almost certainly have
multithreading issues if it's not called on the audio thread.
*/
virtual Optional<PositionInfo> getPosition() const = 0;
/** Returns true if this object can control the transport. */
virtual bool canControlTransport() { return false; }
/** Starts or stops the audio. */
virtual void transportPlay (bool shouldStartPlaying) { ignoreUnused (shouldStartPlaying); }
/** Starts or stops recording the audio. */
virtual void transportRecord (bool shouldStartRecording) { ignoreUnused (shouldStartRecording); }
/** Rewinds the audio. */
virtual void transportRewind() {}
};
} // namespace juce
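
The header above is removed from the tree only because it now comes from the JUCE submodule; the API itself is unchanged. For context, a minimal sketch of how a processor queries this play-head interface from its audio callback, using only members documented in the header (the surrounding AudioProcessor subclass and its `lastKnownTempo` member are assumptions):

```cpp
// Inside an AudioProcessor::processBlock(): every getter returns juce::Optional,
// so each field must be checked before it is used.
if (juce::AudioPlayHead* playHead = getPlayHead())
{
    if (const auto position = playHead->getPosition())
    {
        if (const auto bpm = position->getBpm())
            lastKnownTempo = *bpm;              // hypothetical member of the processor

        const bool playing = position->getIsPlaying();
        juce::ignoreUnused (playing);
    }
}
```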

libs/juce7/source/modules/juce_audio_basics/buffers/juce_AudioChannelSet.cpp (+0, -741)

@@ -1,741 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
AudioChannelSet::AudioChannelSet (uint32 c) : channels (static_cast<int64> (c))
{
}
AudioChannelSet::AudioChannelSet (const std::initializer_list<ChannelType>& c)
{
for (auto channel : c)
addChannel (channel);
}
bool AudioChannelSet::operator== (const AudioChannelSet& other) const noexcept { return channels == other.channels; }
bool AudioChannelSet::operator!= (const AudioChannelSet& other) const noexcept { return channels != other.channels; }
bool AudioChannelSet::operator< (const AudioChannelSet& other) const noexcept { return channels < other.channels; }
String AudioChannelSet::getChannelTypeName (AudioChannelSet::ChannelType type)
{
if (type >= discreteChannel0)
return "Discrete " + String (type - discreteChannel0 + 1);
switch (type)
{
case left: return NEEDS_TRANS("Left");
case right: return NEEDS_TRANS("Right");
case centre: return NEEDS_TRANS("Centre");
case LFE: return NEEDS_TRANS("LFE");
case leftSurround: return NEEDS_TRANS("Left Surround");
case rightSurround: return NEEDS_TRANS("Right Surround");
case leftCentre: return NEEDS_TRANS("Left Centre");
case rightCentre: return NEEDS_TRANS("Right Centre");
case centreSurround: return NEEDS_TRANS("Centre Surround");
case leftSurroundRear: return NEEDS_TRANS("Left Surround Rear");
case rightSurroundRear: return NEEDS_TRANS("Right Surround Rear");
case topMiddle: return NEEDS_TRANS("Top Middle");
case topFrontLeft: return NEEDS_TRANS("Top Front Left");
case topFrontCentre: return NEEDS_TRANS("Top Front Centre");
case topFrontRight: return NEEDS_TRANS("Top Front Right");
case topRearLeft: return NEEDS_TRANS("Top Rear Left");
case topRearCentre: return NEEDS_TRANS("Top Rear Centre");
case topRearRight: return NEEDS_TRANS("Top Rear Right");
case wideLeft: return NEEDS_TRANS("Wide Left");
case wideRight: return NEEDS_TRANS("Wide Right");
case LFE2: return NEEDS_TRANS("LFE 2");
case leftSurroundSide: return NEEDS_TRANS("Left Surround Side");
case rightSurroundSide: return NEEDS_TRANS("Right Surround Side");
case ambisonicW: return NEEDS_TRANS("Ambisonic W");
case ambisonicX: return NEEDS_TRANS("Ambisonic X");
case ambisonicY: return NEEDS_TRANS("Ambisonic Y");
case ambisonicZ: return NEEDS_TRANS("Ambisonic Z");
case topSideLeft: return NEEDS_TRANS("Top Side Left");
case topSideRight: return NEEDS_TRANS("Top Side Right");
case ambisonicACN4: return NEEDS_TRANS("Ambisonic 4");
case ambisonicACN5: return NEEDS_TRANS("Ambisonic 5");
case ambisonicACN6: return NEEDS_TRANS("Ambisonic 6");
case ambisonicACN7: return NEEDS_TRANS("Ambisonic 7");
case ambisonicACN8: return NEEDS_TRANS("Ambisonic 8");
case ambisonicACN9: return NEEDS_TRANS("Ambisonic 9");
case ambisonicACN10: return NEEDS_TRANS("Ambisonic 10");
case ambisonicACN11: return NEEDS_TRANS("Ambisonic 11");
case ambisonicACN12: return NEEDS_TRANS("Ambisonic 12");
case ambisonicACN13: return NEEDS_TRANS("Ambisonic 13");
case ambisonicACN14: return NEEDS_TRANS("Ambisonic 14");
case ambisonicACN15: return NEEDS_TRANS("Ambisonic 15");
case ambisonicACN16: return NEEDS_TRANS("Ambisonic 16");
case ambisonicACN17: return NEEDS_TRANS("Ambisonic 17");
case ambisonicACN18: return NEEDS_TRANS("Ambisonic 18");
case ambisonicACN19: return NEEDS_TRANS("Ambisonic 19");
case ambisonicACN20: return NEEDS_TRANS("Ambisonic 20");
case ambisonicACN21: return NEEDS_TRANS("Ambisonic 21");
case ambisonicACN22: return NEEDS_TRANS("Ambisonic 22");
case ambisonicACN23: return NEEDS_TRANS("Ambisonic 23");
case ambisonicACN24: return NEEDS_TRANS("Ambisonic 24");
case ambisonicACN25: return NEEDS_TRANS("Ambisonic 25");
case ambisonicACN26: return NEEDS_TRANS("Ambisonic 26");
case ambisonicACN27: return NEEDS_TRANS("Ambisonic 27");
case ambisonicACN28: return NEEDS_TRANS("Ambisonic 28");
case ambisonicACN29: return NEEDS_TRANS("Ambisonic 29");
case ambisonicACN30: return NEEDS_TRANS("Ambisonic 30");
case ambisonicACN31: return NEEDS_TRANS("Ambisonic 31");
case ambisonicACN32: return NEEDS_TRANS("Ambisonic 32");
case ambisonicACN33: return NEEDS_TRANS("Ambisonic 33");
case ambisonicACN34: return NEEDS_TRANS("Ambisonic 34");
case ambisonicACN35: return NEEDS_TRANS("Ambisonic 35");
case bottomFrontLeft: return NEEDS_TRANS("Bottom Front Left");
case bottomFrontCentre: return NEEDS_TRANS("Bottom Front Centre");
case bottomFrontRight: return NEEDS_TRANS("Bottom Front Right");
case proximityLeft: return NEEDS_TRANS("Proximity Left");
case proximityRight: return NEEDS_TRANS("Proximity Right");
case bottomSideLeft: return NEEDS_TRANS("Bottom Side Left");
case bottomSideRight: return NEEDS_TRANS("Bottom Side Right");
case bottomRearLeft: return NEEDS_TRANS("Bottom Rear Left");
case bottomRearCentre: return NEEDS_TRANS("Bottom Rear Centre");
case bottomRearRight: return NEEDS_TRANS("Bottom Rear Right");
case discreteChannel0:
case unknown:
default: break;
}
return "Unknown";
}
String AudioChannelSet::getAbbreviatedChannelTypeName (AudioChannelSet::ChannelType type)
{
if (type >= discreteChannel0)
return String (type - discreteChannel0 + 1);
switch (type)
{
case left: return "L";
case right: return "R";
case centre: return "C";
case LFE: return "Lfe";
case leftSurround: return "Ls";
case rightSurround: return "Rs";
case leftCentre: return "Lc";
case rightCentre: return "Rc";
case centreSurround: return "Cs";
case leftSurroundRear: return "Lrs";
case rightSurroundRear: return "Rrs";
case topMiddle: return "Tm";
case topFrontLeft: return "Tfl";
case topFrontCentre: return "Tfc";
case topFrontRight: return "Tfr";
case topRearLeft: return "Trl";
case topRearCentre: return "Trc";
case topRearRight: return "Trr";
case wideLeft: return "Wl";
case wideRight: return "Wr";
case LFE2: return "Lfe2";
case leftSurroundSide: return "Lss";
case rightSurroundSide: return "Rss";
case ambisonicACN0: return "ACN0";
case ambisonicACN1: return "ACN1";
case ambisonicACN2: return "ACN2";
case ambisonicACN3: return "ACN3";
case ambisonicACN4: return "ACN4";
case ambisonicACN5: return "ACN5";
case ambisonicACN6: return "ACN6";
case ambisonicACN7: return "ACN7";
case ambisonicACN8: return "ACN8";
case ambisonicACN9: return "ACN9";
case ambisonicACN10: return "ACN10";
case ambisonicACN11: return "ACN11";
case ambisonicACN12: return "ACN12";
case ambisonicACN13: return "ACN13";
case ambisonicACN14: return "ACN14";
case ambisonicACN15: return "ACN15";
case ambisonicACN16: return "ACN16";
case ambisonicACN17: return "ACN17";
case ambisonicACN18: return "ACN18";
case ambisonicACN19: return "ACN19";
case ambisonicACN20: return "ACN20";
case ambisonicACN21: return "ACN21";
case ambisonicACN22: return "ACN22";
case ambisonicACN23: return "ACN23";
case ambisonicACN24: return "ACN24";
case ambisonicACN25: return "ACN25";
case ambisonicACN26: return "ACN26";
case ambisonicACN27: return "ACN27";
case ambisonicACN28: return "ACN28";
case ambisonicACN29: return "ACN29";
case ambisonicACN30: return "ACN30";
case ambisonicACN31: return "ACN31";
case ambisonicACN32: return "ACN32";
case ambisonicACN33: return "ACN33";
case ambisonicACN34: return "ACN34";
case ambisonicACN35: return "ACN35";
case topSideLeft: return "Tsl";
case topSideRight: return "Tsr";
case bottomFrontLeft: return "Bfl";
case bottomFrontCentre: return "Bfc";
case bottomFrontRight: return "Bfr";
case proximityLeft: return "Pl";
case proximityRight: return "Pr";
case bottomSideLeft: return "Bsl";
case bottomSideRight: return "Bsr";
case bottomRearLeft: return "Brl";
case bottomRearCentre: return "Brc";
case bottomRearRight: return "Brr";
case discreteChannel0:
case unknown:
default: break;
}
if (type >= ambisonicACN4 && type <= ambisonicACN35)
return "ACN" + String (type - ambisonicACN4 + 4);
return {};
}
AudioChannelSet::ChannelType AudioChannelSet::getChannelTypeFromAbbreviation (const String& abbr)
{
if (abbr.length() > 0 && (abbr[0] >= '0' && abbr[0] <= '9'))
return static_cast<AudioChannelSet::ChannelType> (static_cast<int> (discreteChannel0)
+ abbr.getIntValue() - 1);
if (abbr == "L") return left;
if (abbr == "R") return right;
if (abbr == "C") return centre;
if (abbr == "Lfe") return LFE;
if (abbr == "Ls") return leftSurround;
if (abbr == "Rs") return rightSurround;
if (abbr == "Lc") return leftCentre;
if (abbr == "Rc") return rightCentre;
if (abbr == "Cs") return centreSurround;
if (abbr == "Lrs") return leftSurroundRear;
if (abbr == "Rrs") return rightSurroundRear;
if (abbr == "Tm") return topMiddle;
if (abbr == "Tfl") return topFrontLeft;
if (abbr == "Tfc") return topFrontCentre;
if (abbr == "Tfr") return topFrontRight;
if (abbr == "Trl") return topRearLeft;
if (abbr == "Trc") return topRearCentre;
if (abbr == "Trr") return topRearRight;
if (abbr == "Wl") return wideLeft;
if (abbr == "Wr") return wideRight;
if (abbr == "Lfe2") return LFE2;
if (abbr == "Lss") return leftSurroundSide;
if (abbr == "Rss") return rightSurroundSide;
if (abbr == "W") return ambisonicW;
if (abbr == "X") return ambisonicX;
if (abbr == "Y") return ambisonicY;
if (abbr == "Z") return ambisonicZ;
if (abbr == "ACN0") return ambisonicACN0;
if (abbr == "ACN1") return ambisonicACN1;
if (abbr == "ACN2") return ambisonicACN2;
if (abbr == "ACN3") return ambisonicACN3;
if (abbr == "ACN4") return ambisonicACN4;
if (abbr == "ACN5") return ambisonicACN5;
if (abbr == "ACN6") return ambisonicACN6;
if (abbr == "ACN7") return ambisonicACN7;
if (abbr == "ACN8") return ambisonicACN8;
if (abbr == "ACN9") return ambisonicACN9;
if (abbr == "ACN10") return ambisonicACN10;
if (abbr == "ACN11") return ambisonicACN11;
if (abbr == "ACN12") return ambisonicACN12;
if (abbr == "ACN13") return ambisonicACN13;
if (abbr == "ACN14") return ambisonicACN14;
if (abbr == "ACN15") return ambisonicACN15;
if (abbr == "ACN16") return ambisonicACN16;
if (abbr == "ACN17") return ambisonicACN17;
if (abbr == "ACN18") return ambisonicACN18;
if (abbr == "ACN19") return ambisonicACN19;
if (abbr == "ACN20") return ambisonicACN20;
if (abbr == "ACN21") return ambisonicACN21;
if (abbr == "ACN22") return ambisonicACN22;
if (abbr == "ACN23") return ambisonicACN23;
if (abbr == "ACN24") return ambisonicACN24;
if (abbr == "ACN25") return ambisonicACN25;
if (abbr == "ACN26") return ambisonicACN26;
if (abbr == "ACN27") return ambisonicACN27;
if (abbr == "ACN28") return ambisonicACN28;
if (abbr == "ACN29") return ambisonicACN29;
if (abbr == "ACN30") return ambisonicACN30;
if (abbr == "ACN31") return ambisonicACN31;
if (abbr == "ACN32") return ambisonicACN32;
if (abbr == "ACN33") return ambisonicACN33;
if (abbr == "ACN34") return ambisonicACN34;
if (abbr == "ACN35") return ambisonicACN35;
if (abbr == "Tsl") return topSideLeft;
if (abbr == "Tsr") return topSideRight;
if (abbr == "Bfl") return bottomFrontLeft;
if (abbr == "Bfc") return bottomFrontCentre;
if (abbr == "Bfr") return bottomFrontRight;
if (abbr == "Bsl") return bottomSideLeft;
if (abbr == "Bsr") return bottomSideRight;
if (abbr == "Brl") return bottomRearLeft;
if (abbr == "Brc") return bottomRearCentre;
if (abbr == "Brr") return bottomRearRight;
return unknown;
}
String AudioChannelSet::getSpeakerArrangementAsString() const
{
StringArray speakerTypes;
for (auto& speaker : getChannelTypes())
{
auto name = getAbbreviatedChannelTypeName (speaker);
if (name.isNotEmpty())
speakerTypes.add (name);
}
return speakerTypes.joinIntoString (" ");
}
AudioChannelSet AudioChannelSet::fromAbbreviatedString (const String& str)
{
AudioChannelSet set;
for (auto& abbr : StringArray::fromTokens (str, true))
{
auto type = getChannelTypeFromAbbreviation (abbr);
if (type != unknown)
set.addChannel (type);
}
return set;
}
String AudioChannelSet::getDescription() const
{
if (isDiscreteLayout()) return "Discrete #" + String (size());
if (*this == disabled()) return "Disabled";
if (*this == mono()) return "Mono";
if (*this == stereo()) return "Stereo";
if (*this == createLCR()) return "LCR";
if (*this == createLRS()) return "LRS";
if (*this == createLCRS()) return "LCRS";
if (*this == create5point0()) return "5.0 Surround";
if (*this == create5point1()) return "5.1 Surround";
if (*this == create5point1point2()) return "5.1.2 Surround";
if (*this == create5point1point4()) return "5.1.4 Surround";
if (*this == create6point0()) return "6.0 Surround";
if (*this == create6point1()) return "6.1 Surround";
if (*this == create6point0Music()) return "6.0 (Music) Surround";
if (*this == create6point1Music()) return "6.1 (Music) Surround";
if (*this == create7point0()) return "7.0 Surround";
if (*this == create7point1()) return "7.1 Surround";
if (*this == create7point0SDDS()) return "7.0 Surround SDDS";
if (*this == create7point1SDDS()) return "7.1 Surround SDDS";
if (*this == create7point0point2()) return "7.0.2 Surround";
if (*this == create7point0point4()) return "7.0.4 Surround";
if (*this == create7point1point2()) return "7.1.2 Surround";
if (*this == create7point1point4()) return "7.1.4 Surround";
if (*this == create7point1point6()) return "7.1.6 Surround";
if (*this == create9point1point6()) return "9.1.6 Surround";
if (*this == quadraphonic()) return "Quadraphonic";
if (*this == pentagonal()) return "Pentagonal";
if (*this == hexagonal()) return "Hexagonal";
if (*this == octagonal()) return "Octagonal";
// ambisonics
{
auto order = getAmbisonicOrder();
if (order >= 0)
{
String suffix;
switch (order)
{
case 1: suffix = "st"; break;
case 2: suffix = "nd"; break;
case 3: suffix = "rd"; break;
default: suffix = "th"; break;
}
return String (order) + suffix + " Order Ambisonics";
}
}
return "Unknown";
}
bool AudioChannelSet::isDiscreteLayout() const noexcept
{
for (auto& speaker : getChannelTypes())
if (speaker <= ambisonicACN35)
return false;
return true;
}
int AudioChannelSet::size() const noexcept
{
return channels.countNumberOfSetBits();
}
AudioChannelSet::ChannelType AudioChannelSet::getTypeOfChannel (int index) const noexcept
{
int bit = channels.findNextSetBit(0);
for (int i = 0; i < index && bit >= 0; ++i)
bit = channels.findNextSetBit (bit + 1);
return static_cast<ChannelType> (bit);
}
int AudioChannelSet::getChannelIndexForType (AudioChannelSet::ChannelType type) const noexcept
{
int idx = 0;
for (int bit = channels.findNextSetBit (0); bit >= 0; bit = channels.findNextSetBit (bit + 1))
{
if (static_cast<ChannelType> (bit) == type)
return idx;
idx++;
}
return -1;
}
Array<AudioChannelSet::ChannelType> AudioChannelSet::getChannelTypes() const
{
Array<ChannelType> result;
for (int bit = channels.findNextSetBit(0); bit >= 0; bit = channels.findNextSetBit (bit + 1))
result.add (static_cast<ChannelType> (bit));
return result;
}
void AudioChannelSet::addChannel (ChannelType newChannel)
{
const int bit = static_cast<int> (newChannel);
jassert (bit >= 0 && bit < 1024);
channels.setBit (bit);
}
void AudioChannelSet::removeChannel (ChannelType newChannel)
{
const int bit = static_cast<int> (newChannel);
jassert (bit >= 0 && bit < 1024);
channels.clearBit (bit);
}
AudioChannelSet AudioChannelSet::disabled() { return {}; }
AudioChannelSet AudioChannelSet::mono() { return AudioChannelSet ({ centre }); }
AudioChannelSet AudioChannelSet::stereo() { return AudioChannelSet ({ left, right }); }
AudioChannelSet AudioChannelSet::createLCR() { return AudioChannelSet ({ left, right, centre }); }
AudioChannelSet AudioChannelSet::createLRS() { return AudioChannelSet ({ left, right, surround }); }
AudioChannelSet AudioChannelSet::createLCRS() { return AudioChannelSet ({ left, right, centre, surround }); }
AudioChannelSet AudioChannelSet::create5point0() { return AudioChannelSet ({ left, right, centre, leftSurround, rightSurround }); }
AudioChannelSet AudioChannelSet::create5point1() { return AudioChannelSet ({ left, right, centre, LFE, leftSurround, rightSurround }); }
AudioChannelSet AudioChannelSet::create6point0() { return AudioChannelSet ({ left, right, centre, leftSurround, rightSurround, centreSurround }); }
AudioChannelSet AudioChannelSet::create6point1() { return AudioChannelSet ({ left, right, centre, LFE, leftSurround, rightSurround, centreSurround }); }
AudioChannelSet AudioChannelSet::create6point0Music() { return AudioChannelSet ({ left, right, leftSurround, rightSurround, leftSurroundSide, rightSurroundSide }); }
AudioChannelSet AudioChannelSet::create6point1Music() { return AudioChannelSet ({ left, right, LFE, leftSurround, rightSurround, leftSurroundSide, rightSurroundSide }); }
AudioChannelSet AudioChannelSet::create7point0() { return AudioChannelSet ({ left, right, centre, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear }); }
AudioChannelSet AudioChannelSet::create7point0SDDS() { return AudioChannelSet ({ left, right, centre, leftSurround, rightSurround, leftCentre, rightCentre }); }
AudioChannelSet AudioChannelSet::create7point1() { return AudioChannelSet ({ left, right, centre, LFE, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear }); }
AudioChannelSet AudioChannelSet::create7point1SDDS() { return AudioChannelSet ({ left, right, centre, LFE, leftSurround, rightSurround, leftCentre, rightCentre }); }
AudioChannelSet AudioChannelSet::quadraphonic() { return AudioChannelSet ({ left, right, leftSurround, rightSurround }); }
AudioChannelSet AudioChannelSet::pentagonal() { return AudioChannelSet ({ left, right, centre, leftSurroundRear, rightSurroundRear }); }
AudioChannelSet AudioChannelSet::hexagonal() { return AudioChannelSet ({ left, right, centre, centreSurround, leftSurroundRear, rightSurroundRear }); }
AudioChannelSet AudioChannelSet::octagonal() { return AudioChannelSet ({ left, right, centre, leftSurround, rightSurround, centreSurround, wideLeft, wideRight }); }
AudioChannelSet AudioChannelSet::create5point1point2() { return AudioChannelSet ({ left, right, centre, LFE, leftSurround, rightSurround, topSideLeft, topSideRight }); }
AudioChannelSet AudioChannelSet::create5point1point4() { return AudioChannelSet ({ left, right, centre, LFE, leftSurround, rightSurround, topFrontLeft, topFrontRight, topRearLeft, topRearRight }); }
AudioChannelSet AudioChannelSet::create7point0point2() { return AudioChannelSet ({ left, right, centre, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear, topSideLeft, topSideRight }); }
AudioChannelSet AudioChannelSet::create7point1point2() { return AudioChannelSet ({ left, right, centre, LFE, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear, topSideLeft, topSideRight }); }
AudioChannelSet AudioChannelSet::create7point0point4() { return AudioChannelSet ({ left, right, centre, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear, topFrontLeft, topFrontRight, topRearLeft, topRearRight }); }
AudioChannelSet AudioChannelSet::create7point1point4() { return AudioChannelSet ({ left, right, centre, LFE, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear, topFrontLeft, topFrontRight, topRearLeft, topRearRight }); }
AudioChannelSet AudioChannelSet::create7point1point6() { return AudioChannelSet ({ left, right, centre, LFE, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear, topFrontLeft, topFrontRight, topSideLeft, topSideRight, topRearLeft, topRearRight }); }
AudioChannelSet AudioChannelSet::create9point1point6() { return AudioChannelSet ({ left, right, centre, LFE, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear, wideLeft, wideRight, topFrontLeft, topFrontRight, topSideLeft, topSideRight, topRearLeft, topRearRight }); }
AudioChannelSet AudioChannelSet::ambisonic (int order)
{
jassert (isPositiveAndBelow (order, 6));
if (order == 0)
return AudioChannelSet ((uint32) (1 << ambisonicACN0));
AudioChannelSet set ((1u << ambisonicACN0) | (1u << ambisonicACN1) | (1u << ambisonicACN2) | (1u << ambisonicACN3));
auto numAmbisonicChannels = (order + 1) * (order + 1);
set.channels.setRange (ambisonicACN4, numAmbisonicChannels - 4, true);
return set;
}
int AudioChannelSet::getAmbisonicOrder() const
{
auto ambisonicOrder = getAmbisonicOrderForNumChannels (size());
if (ambisonicOrder >= 0)
return (*this == ambisonic (ambisonicOrder) ? ambisonicOrder : -1);
return -1;
}
AudioChannelSet AudioChannelSet::discreteChannels (int numChannels)
{
AudioChannelSet s;
s.channels.setRange (discreteChannel0, numChannels, true);
return s;
}
AudioChannelSet AudioChannelSet::canonicalChannelSet (int numChannels)
{
if (numChannels == 1) return AudioChannelSet::mono();
if (numChannels == 2) return AudioChannelSet::stereo();
if (numChannels == 3) return AudioChannelSet::createLCR();
if (numChannels == 4) return AudioChannelSet::quadraphonic();
if (numChannels == 5) return AudioChannelSet::create5point0();
if (numChannels == 6) return AudioChannelSet::create5point1();
if (numChannels == 7) return AudioChannelSet::create7point0();
if (numChannels == 8) return AudioChannelSet::create7point1();
return discreteChannels (numChannels);
}
AudioChannelSet AudioChannelSet::namedChannelSet (int numChannels)
{
if (numChannels == 1) return AudioChannelSet::mono();
if (numChannels == 2) return AudioChannelSet::stereo();
if (numChannels == 3) return AudioChannelSet::createLCR();
if (numChannels == 4) return AudioChannelSet::quadraphonic();
if (numChannels == 5) return AudioChannelSet::create5point0();
if (numChannels == 6) return AudioChannelSet::create5point1();
if (numChannels == 7) return AudioChannelSet::create7point0();
if (numChannels == 8) return AudioChannelSet::create7point1();
return {};
}
Array<AudioChannelSet> AudioChannelSet::channelSetsWithNumberOfChannels (int numChannels)
{
Array<AudioChannelSet> retval;
if (numChannels != 0)
{
retval.add (AudioChannelSet::discreteChannels (numChannels));
retval.addArray ([numChannels]() -> Array<AudioChannelSet>
{
switch (numChannels)
{
case 1:
return { AudioChannelSet::mono() };
case 2:
return { AudioChannelSet::stereo() };
case 3:
return { AudioChannelSet::createLCR(),
AudioChannelSet::createLRS() };
case 4:
return { AudioChannelSet::quadraphonic(),
AudioChannelSet::createLCRS() };
case 5:
return { AudioChannelSet::create5point0(),
AudioChannelSet::pentagonal() };
case 6:
return { AudioChannelSet::create5point1(),
AudioChannelSet::create6point0(),
AudioChannelSet::create6point0Music(),
AudioChannelSet::hexagonal() };
case 7:
return { AudioChannelSet::create7point0(),
AudioChannelSet::create7point0SDDS(),
AudioChannelSet::create6point1(),
AudioChannelSet::create6point1Music() };
case 8:
return { AudioChannelSet::create7point1(),
AudioChannelSet::create7point1SDDS(),
AudioChannelSet::octagonal(),
AudioChannelSet::create5point1point2() };
case 9:
return { AudioChannelSet::create7point0point2() };
case 10:
return { AudioChannelSet::create5point1point4(),
AudioChannelSet::create7point1point2() };
case 11:
return { AudioChannelSet::create7point0point4() };
case 12:
return { AudioChannelSet::create7point1point4() };
case 14:
return { AudioChannelSet::create7point1point6() };
case 16:
return { AudioChannelSet::create9point1point6() };
}
return {};
}());
auto order = getAmbisonicOrderForNumChannels (numChannels);
if (order >= 0)
retval.add (AudioChannelSet::ambisonic (order));
}
return retval;
}
AudioChannelSet JUCE_CALLTYPE AudioChannelSet::channelSetWithChannels (const Array<ChannelType>& channelArray)
{
AudioChannelSet set;
for (auto ch : channelArray)
{
jassert (! set.channels[static_cast<int> (ch)]);
set.addChannel (ch);
}
return set;
}
//==============================================================================
AudioChannelSet JUCE_CALLTYPE AudioChannelSet::fromWaveChannelMask (int32 dwChannelMask)
{
return AudioChannelSet (static_cast<uint32> ((dwChannelMask & ((1 << 18) - 1)) << 1));
}
int32 AudioChannelSet::getWaveChannelMask() const noexcept
{
if (channels.getHighestBit() > topRearRight)
return -1;
return (channels.toInteger() >> 1);
}
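// Illustrative sketch, not part of the original file: round-tripping a WAVEFORMATEXTENSIBLE
// channel mask. 0x3f is the standard 5.1 mask (FL | FR | FC | LFE | BL | BR); the bit shift
// above accounts for ChannelType::left starting at 1 rather than 0.
[[maybe_unused]] static void exampleWaveChannelMask()
{
    [[maybe_unused]] const auto set = AudioChannelSet::fromWaveChannelMask (0x3f);
    jassert (set == AudioChannelSet::create5point1());
    jassert (set.getWaveChannelMask() == 0x3f);
}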
//==============================================================================
int JUCE_CALLTYPE AudioChannelSet::getAmbisonicOrderForNumChannels (int numChannels)
{
auto sqrtMinusOne = std::sqrt (static_cast<float> (numChannels)) - 1.0f;
auto ambisonicOrder = jmax (0, static_cast<int> (std::floor (sqrtMinusOne)));
if (ambisonicOrder > 5)
return -1;
return (static_cast<float> (ambisonicOrder) == sqrtMinusOne ? ambisonicOrder : -1);
}
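// Worked example, not part of the original file: an order-N ambisonic layout carries
// (N + 1)^2 channels, so the square-root test above only succeeds for perfect squares.
// 16 channels gives sqrt(16) - 1 = 3 (third order); 10 channels gives sqrt(10) - 1 ~= 2.16,
// which is not a whole number, so -1 is returned. Via the public API:
[[maybe_unused]] static void exampleAmbisonicOrder()
{
    jassert (AudioChannelSet::ambisonic (3).size() == 16);
    jassert (AudioChannelSet::ambisonic (3).getAmbisonicOrder() == 3);
}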
//==============================================================================
//==============================================================================
#if JUCE_UNIT_TESTS
class AudioChannelSetUnitTest : public UnitTest
{
public:
AudioChannelSetUnitTest()
: UnitTest ("AudioChannelSetUnitTest", UnitTestCategories::audio)
{}
void runTest() override
{
auto max = AudioChannelSet::maxChannelsOfNamedLayout;
beginTest ("maxChannelsOfNamedLayout is non-discrete");
expect (AudioChannelSet::channelSetsWithNumberOfChannels (max).size() >= 2);
beginTest ("channelSetsWithNumberOfChannels returns correct speaker count");
{
for (auto ch = 1; ch <= max; ++ch)
{
auto channelSets = AudioChannelSet::channelSetsWithNumberOfChannels (ch);
for (auto set : channelSets)
expect (set.size() == ch);
}
}
beginTest ("Ambisonics");
{
uint64 mask = 0;
mask |= (1ull << AudioChannelSet::ambisonicACN0);
checkAmbisonic (mask, 0, "0th Order Ambisonics");
mask |= (1ull << AudioChannelSet::ambisonicACN1) | (1ull << AudioChannelSet::ambisonicACN2) | (1ull << AudioChannelSet::ambisonicACN3);
checkAmbisonic (mask, 1, "1st Order Ambisonics");
mask |= (1ull << AudioChannelSet::ambisonicACN4) | (1ull << AudioChannelSet::ambisonicACN5) | (1ull << AudioChannelSet::ambisonicACN6)
| (1ull << AudioChannelSet::ambisonicACN7) | (1ull << AudioChannelSet::ambisonicACN8);
checkAmbisonic (mask, 2, "2nd Order Ambisonics");
mask |= (1ull << AudioChannelSet::ambisonicACN9) | (1ull << AudioChannelSet::ambisonicACN10) | (1ull << AudioChannelSet::ambisonicACN11)
| (1ull << AudioChannelSet::ambisonicACN12) | (1ull << AudioChannelSet::ambisonicACN13) | (1ull << AudioChannelSet::ambisonicACN14)
| (1ull << AudioChannelSet::ambisonicACN15);
checkAmbisonic (mask, 3, "3rd Order Ambisonics");
mask |= (1ull << AudioChannelSet::ambisonicACN16) | (1ull << AudioChannelSet::ambisonicACN17) | (1ull << AudioChannelSet::ambisonicACN18)
| (1ull << AudioChannelSet::ambisonicACN19) | (1ull << AudioChannelSet::ambisonicACN20) | (1ull << AudioChannelSet::ambisonicACN21)
| (1ull << AudioChannelSet::ambisonicACN22) | (1ull << AudioChannelSet::ambisonicACN23) | (1ull << AudioChannelSet::ambisonicACN24);
checkAmbisonic (mask, 4, "4th Order Ambisonics");
mask |= (1ull << AudioChannelSet::ambisonicACN25) | (1ull << AudioChannelSet::ambisonicACN26) | (1ull << AudioChannelSet::ambisonicACN27)
| (1ull << AudioChannelSet::ambisonicACN28) | (1ull << AudioChannelSet::ambisonicACN29) | (1ull << AudioChannelSet::ambisonicACN30)
| (1ull << AudioChannelSet::ambisonicACN31) | (1ull << AudioChannelSet::ambisonicACN32) | (1ull << AudioChannelSet::ambisonicACN33)
| (1ull << AudioChannelSet::ambisonicACN34) | (1ull << AudioChannelSet::ambisonicACN35);
checkAmbisonic (mask, 5, "5th Order Ambisonics");
}
}
private:
void checkAmbisonic (uint64 mask, int order, const char* layoutName)
{
auto expected = AudioChannelSet::ambisonic (order);
auto numChannels = expected.size();
expect (numChannels == BigInteger ((int64) mask).countNumberOfSetBits());
expect (channelSetFromMask (mask) == expected);
expect (order == expected.getAmbisonicOrder());
expect (expected.getDescription() == layoutName);
auto layouts = AudioChannelSet::channelSetsWithNumberOfChannels (numChannels);
expect (layouts.contains (expected));
for (auto layout : layouts)
expect (layout.getAmbisonicOrder() == (layout == expected ? order : -1));
}
static AudioChannelSet channelSetFromMask (uint64 mask)
{
Array<AudioChannelSet::ChannelType> channels;
for (int bit = 0; bit <= 62; ++bit)
if ((mask & (1ull << bit)) != 0)
channels.add (static_cast<AudioChannelSet::ChannelType> (bit));
return AudioChannelSet::channelSetWithChannels (channels);
}
};
static AudioChannelSetUnitTest audioChannelSetUnitTest;
#endif
} // namespace juce

+ 0
- 525
libs/juce7/source/modules/juce_audio_basics/buffers/juce_AudioChannelSet.h

@@ -1,525 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
Represents a set of audio channel types.
For example, you might have a set of left + right channels, which is a stereo
channel set. It is a collection of values from the AudioChannelSet::ChannelType
enum, where each type may only occur once within the set.
The documentation below lists which AudioChannelSet corresponds to which native
layouts used by AAX, VST2/VST3 and CoreAudio/AU. The layout tags in CoreAudio
are particularly confusing. For example, the layout which is labeled as "7.1 SDDS"
in Logic Pro, corresponds to CoreAudio/AU's kAudioChannelLayoutTag_DTS_7_0 tag, whereas
AAX's DTS 7.1 Layout corresponds to CoreAudio/AU's
kAudioChannelLayoutTag_MPEG_7_1_A format, etc. Please do not use the CoreAudio tag
as an indication to the actual layout of the speakers.
@see Bus
@tags{Audio}
*/
class JUCE_API AudioChannelSet
{
public:
/** Creates an empty channel set.
You can call addChannel to add channels to the set.
*/
AudioChannelSet() = default;
/** Creates a zero-channel set which can be used to indicate that a
bus is disabled. */
static AudioChannelSet JUCE_CALLTYPE disabled();
//==============================================================================
/** Creates a one-channel mono set (centre).
Is equivalent to: kMonoAAX (VST), AAX_eStemFormat_Mono (AAX), kAudioChannelLayoutTag_Mono (CoreAudio)
*/
static AudioChannelSet JUCE_CALLTYPE mono();
/** Creates a set containing a stereo set (left, right).
Is equivalent to: kStereo (VST), AAX_eStemFormat_Stereo (AAX), kAudioChannelLayoutTag_Stereo (CoreAudio)
*/
static AudioChannelSet JUCE_CALLTYPE stereo();
//==============================================================================
/** Creates a set containing an LCR set (left, right, centre).
Is equivalent to: k30Cine (VST), AAX_eStemFormat_LCR (AAX), kAudioChannelLayoutTag_MPEG_3_0_A (CoreAudio)
This format is referred to as "LRC" in Cubase.
This format is referred to as "LCR" in Pro Tools.
*/
static AudioChannelSet JUCE_CALLTYPE createLCR();
/** Creates a set containing an LRS set (left, right, surround).
Is equivalent to: k30Music (VST), n/a (AAX), kAudioChannelLayoutTag_ITU_2_1 (CoreAudio)
This format is referred to as "LRS" in Cubase.
*/
static AudioChannelSet JUCE_CALLTYPE createLRS();
/** Creates a set containing an LCRS set (left, right, centre, surround).
Is equivalent to: k40Cine (VST), AAX_eStemFormat_LCRS (AAX), kAudioChannelLayoutTag_MPEG_4_0_A (CoreAudio)
This format is referred to as "LCRS (Pro Logic)" in Logic Pro.
This format is referred to as "LRCS" in Cubase.
This format is referred to as "LCRS" in Pro Tools.
*/
static AudioChannelSet JUCE_CALLTYPE createLCRS();
//==============================================================================
/** Creates a set for a 5.0 surround setup (left, right, centre, leftSurround, rightSurround).
Is equivalent to: k50 (VST), AAX_eStemFormat_5_0 (AAX), kAudioChannelLayoutTag_MPEG_5_0_A (CoreAudio)
This format is referred to as "5.0" in Cubase.
This format is referred to as "5.0" in Pro Tools.
*/
static AudioChannelSet JUCE_CALLTYPE create5point0();
/** Creates a set for a 5.1 surround setup (left, right, centre, leftSurround, rightSurround, LFE).
Is equivalent to: k51 (VST), AAX_eStemFormat_5_1 (AAX), kAudioChannelLayoutTag_MPEG_5_1_A (CoreAudio)
This format is referred to as "5.1 (ITU 775)" in Logic Pro.
This format is referred to as "5.1" in Cubase.
This format is referred to as "5.1" in Pro Tools.
*/
static AudioChannelSet JUCE_CALLTYPE create5point1();
/** Creates a set for a 6.0 Cine surround setup (left, right, centre, leftSurround, rightSurround, centreSurround).
Is equivalent to: k60Cine (VST), AAX_eStemFormat_6_0 (AAX), kAudioChannelLayoutTag_AudioUnit_6_0 (CoreAudio)
Logic Pro incorrectly uses this for the surround format labeled "6.1 (ES/EX)".
This format is referred to as "6.0 Cine" in Cubase.
This format is referred to as "6.0" in Pro Tools.
*/
static AudioChannelSet JUCE_CALLTYPE create6point0();
/** Creates a set for a 6.1 Cine surround setup (left, right, centre, leftSurround, rightSurround, centreSurround, LFE).
Is equivalent to: k61Cine (VST), AAX_eStemFormat_6_1 (AAX), kAudioChannelLayoutTag_MPEG_6_1_A (CoreAudio)
This format is referred to as "6.1" in Pro Tools.
*/
static AudioChannelSet JUCE_CALLTYPE create6point1();
/** Creates a set for a 6.0 Music surround setup (left, right, leftSurround, rightSurround, leftSurroundSide, rightSurroundSide).
Is equivalent to: k60Music (VST), n/a (AAX), kAudioChannelLayoutTag_DTS_6_0_A (CoreAudio)
This format is referred to as "6.0 Music" in Cubase.
*/
static AudioChannelSet JUCE_CALLTYPE create6point0Music();
/** Creates a set for a 6.1 Music surround setup (left, right, leftSurround, rightSurround, leftSurroundSide, rightSurroundSide, LFE).
Is equivalent to: k61Music (VST), n/a (AAX), kAudioChannelLayoutTag_DTS_6_1_A (CoreAudio)
*/
static AudioChannelSet JUCE_CALLTYPE create6point1Music();
/** Creates a set for a DTS 7.0 surround setup (left, right, centre, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear).
Is equivalent to: k70Music (VST), AAX_eStemFormat_7_0_DTS (AAX), kAudioChannelLayoutTag_AudioUnit_7_0 (CoreAudio)
This format is referred to as "7.0" in Pro Tools.
*/
static AudioChannelSet JUCE_CALLTYPE create7point0();
/** Creates a set for a SDDS 7.0 surround setup (left, right, centre, leftSurround, rightSurround, leftCentre, rightCentre).
Is equivalent to: k70Cine (VST), AAX_eStemFormat_7_0_SDDS (AAX), kAudioChannelLayoutTag_AudioUnit_7_0_Front (CoreAudio)
This format is referred to as "7.0 SDDS" in Pro Tools.
*/
static AudioChannelSet JUCE_CALLTYPE create7point0SDDS();
/** Creates a set for a DTS 7.1 surround setup (left, right, centre, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear, LFE).
Is equivalent to: k71CineSideFill (VST), AAX_eStemFormat_7_1_DTS (AAX), kAudioChannelLayoutTag_MPEG_7_1_C/kAudioChannelLayoutTag_ITU_3_4_1 (CoreAudio)
This format is referred to as "7.1 (3/4.1)" in Logic Pro.
This format is referred to as "7.1" in Pro Tools.
*/
static AudioChannelSet JUCE_CALLTYPE create7point1();
/** Creates a set for a 7.1 surround setup (left, right, centre, leftSurround, rightSurround, leftCentre, rightCentre, LFE).
Is equivalent to: k71Cine (VST), AAX_eStemFormat_7_1_SDDS (AAX), kAudioChannelLayoutTag_MPEG_7_1_A (CoreAudio)
This format is referred to as "7.1 (SDDS)" in Logic Pro.
This format is referred to as "7.1 SDDS" in Pro Tools.
*/
static AudioChannelSet JUCE_CALLTYPE create7point1SDDS();
/** Creates a set for a 5.1.2 surround setup (left, right, centre, LFE, leftSurround, rightSurround, topSideLeft, topSideRight).
Is equivalent to: kAudioChannelLayoutTag_Atmos_5_1_2 (CoreAudio).
*/
static AudioChannelSet JUCE_CALLTYPE create5point1point2();
/** Creates a set for a 5.1.4 surround setup (left, right, centre, LFE, leftSurround, rightSurround, topFrontLeft, topFrontRight, topRearLeft, topRearRight).
Is equivalent to: kAudioChannelLayoutTag_Atmos_5_1_4 (CoreAudio).
*/
static AudioChannelSet JUCE_CALLTYPE create5point1point4();
/** Creates a set for Dolby Atmos 7.0.2 surround setup (left, right, centre, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear, topSideLeft, topSideRight).
Is equivalent to: n/a (VST), AAX_eStemFormat_7_0_2 (AAX), n/a (CoreAudio)
*/
static AudioChannelSet JUCE_CALLTYPE create7point0point2();
/** Creates a set for Dolby Atmos 7.1.2 surround setup (left, right, centre, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear, LFE, topSideLeft, topSideRight).
Is equivalent to: k71_2 (VST), AAX_eStemFormat_7_1_2 (AAX), kAudioChannelLayoutTag_Atmos_7_1_2 (CoreAudio)
*/
static AudioChannelSet JUCE_CALLTYPE create7point1point2();
/** Creates a set for Dolby Atmos 7.0.4 surround setup (left, right, centre, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear, topFrontLeft, topFrontRight, topRearLeft, topRearRight).
Is equivalent to: n/a (VST), n/a (AAX), n/a (CoreAudio)
*/
static AudioChannelSet JUCE_CALLTYPE create7point0point4();
/** Creates a set for Dolby Atmos 7.1.4 surround setup (left, right, centre, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear, LFE, topFrontLeft, topFrontRight, topRearLeft, topRearRight).
Is equivalent to: k71_4 (VST), n/a (AAX), kAudioChannelLayoutTag_Atmos_7_1_4 (CoreAudio)
*/
static AudioChannelSet JUCE_CALLTYPE create7point1point4();
/** Creates a set for Dolby Atmos 7.1.6 surround setup (left, right, centre, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear, LFE, topFrontLeft, topFrontRight, topSideLeft, topSideRight, topRearLeft, topRearRight).
Is equivalent to: k71_6 (VST), n/a (AAX), n/a (CoreAudio)
*/
static AudioChannelSet JUCE_CALLTYPE create7point1point6();
/** Creates a set for a 9.1.6 surround setup (left, right, centre, LFE, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear, wideLeft, wideRight, topFrontLeft, topFrontRight, topSideLeft, topSideRight, topRearLeft, topRearRight).
Note that the VST3 layout arranges the front speakers "L Lc C Rc R", but the JUCE layout
uses the arrangement "wideLeft left centre right wideRight". To maintain the relative
positions of the speakers, the channels will be remapped accordingly. This means that the
VST3 host's "L" channel will be received on a JUCE plugin's "wideLeft" channel, the
"Lc" channel will be received on a JUCE plugin's "left" channel, and so on.
Is equivalent to: k91_6 (VST3), kAudioChannelLayoutTag_Atmos_9_1_6 (CoreAudio).
*/
static AudioChannelSet JUCE_CALLTYPE create9point1point6();
//==============================================================================
/** Creates a set for quadraphonic surround setup (left, right, leftSurround, rightSurround)
Is equivalent to: k40Music (VST), AAX_eStemFormat_Quad (AAX), kAudioChannelLayoutTag_Quadraphonic (CoreAudio)
This format is referred to as "Quadraphonic" in Logic Pro.
This format is referred to as "Quadro" in Cubase.
This format is referred to as "Quad" in Pro Tools.
*/
static AudioChannelSet JUCE_CALLTYPE quadraphonic();
/** Creates a set for pentagonal surround setup (left, right, centre, leftSurroundRear, rightSurroundRear).
Is equivalent to: n/a (VST), n/a (AAX), kAudioChannelLayoutTag_Pentagonal (CoreAudio)
*/
static AudioChannelSet JUCE_CALLTYPE pentagonal();
/** Creates a set for hexagonal surround setup (left, right, leftSurroundRear, rightSurroundRear, centre, surroundCentre).
Is equivalent to: n/a (VST), n/a (AAX), kAudioChannelLayoutTag_Hexagonal (CoreAudio)
*/
static AudioChannelSet JUCE_CALLTYPE hexagonal();
/** Creates a set for octagonal surround setup (left, right, leftSurround, rightSurround, centre, centreSurround, wideLeft, wideRight).
Is equivalent to: n/a (VST), n/a (AAX), kAudioChannelLayoutTag_Octagonal (CoreAudio)
*/
static AudioChannelSet JUCE_CALLTYPE octagonal();
//==============================================================================
/** Creates a set for ACN, SN3D normalised ambisonic surround setups with a given order.
Is equivalent to: kAmbiXXXOrderACN (VST), AAX_eStemFormat_Ambi_XXX_ACN (AAX), kAudioChannelLayoutTag_HOA_ACN_SN3D (CoreAudio)
*/
static AudioChannelSet JUCE_CALLTYPE ambisonic (int order = 1);
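    // Illustrative note, not part of the original header: an ACN/SN3D layout of order N
    // carries (N + 1)^2 channels, e.g.
    //     AudioChannelSet::ambisonic (1).size() == 4
    //     AudioChannelSet::ambisonic (3).size() == 16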
/** Returns the order of the ambisonic layout represented by this AudioChannelSet. If the
AudioChannelSet is not an ambisonic layout, then this method will return -1.
*/
int getAmbisonicOrder() const;
//==============================================================================
/** Creates a set of untyped discrete channels. */
static AudioChannelSet JUCE_CALLTYPE discreteChannels (int numChannels);
/** Create a canonical channel set for a given number of channels.
For example, numChannels = 1 will return mono, numChannels = 2 will return stereo, etc. */
static AudioChannelSet JUCE_CALLTYPE canonicalChannelSet (int numChannels);
/** Create a channel set for a given number of channels which is non-discrete.
If numChannels is larger than the number of channels of the surround format
with the maximum number of channels (currently 7.1 Surround), then this
function returns an empty set.*/
static AudioChannelSet JUCE_CALLTYPE namedChannelSet (int numChannels);
/** Return an array of channel sets which have a given number of channels */
static Array<AudioChannelSet> JUCE_CALLTYPE channelSetsWithNumberOfChannels (int numChannels);
//==============================================================================
/** Represents different audio channel types. */
enum ChannelType
{
unknown = 0, /**< Unknown channel type. */
//==============================================================================
left = 1, /**< L channel. */
right = 2, /**< R channel. */
centre = 3, /**< C channel. (Sometimes M for mono) */
//==============================================================================
LFE = 4, /**< LFE channel. */
leftSurround = 5, /**< Ls channel. */
rightSurround = 6, /**< Rs channel. */
leftCentre = 7, /**< Lc (AAX/VST), Lc used as Lss in AU for most layouts. */
rightCentre = 8, /**< Rc (AAX/VST), Rc used as Rss in AU for most layouts. */
centreSurround = 9, /**< Cs/S channel. */
surround = centreSurround, /**< Same as Centre Surround channel. */
leftSurroundSide = 10, /**< Lss (AAX), Side Left "Sl" (VST), Left Centre "LC" (AU) channel. */
rightSurroundSide = 11, /**< Rss (AAX), Side Right "Sr" (VST), Right Centre "Rc" (AU) channel. */
topMiddle = 12, /**< Top Middle channel. */
topFrontLeft = 13, /**< Top Front Left channel. */
topFrontCentre = 14, /**< Top Front Centre channel. */
topFrontRight = 15, /**< Top Front Right channel. */
topRearLeft = 16, /**< Top Rear Left channel. */
topRearCentre = 17, /**< Top Rear Centre channel. */
topRearRight = 18, /**< Top Rear Right channel. */
LFE2 = 19, /**< Second LFE channel. */
leftSurroundRear = 20, /**< Lsr (AAX), Lcs (VST), Rls (AU) channel. */
rightSurroundRear = 21, /**< Rsr (AAX), Rcs (VST), Rrs (AU) channel. */
wideLeft = 22, /**< Wide Left channel. */
wideRight = 23, /**< Wide Right channel. */
//==============================================================================
// Used by Dolby Atmos 7.0.2 and 7.1.2
topSideLeft = 28, /**< Lts (AAX), Tsl (VST), Ltm (AU) channel for Dolby Atmos. */
topSideRight = 29, /**< Rts (AAX), Tsr (VST), Rtm (AU) channel for Dolby Atmos. */
//==============================================================================
// Ambisonic ACN formats - all channels are SN3D normalised
// zero-th and first-order ambisonic ACN
ambisonicACN0 = 24, /**< Zero-th ambisonic channel number 0. */
ambisonicACN1 = 25, /**< First-order ambisonic channel number 1. */
ambisonicACN2 = 26, /**< First-order ambisonic channel number 2. */
ambisonicACN3 = 27, /**< First-order ambisonic channel number 3. */
// second-order ambisonic
ambisonicACN4 = 30, /**< Second-order ambisonic channel number 4. */
ambisonicACN5 = 31, /**< Second-order ambisonic channel number 5. */
ambisonicACN6 = 32, /**< Second-order ambisonic channel number 6. */
ambisonicACN7 = 33, /**< Second-order ambisonic channel number 7. */
ambisonicACN8 = 34, /**< Second-order ambisonic channel number 8. */
// third-order ambisonic
ambisonicACN9 = 35, /**< Third-order ambisonic channel number 9. */
ambisonicACN10 = 36, /**< Third-order ambisonic channel number 10. */
ambisonicACN11 = 37, /**< Third-order ambisonic channel number 11. */
ambisonicACN12 = 38, /**< Third-order ambisonic channel number 12. */
ambisonicACN13 = 39, /**< Third-order ambisonic channel number 13. */
ambisonicACN14 = 40, /**< Third-order ambisonic channel number 14. */
ambisonicACN15 = 41, /**< Third-order ambisonic channel number 15. */
// fourth-order ambisonic
ambisonicACN16 = 42, /**< Fourth-order ambisonic channel number 16. */
ambisonicACN17 = 43, /**< Fourth-order ambisonic channel number 17. */
ambisonicACN18 = 44, /**< Fourth-order ambisonic channel number 18. */
ambisonicACN19 = 45, /**< Fourth-order ambisonic channel number 19. */
ambisonicACN20 = 46, /**< Fourth-order ambisonic channel number 20. */
ambisonicACN21 = 47, /**< Fourth-order ambisonic channel number 21. */
ambisonicACN22 = 48, /**< Fourth-order ambisonic channel number 22. */
ambisonicACN23 = 49, /**< Fourth-order ambisonic channel number 23. */
ambisonicACN24 = 50, /**< Fourth-order ambisonic channel number 24. */
// fifth-order ambisonic
ambisonicACN25 = 51, /**< Fifth-order ambisonic channel number 25. */
ambisonicACN26 = 52, /**< Fifth-order ambisonic channel number 26. */
ambisonicACN27 = 53, /**< Fifth-order ambisonic channel number 27. */
ambisonicACN28 = 54, /**< Fifth-order ambisonic channel number 28. */
ambisonicACN29 = 55, /**< Fifth-order ambisonic channel number 29. */
ambisonicACN30 = 56, /**< Fifth-order ambisonic channel number 30. */
ambisonicACN31 = 57, /**< Fifth-order ambisonic channel number 31. */
ambisonicACN32 = 58, /**< Fifth-order ambisonic channel number 32. */
ambisonicACN33 = 59, /**< Fifth-order ambisonic channel number 33. */
ambisonicACN34 = 60, /**< Fifth-order ambisonic channel number 34. */
ambisonicACN35 = 61, /**< Fifth-order ambisonic channel number 35. */
//==============================================================================
ambisonicW = ambisonicACN0, /**< Same as zero-th ambisonic channel number 0. */
ambisonicX = ambisonicACN3, /**< Same as first-order ambisonic channel number 3. */
ambisonicY = ambisonicACN1, /**< Same as first-order ambisonic channel number 1. */
ambisonicZ = ambisonicACN2, /**< Same as first-order ambisonic channel number 2. */
//==============================================================================
bottomFrontLeft = 62, /**< Bottom Front Left (Bfl) */
bottomFrontCentre = 63, /**< Bottom Front Centre (Bfc) */
bottomFrontRight = 64, /**< Bottom Front Right (Bfr) */
proximityLeft = 65, /**< Proximity Left (Pl) */
proximityRight = 66, /**< Proximity Right (Pr) */
bottomSideLeft = 67, /**< Bottom Side Left (Bsl) */
bottomSideRight = 68, /**< Bottom Side Right (Bsr) */
bottomRearLeft = 69, /**< Bottom Rear Left (Brl) */
bottomRearCentre = 70, /**< Bottom Rear Centre (Brc) */
bottomRearRight = 71, /**< Bottom Rear Right (Brr) */
//==============================================================================
discreteChannel0 = 128 /**< Non-typed individual channels are indexed upwards from this value. */
};
/** Returns the name of a given channel type. For example, this method may return "Surround Left". */
static String JUCE_CALLTYPE getChannelTypeName (ChannelType);
/** Returns the abbreviated name of a channel type. For example, this method may return "Ls". */
static String JUCE_CALLTYPE getAbbreviatedChannelTypeName (ChannelType);
/** Returns the channel type from an abbreviated name. */
static ChannelType JUCE_CALLTYPE getChannelTypeFromAbbreviation (const String& abbreviation);
//==============================================================================
enum
{
maxChannelsOfNamedLayout = 36
};
/** Adds a channel to the set. */
void addChannel (ChannelType newChannelType);
/** Removes a channel from the set. */
void removeChannel (ChannelType newChannelType);
/** Returns the number of channels in the set. */
int size() const noexcept;
/** Returns true if there are no channels in the set. */
bool isDisabled() const noexcept { return size() == 0; }
/** Returns an array of all the types in this channel set. */
Array<ChannelType> getChannelTypes() const;
/** Returns the type of one of the channels in the set, by index. */
ChannelType getTypeOfChannel (int channelIndex) const noexcept;
/** Returns the index for a particular channel-type.
Will return -1 if this set does not contain a channel of this type. */
int getChannelIndexForType (ChannelType type) const noexcept;
/** Returns a string containing a whitespace-separated list of speaker types
corresponding to each channel. For example in a 5.1 arrangement,
the string may be "L R C Lfe Ls Rs". If the speaker arrangement is unknown,
the returned string will be empty.*/
String getSpeakerArrangementAsString() const;
/** Returns an AudioChannelSet from a string returned by getSpeakerArrangementAsString
@see getSpeakerArrangementAsString */
static AudioChannelSet fromAbbreviatedString (const String& set);
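    // Illustrative note, not part of the original header: the two methods above round-trip.
    // For example (string taken from the getSpeakerArrangementAsString documentation):
    //     AudioChannelSet::create5point1().getSpeakerArrangementAsString();  // "L R C Lfe Ls Rs"
    //     AudioChannelSet::fromAbbreviatedString ("L R C Lfe Ls Rs");        // == create5point1()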
/** Returns the description of the current layout. For example, this method may return
"Quadraphonic". Note that the returned string may not be unique. */
String getDescription() const;
/** Returns if this is a channel layout made-up of discrete channels. */
bool isDiscreteLayout() const noexcept;
/** Intersect two channel layouts. */
void intersect (const AudioChannelSet& other) { channels &= other.channels; }
/** Creates a channel set for a list of channel types. This function will assert
if you supply a duplicate channel.
Note that this method ignores the order in which the channels are given, i.e.
two arrays with the same elements but in a different order will still result
in the same channel set.
*/
static AudioChannelSet JUCE_CALLTYPE channelSetWithChannels (const Array<ChannelType>&);
//==============================================================================
// Conversion between wave and juce channel layout identifiers
/** Create an AudioChannelSet from a WAVEFORMATEXTENSIBLE channelMask (typically used
in .wav files). */
static AudioChannelSet JUCE_CALLTYPE fromWaveChannelMask (int32 dwChannelMask);
/** Returns a WAVEFORMATEXTENSIBLE channelMask representation (typically used in .wav
files) of the receiver.
Returns -1 if the receiver cannot be represented in a WAVEFORMATEXTENSIBLE channelMask
representation.
*/
int32 getWaveChannelMask() const noexcept;
//==============================================================================
bool operator== (const AudioChannelSet&) const noexcept;
bool operator!= (const AudioChannelSet&) const noexcept;
bool operator< (const AudioChannelSet&) const noexcept;
private:
//==============================================================================
BigInteger channels;
//==============================================================================
explicit AudioChannelSet (uint32);
explicit AudioChannelSet (const std::initializer_list<ChannelType>&);
//==============================================================================
static int JUCE_CALLTYPE getAmbisonicOrderForNumChannels (int);
};
} // namespace juce

+ 0
- 635
libs/juce7/source/modules/juce_audio_basics/buffers/juce_AudioDataConverters.cpp

@@ -1,635 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wdeprecated-declarations")
JUCE_BEGIN_IGNORE_WARNINGS_MSVC (4996)
void AudioDataConverters::convertFloatToInt16LE (const float* source, void* dest, int numSamples, int destBytesPerSample)
{
auto maxVal = (double) 0x7fff;
auto intData = static_cast<char*> (dest);
if (dest != (void*) source || destBytesPerSample <= 4)
{
for (int i = 0; i < numSamples; ++i)
{
*unalignedPointerCast<uint16*> (intData) = ByteOrder::swapIfBigEndian ((uint16) (short) roundToInt (jlimit (-maxVal, maxVal, maxVal * source[i])));
intData += destBytesPerSample;
}
}
else
{
intData += destBytesPerSample * numSamples;
for (int i = numSamples; --i >= 0;)
{
intData -= destBytesPerSample;
*unalignedPointerCast<uint16*> (intData) = ByteOrder::swapIfBigEndian ((uint16) (short) roundToInt (jlimit (-maxVal, maxVal, maxVal * source[i])));
}
}
}
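// Illustrative sketch, not part of the original file: packing a float buffer into tightly
// packed 16-bit little-endian samples. The three-argument call mirrors the usage inside
// convertFloatToFormat() below, which relies on the default destBytesPerSample. The helper
// name and buffers are ours, for illustration only.
[[maybe_unused]] static void exampleConvertFloatToInt16LE()
{
    const float source[4] = { 0.0f, 0.5f, -0.5f, 1.0f };
    char dest[4 * 2] = {};
    AudioDataConverters::convertFloatToInt16LE (source, dest, 4);
}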
void AudioDataConverters::convertFloatToInt16BE (const float* source, void* dest, int numSamples, int destBytesPerSample)
{
auto maxVal = (double) 0x7fff;
auto intData = static_cast<char*> (dest);
if (dest != (void*) source || destBytesPerSample <= 4)
{
for (int i = 0; i < numSamples; ++i)
{
*unalignedPointerCast<uint16*> (intData) = ByteOrder::swapIfLittleEndian ((uint16) (short) roundToInt (jlimit (-maxVal, maxVal, maxVal * source[i])));
intData += destBytesPerSample;
}
}
else
{
intData += destBytesPerSample * numSamples;
for (int i = numSamples; --i >= 0;)
{
intData -= destBytesPerSample;
*unalignedPointerCast<uint16*> (intData) = ByteOrder::swapIfLittleEndian ((uint16) (short) roundToInt (jlimit (-maxVal, maxVal, maxVal * source[i])));
}
}
}
void AudioDataConverters::convertFloatToInt24LE (const float* source, void* dest, int numSamples, int destBytesPerSample)
{
auto maxVal = (double) 0x7fffff;
auto intData = static_cast<char*> (dest);
if (dest != (void*) source || destBytesPerSample <= 4)
{
for (int i = 0; i < numSamples; ++i)
{
ByteOrder::littleEndian24BitToChars (roundToInt (jlimit (-maxVal, maxVal, maxVal * source[i])), intData);
intData += destBytesPerSample;
}
}
else
{
intData += destBytesPerSample * numSamples;
for (int i = numSamples; --i >= 0;)
{
intData -= destBytesPerSample;
ByteOrder::littleEndian24BitToChars (roundToInt (jlimit (-maxVal, maxVal, maxVal * source[i])), intData);
}
}
}
void AudioDataConverters::convertFloatToInt24BE (const float* source, void* dest, int numSamples, int destBytesPerSample)
{
auto maxVal = (double) 0x7fffff;
auto intData = static_cast<char*> (dest);
if (dest != (void*) source || destBytesPerSample <= 4)
{
for (int i = 0; i < numSamples; ++i)
{
ByteOrder::bigEndian24BitToChars (roundToInt (jlimit (-maxVal, maxVal, maxVal * source[i])), intData);
intData += destBytesPerSample;
}
}
else
{
intData += destBytesPerSample * numSamples;
for (int i = numSamples; --i >= 0;)
{
intData -= destBytesPerSample;
ByteOrder::bigEndian24BitToChars (roundToInt (jlimit (-maxVal, maxVal, maxVal * source[i])), intData);
}
}
}
void AudioDataConverters::convertFloatToInt32LE (const float* source, void* dest, int numSamples, int destBytesPerSample)
{
auto maxVal = (double) 0x7fffffff;
auto intData = static_cast<char*> (dest);
if (dest != (void*) source || destBytesPerSample <= 4)
{
for (int i = 0; i < numSamples; ++i)
{
*unalignedPointerCast<uint32*> (intData) = ByteOrder::swapIfBigEndian ((uint32) roundToInt (jlimit (-maxVal, maxVal, maxVal * source[i])));
intData += destBytesPerSample;
}
}
else
{
intData += destBytesPerSample * numSamples;
for (int i = numSamples; --i >= 0;)
{
intData -= destBytesPerSample;
*unalignedPointerCast<uint32*> (intData) = ByteOrder::swapIfBigEndian ((uint32) roundToInt (jlimit (-maxVal, maxVal, maxVal * source[i])));
}
}
}
void AudioDataConverters::convertFloatToInt32BE (const float* source, void* dest, int numSamples, int destBytesPerSample)
{
auto maxVal = (double) 0x7fffffff;
auto intData = static_cast<char*> (dest);
if (dest != (void*) source || destBytesPerSample <= 4)
{
for (int i = 0; i < numSamples; ++i)
{
*unalignedPointerCast<uint32*> (intData) = ByteOrder::swapIfLittleEndian ((uint32) roundToInt (jlimit (-maxVal, maxVal, maxVal * source[i])));
intData += destBytesPerSample;
}
}
else
{
intData += destBytesPerSample * numSamples;
for (int i = numSamples; --i >= 0;)
{
intData -= destBytesPerSample;
*unalignedPointerCast<uint32*> (intData) = ByteOrder::swapIfLittleEndian ((uint32) roundToInt (jlimit (-maxVal, maxVal, maxVal * source[i])));
}
}
}
void AudioDataConverters::convertFloatToFloat32LE (const float* source, void* dest, int numSamples, int destBytesPerSample)
{
jassert (dest != (void*) source || destBytesPerSample <= 4); // This op can't be performed on in-place data!
char* d = static_cast<char*> (dest);
for (int i = 0; i < numSamples; ++i)
{
*unalignedPointerCast<float*> (d) = source[i];
#if JUCE_BIG_ENDIAN
*unalignedPointerCast<uint32*> (d) = ByteOrder::swap (*unalignedPointerCast<uint32*> (d));
#endif
d += destBytesPerSample;
}
}
void AudioDataConverters::convertFloatToFloat32BE (const float* source, void* dest, int numSamples, int destBytesPerSample)
{
jassert (dest != (void*) source || destBytesPerSample <= 4); // This op can't be performed on in-place data!
auto d = static_cast<char*> (dest);
for (int i = 0; i < numSamples; ++i)
{
*unalignedPointerCast<float*> (d) = source[i];
#if JUCE_LITTLE_ENDIAN
*unalignedPointerCast<uint32*> (d) = ByteOrder::swap (*unalignedPointerCast<uint32*> (d));
#endif
d += destBytesPerSample;
}
}
//==============================================================================
void AudioDataConverters::convertInt16LEToFloat (const void* source, float* dest, int numSamples, int srcBytesPerSample)
{
const float scale = 1.0f / 0x7fff;
auto intData = static_cast<const char*> (source);
if (source != (void*) dest || srcBytesPerSample >= 4)
{
for (int i = 0; i < numSamples; ++i)
{
dest[i] = scale * (short) ByteOrder::swapIfBigEndian (*unalignedPointerCast<const uint16*> (intData));
intData += srcBytesPerSample;
}
}
else
{
intData += srcBytesPerSample * numSamples;
for (int i = numSamples; --i >= 0;)
{
intData -= srcBytesPerSample;
dest[i] = scale * (short) ByteOrder::swapIfBigEndian (*unalignedPointerCast<const uint16*> (intData));
}
}
}
void AudioDataConverters::convertInt16BEToFloat (const void* source, float* dest, int numSamples, int srcBytesPerSample)
{
const float scale = 1.0f / 0x7fff;
auto intData = static_cast<const char*> (source);
if (source != (void*) dest || srcBytesPerSample >= 4)
{
for (int i = 0; i < numSamples; ++i)
{
dest[i] = scale * (short) ByteOrder::swapIfLittleEndian (*unalignedPointerCast<const uint16*> (intData));
intData += srcBytesPerSample;
}
}
else
{
intData += srcBytesPerSample * numSamples;
for (int i = numSamples; --i >= 0;)
{
intData -= srcBytesPerSample;
dest[i] = scale * (short) ByteOrder::swapIfLittleEndian (*unalignedPointerCast<const uint16*> (intData));
}
}
}
void AudioDataConverters::convertInt24LEToFloat (const void* source, float* dest, int numSamples, int srcBytesPerSample)
{
const float scale = 1.0f / 0x7fffff;
auto intData = static_cast<const char*> (source);
if (source != (void*) dest || srcBytesPerSample >= 4)
{
for (int i = 0; i < numSamples; ++i)
{
dest[i] = scale * (short) ByteOrder::littleEndian24Bit (intData);
intData += srcBytesPerSample;
}
}
else
{
intData += srcBytesPerSample * numSamples;
for (int i = numSamples; --i >= 0;)
{
intData -= srcBytesPerSample;
dest[i] = scale * (short) ByteOrder::littleEndian24Bit (intData);
}
}
}
void AudioDataConverters::convertInt24BEToFloat (const void* source, float* dest, int numSamples, int srcBytesPerSample)
{
const float scale = 1.0f / 0x7fffff;
auto intData = static_cast<const char*> (source);
if (source != (void*) dest || srcBytesPerSample >= 4)
{
for (int i = 0; i < numSamples; ++i)
{
dest[i] = scale * (short) ByteOrder::bigEndian24Bit (intData);
intData += srcBytesPerSample;
}
}
else
{
intData += srcBytesPerSample * numSamples;
for (int i = numSamples; --i >= 0;)
{
intData -= srcBytesPerSample;
dest[i] = scale * (short) ByteOrder::bigEndian24Bit (intData);
}
}
}
void AudioDataConverters::convertInt32LEToFloat (const void* source, float* dest, int numSamples, int srcBytesPerSample)
{
const float scale = 1.0f / (float) 0x7fffffff;
auto intData = static_cast<const char*> (source);
if (source != (void*) dest || srcBytesPerSample >= 4)
{
for (int i = 0; i < numSamples; ++i)
{
dest[i] = scale * (float) ByteOrder::swapIfBigEndian (*unalignedPointerCast<const uint32*> (intData));
intData += srcBytesPerSample;
}
}
else
{
intData += srcBytesPerSample * numSamples;
for (int i = numSamples; --i >= 0;)
{
intData -= srcBytesPerSample;
dest[i] = scale * (float) ByteOrder::swapIfBigEndian (*unalignedPointerCast<const uint32*> (intData));
}
}
}
void AudioDataConverters::convertInt32BEToFloat (const void* source, float* dest, int numSamples, int srcBytesPerSample)
{
const float scale = 1.0f / (float) 0x7fffffff;
auto intData = static_cast<const char*> (source);
if (source != (void*) dest || srcBytesPerSample >= 4)
{
for (int i = 0; i < numSamples; ++i)
{
dest[i] = scale * (float) ByteOrder::swapIfLittleEndian (*unalignedPointerCast<const uint32*> (intData));
intData += srcBytesPerSample;
}
}
else
{
intData += srcBytesPerSample * numSamples;
for (int i = numSamples; --i >= 0;)
{
intData -= srcBytesPerSample;
dest[i] = scale * (float) ByteOrder::swapIfLittleEndian (*unalignedPointerCast<const uint32*> (intData));
}
}
}
void AudioDataConverters::convertFloat32LEToFloat (const void* source, float* dest, int numSamples, int srcBytesPerSample)
{
auto s = static_cast<const char*> (source);
for (int i = 0; i < numSamples; ++i)
{
dest[i] = *unalignedPointerCast<const float*> (s);
#if JUCE_BIG_ENDIAN
auto d = unalignedPointerCast<uint32*> (dest + i);
*d = ByteOrder::swap (*d);
#endif
s += srcBytesPerSample;
}
}
void AudioDataConverters::convertFloat32BEToFloat (const void* source, float* dest, int numSamples, int srcBytesPerSample)
{
auto s = static_cast<const char*> (source);
for (int i = 0; i < numSamples; ++i)
{
dest[i] = *unalignedPointerCast<const float*> (s);
#if JUCE_LITTLE_ENDIAN
auto d = unalignedPointerCast<uint32*> (dest + i);
*d = ByteOrder::swap (*d);
#endif
s += srcBytesPerSample;
}
}
//==============================================================================
void AudioDataConverters::convertFloatToFormat (DataFormat destFormat, const float* source, void* dest, int numSamples)
{
switch (destFormat)
{
case int16LE: convertFloatToInt16LE (source, dest, numSamples); break;
case int16BE: convertFloatToInt16BE (source, dest, numSamples); break;
case int24LE: convertFloatToInt24LE (source, dest, numSamples); break;
case int24BE: convertFloatToInt24BE (source, dest, numSamples); break;
case int32LE: convertFloatToInt32LE (source, dest, numSamples); break;
case int32BE: convertFloatToInt32BE (source, dest, numSamples); break;
case float32LE: convertFloatToFloat32LE (source, dest, numSamples); break;
case float32BE: convertFloatToFloat32BE (source, dest, numSamples); break;
default: jassertfalse; break;
}
}
void AudioDataConverters::convertFormatToFloat (DataFormat sourceFormat, const void* source, float* dest, int numSamples)
{
switch (sourceFormat)
{
case int16LE: convertInt16LEToFloat (source, dest, numSamples); break;
case int16BE: convertInt16BEToFloat (source, dest, numSamples); break;
case int24LE: convertInt24LEToFloat (source, dest, numSamples); break;
case int24BE: convertInt24BEToFloat (source, dest, numSamples); break;
case int32LE: convertInt32LEToFloat (source, dest, numSamples); break;
case int32BE: convertInt32BEToFloat (source, dest, numSamples); break;
case float32LE: convertFloat32LEToFloat (source, dest, numSamples); break;
case float32BE: convertFloat32BEToFloat (source, dest, numSamples); break;
default: jassertfalse; break;
}
}
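// Illustrative sketch, not part of the original file: the DataFormat overloads above let the
// sample format be chosen at runtime, e.g. from a file header. Names and buffers are ours.
[[maybe_unused]] static void exampleConvertFormatToFloat()
{
    char raw[8 * 3] = {};   // 8 samples of packed 24-bit little-endian data (all silence here)
    float decoded[8];
    AudioDataConverters::convertFormatToFloat (AudioDataConverters::int24LE, raw, decoded, 8);
}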
//==============================================================================
void AudioDataConverters::interleaveSamples (const float** source, float* dest, int numSamples, int numChannels)
{
using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
AudioData::interleaveSamples (AudioData::NonInterleavedSource<Format> { source, numChannels },
AudioData::InterleavedDest<Format> { dest, numChannels },
numSamples);
}
void AudioDataConverters::deinterleaveSamples (const float* source, float** dest, int numSamples, int numChannels)
{
using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
AudioData::deinterleaveSamples (AudioData::InterleavedSource<Format> { source, numChannels },
AudioData::NonInterleavedDest<Format> { dest, numChannels },
numSamples);
}
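// Illustrative sketch, not part of the original file: interleaving two mono buffers into a
// single L R L R ... stream via the convenience wrapper above. Buffer names are ours.
[[maybe_unused]] static void exampleInterleave()
{
    float leftCh[16] = {}, rightCh[16] = {};
    float interleaved[32];
    const float* inputs[] = { leftCh, rightCh };
    AudioDataConverters::interleaveSamples (inputs, interleaved, 16, 2);
}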
//==============================================================================
//==============================================================================
#if JUCE_UNIT_TESTS
class AudioConversionTests : public UnitTest
{
public:
AudioConversionTests()
: UnitTest ("Audio data conversion", UnitTestCategories::audio)
{}
template <class F1, class E1, class F2, class E2>
struct Test5
{
static void test (UnitTest& unitTest, Random& r)
{
test (unitTest, false, r);
test (unitTest, true, r);
}
JUCE_BEGIN_IGNORE_WARNINGS_MSVC (6262)
static void test (UnitTest& unitTest, bool inPlace, Random& r)
{
const int numSamples = 2048;
int32 original [(size_t) numSamples],
converted[(size_t) numSamples],
reversed [(size_t) numSamples];
{
AudioData::Pointer<F1, E1, AudioData::NonInterleaved, AudioData::NonConst> d (original);
bool clippingFailed = false;
for (int i = 0; i < numSamples / 2; ++i)
{
d.setAsFloat (r.nextFloat() * 2.2f - 1.1f);
if (! d.isFloatingPoint())
clippingFailed = d.getAsFloat() > 1.0f || d.getAsFloat() < -1.0f || clippingFailed;
++d;
d.setAsInt32 (r.nextInt());
++d;
}
unitTest.expect (! clippingFailed);
}
// convert data from the source to dest format..
std::unique_ptr<AudioData::Converter> conv (new AudioData::ConverterInstance<AudioData::Pointer<F1, E1, AudioData::NonInterleaved, AudioData::Const>,
AudioData::Pointer<F2, E2, AudioData::NonInterleaved, AudioData::NonConst>>());
conv->convertSamples (inPlace ? reversed : converted, original, numSamples);
// ..and back again..
conv.reset (new AudioData::ConverterInstance<AudioData::Pointer<F2, E2, AudioData::NonInterleaved, AudioData::Const>,
AudioData::Pointer<F1, E1, AudioData::NonInterleaved, AudioData::NonConst>>());
if (! inPlace)
zeromem (reversed, sizeof (reversed));
conv->convertSamples (reversed, inPlace ? reversed : converted, numSamples);
{
int biggestDiff = 0;
AudioData::Pointer<F1, E1, AudioData::NonInterleaved, AudioData::Const> d1 (original);
AudioData::Pointer<F1, E1, AudioData::NonInterleaved, AudioData::Const> d2 (reversed);
const int errorMargin = 2 * AudioData::Pointer<F1, E1, AudioData::NonInterleaved, AudioData::Const>::get32BitResolution()
+ AudioData::Pointer<F2, E2, AudioData::NonInterleaved, AudioData::Const>::get32BitResolution();
for (int i = 0; i < numSamples; ++i)
{
biggestDiff = jmax (biggestDiff, std::abs (d1.getAsInt32() - d2.getAsInt32()));
++d1;
++d2;
}
unitTest.expect (biggestDiff <= errorMargin);
}
}
JUCE_END_IGNORE_WARNINGS_MSVC
};
template <class F1, class E1, class FormatType>
struct Test3
{
static void test (UnitTest& unitTest, Random& r)
{
Test5 <F1, E1, FormatType, AudioData::BigEndian>::test (unitTest, r);
Test5 <F1, E1, FormatType, AudioData::LittleEndian>::test (unitTest, r);
}
};
template <class FormatType, class Endianness>
struct Test2
{
static void test (UnitTest& unitTest, Random& r)
{
Test3 <FormatType, Endianness, AudioData::Int8>::test (unitTest, r);
Test3 <FormatType, Endianness, AudioData::UInt8>::test (unitTest, r);
Test3 <FormatType, Endianness, AudioData::Int16>::test (unitTest, r);
Test3 <FormatType, Endianness, AudioData::Int24>::test (unitTest, r);
Test3 <FormatType, Endianness, AudioData::Int32>::test (unitTest, r);
Test3 <FormatType, Endianness, AudioData::Float32>::test (unitTest, r);
}
};
template <class FormatType>
struct Test1
{
static void test (UnitTest& unitTest, Random& r)
{
Test2 <FormatType, AudioData::BigEndian>::test (unitTest, r);
Test2 <FormatType, AudioData::LittleEndian>::test (unitTest, r);
}
};
void runTest() override
{
auto r = getRandom();
beginTest ("Round-trip conversion: Int8");
Test1 <AudioData::Int8>::test (*this, r);
beginTest ("Round-trip conversion: Int16");
Test1 <AudioData::Int16>::test (*this, r);
beginTest ("Round-trip conversion: Int24");
Test1 <AudioData::Int24>::test (*this, r);
beginTest ("Round-trip conversion: Int32");
Test1 <AudioData::Int32>::test (*this, r);
beginTest ("Round-trip conversion: Float32");
Test1 <AudioData::Float32>::test (*this, r);
using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
beginTest ("Interleaving");
{
constexpr auto numChannels = 4;
constexpr auto numSamples = 512;
AudioBuffer<float> sourceBuffer { numChannels, numSamples },
destBuffer { 1, numChannels * numSamples };
for (int ch = 0; ch < numChannels; ++ch)
for (int i = 0; i < numSamples; ++i)
sourceBuffer.setSample (ch, i, r.nextFloat());
AudioData::interleaveSamples (AudioData::NonInterleavedSource<Format> { sourceBuffer.getArrayOfReadPointers(), numChannels },
AudioData::InterleavedDest<Format> { destBuffer.getWritePointer (0), numChannels },
numSamples);
for (int ch = 0; ch < numChannels; ++ch)
for (int i = 0; i < numSamples; ++i)
expect (destBuffer.getSample (0, ch + (i * numChannels)) == sourceBuffer.getSample (ch, i));
}
beginTest ("Deinterleaving");
{
constexpr auto numChannels = 4;
constexpr auto numSamples = 512;
AudioBuffer<float> sourceBuffer { 1, numChannels * numSamples },
destBuffer { numChannels, numSamples };
for (int ch = 0; ch < numChannels; ++ch)
for (int i = 0; i < numSamples; ++i)
sourceBuffer.setSample (0, ch + (i * numChannels), r.nextFloat());
AudioData::deinterleaveSamples (AudioData::InterleavedSource<Format> { sourceBuffer.getReadPointer (0), numChannels },
AudioData::NonInterleavedDest<Format> { destBuffer.getArrayOfWritePointers(), numChannels },
numSamples);
for (int ch = 0; ch < numChannels; ++ch)
for (int i = 0; i < numSamples; ++i)
expect (sourceBuffer.getSample (0, ch + (i * numChannels)) == destBuffer.getSample (ch, i));
}
}
};
static AudioConversionTests audioConversionUnitTests;
#endif
JUCE_END_IGNORE_WARNINGS_MSVC
JUCE_END_IGNORE_WARNINGS_GCC_LIKE
} // namespace juce

+ 0
- 857
libs/juce7/source/modules/juce_audio_basics/buffers/juce_AudioDataConverters.h

@@ -1,857 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
This class is a container which holds all the classes pertaining to the AudioData::Pointer
audio sample format class.
@see AudioData::Pointer.
@tags{Audio}
*/
class JUCE_API AudioData
{
public:
//==============================================================================
// These types can be used as the SampleFormat template parameter for the AudioData::Pointer class.
class Int8; /**< Used as a template parameter for AudioData::Pointer. Indicates an 8-bit integer packed data format. */
class UInt8; /**< Used as a template parameter for AudioData::Pointer. Indicates an 8-bit unsigned integer packed data format. */
class Int16; /**< Used as a template parameter for AudioData::Pointer. Indicates a 16-bit integer packed data format. */
class Int24; /**< Used as a template parameter for AudioData::Pointer. Indicates a 24-bit integer packed data format. */
class Int32; /**< Used as a template parameter for AudioData::Pointer. Indicates a 32-bit integer packed data format. */
class Float32; /**< Used as a template parameter for AudioData::Pointer. Indicates a 32-bit float data format. */
//==============================================================================
// These types can be used as the Endianness template parameter for the AudioData::Pointer class.
class BigEndian; /**< Used as a template parameter for AudioData::Pointer. Indicates that the samples are stored in big-endian order. */
class LittleEndian; /**< Used as a template parameter for AudioData::Pointer. Indicates that the samples are stored in little-endian order. */
class NativeEndian; /**< Used as a template parameter for AudioData::Pointer. Indicates that the samples are stored in the CPU's native endianness. */
//==============================================================================
// These types can be used as the InterleavingType template parameter for the AudioData::Pointer class.
class NonInterleaved; /**< Used as a template parameter for AudioData::Pointer. Indicates that the samples are stored contiguously. */
class Interleaved; /**< Used as a template parameter for AudioData::Pointer. Indicates that the samples are interleaved with a number of other channels. */
//==============================================================================
// These types can be used as the Constness template parameter for the AudioData::Pointer class.
class NonConst; /**< Used as a template parameter for AudioData::Pointer. Indicates that the pointer can be used for non-const data. */
class Const; /**< Used as a template parameter for AudioData::Pointer. Indicates that the samples can only be used for const data. */
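// Illustrative note, not part of the original header: these tags combine into an
// AudioData::Pointer. For example, writing non-interleaved, native-endian 16-bit samples
// into a hypothetical rawBuffer (usage mirrors the unit test in juce_AudioDataConverters.cpp):
//
//     AudioData::Pointer<AudioData::Int16, AudioData::NativeEndian,
//                        AudioData::NonInterleaved, AudioData::NonConst> p (rawBuffer);
//     p.setAsFloat (0.5f);   // write one sample, scaled to the target format
//     ++p;                   // advance to the next sample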
#ifndef DOXYGEN
//==============================================================================
class BigEndian
{
public:
template <class SampleFormatType> static float getAsFloat (SampleFormatType& s) noexcept { return s.getAsFloatBE(); }
template <class SampleFormatType> static void setAsFloat (SampleFormatType& s, float newValue) noexcept { s.setAsFloatBE (newValue); }
template <class SampleFormatType> static int32 getAsInt32 (SampleFormatType& s) noexcept { return s.getAsInt32BE(); }
template <class SampleFormatType> static void setAsInt32 (SampleFormatType& s, int32 newValue) noexcept { s.setAsInt32BE (newValue); }
template <class SourceType, class DestType> static void copyFrom (DestType& dest, SourceType& source) noexcept { dest.copyFromBE (source); }
enum { isBigEndian = 1 };
};
class LittleEndian
{
public:
template <class SampleFormatType> static float getAsFloat (SampleFormatType& s) noexcept { return s.getAsFloatLE(); }
template <class SampleFormatType> static void setAsFloat (SampleFormatType& s, float newValue) noexcept { s.setAsFloatLE (newValue); }
template <class SampleFormatType> static int32 getAsInt32 (SampleFormatType& s) noexcept { return s.getAsInt32LE(); }
template <class SampleFormatType> static void setAsInt32 (SampleFormatType& s, int32 newValue) noexcept { s.setAsInt32LE (newValue); }
template <class SourceType, class DestType> static void copyFrom (DestType& dest, SourceType& source) noexcept { dest.copyFromLE (source); }
enum { isBigEndian = 0 };
};
#if JUCE_BIG_ENDIAN
class NativeEndian : public BigEndian {};
#else
class NativeEndian : public LittleEndian {};
#endif
//==============================================================================
class Int8
{
public:
inline Int8 (void* d) noexcept : data (static_cast<int8*> (d)) {}
inline void advance() noexcept { ++data; }
inline void skip (int numSamples) noexcept { data += numSamples; }
inline float getAsFloatLE() const noexcept { return (float) (*data * (1.0 / (1.0 + (double) maxValue))); }
inline float getAsFloatBE() const noexcept { return getAsFloatLE(); }
inline void setAsFloatLE (float newValue) noexcept { *data = (int8) jlimit ((int) -maxValue, (int) maxValue, roundToInt (newValue * (1.0 + (double) maxValue))); }
inline void setAsFloatBE (float newValue) noexcept { setAsFloatLE (newValue); }
inline int32 getAsInt32LE() const noexcept { return (int) (*((uint8*) data) << 24); }
inline int32 getAsInt32BE() const noexcept { return getAsInt32LE(); }
inline void setAsInt32LE (int newValue) noexcept { *data = (int8) (newValue >> 24); }
inline void setAsInt32BE (int newValue) noexcept { setAsInt32LE (newValue); }
inline void clear() noexcept { *data = 0; }
inline void clearMultiple (int num) noexcept { zeromem (data, (size_t) (num * bytesPerSample)) ;}
template <class SourceType> inline void copyFromLE (SourceType& source) noexcept { setAsInt32LE (source.getAsInt32()); }
template <class SourceType> inline void copyFromBE (SourceType& source) noexcept { setAsInt32BE (source.getAsInt32()); }
inline void copyFromSameType (Int8& source) noexcept { *data = *source.data; }
int8* data;
enum { bytesPerSample = 1, maxValue = 0x7f, resolution = (1 << 24), isFloat = 0 };
};
class UInt8
{
public:
inline UInt8 (void* d) noexcept : data (static_cast<uint8*> (d)) {}
inline void advance() noexcept { ++data; }
inline void skip (int numSamples) noexcept { data += numSamples; }
inline float getAsFloatLE() const noexcept { return (float) ((*data - 128) * (1.0 / (1.0 + (double) maxValue))); }
inline float getAsFloatBE() const noexcept { return getAsFloatLE(); }
inline void setAsFloatLE (float newValue) noexcept { *data = (uint8) jlimit (0, 255, 128 + roundToInt (newValue * (1.0 + (double) maxValue))); }
inline void setAsFloatBE (float newValue) noexcept { setAsFloatLE (newValue); }
inline int32 getAsInt32LE() const noexcept { return (int) (((uint8) (*data - 128)) << 24); }
inline int32 getAsInt32BE() const noexcept { return getAsInt32LE(); }
inline void setAsInt32LE (int newValue) noexcept { *data = (uint8) (128 + (newValue >> 24)); }
inline void setAsInt32BE (int newValue) noexcept { setAsInt32LE (newValue); }
inline void clear() noexcept { *data = 128; }
inline void clearMultiple (int num) noexcept { memset (data, 128, (size_t) num) ;}
template <class SourceType> inline void copyFromLE (SourceType& source) noexcept { setAsInt32LE (source.getAsInt32()); }
template <class SourceType> inline void copyFromBE (SourceType& source) noexcept { setAsInt32BE (source.getAsInt32()); }
inline void copyFromSameType (UInt8& source) noexcept { *data = *source.data; }
uint8* data;
enum { bytesPerSample = 1, maxValue = 0x7f, resolution = (1 << 24), isFloat = 0 };
};
class Int16
{
public:
inline Int16 (void* d) noexcept : data (static_cast<uint16*> (d)) {}
inline void advance() noexcept { ++data; }
inline void skip (int numSamples) noexcept { data += numSamples; }
inline float getAsFloatLE() const noexcept { return (float) ((1.0 / (1.0 + (double) maxValue)) * (int16) ByteOrder::swapIfBigEndian (*data)); }
inline float getAsFloatBE() const noexcept { return (float) ((1.0 / (1.0 + (double) maxValue)) * (int16) ByteOrder::swapIfLittleEndian (*data)); }
inline void setAsFloatLE (float newValue) noexcept { *data = ByteOrder::swapIfBigEndian ((uint16) jlimit ((int) -maxValue, (int) maxValue, roundToInt (newValue * (1.0 + (double) maxValue)))); }
inline void setAsFloatBE (float newValue) noexcept { *data = ByteOrder::swapIfLittleEndian ((uint16) jlimit ((int) -maxValue, (int) maxValue, roundToInt (newValue * (1.0 + (double) maxValue)))); }
inline int32 getAsInt32LE() const noexcept { return (int32) (ByteOrder::swapIfBigEndian ((uint16) *data) << 16); }
inline int32 getAsInt32BE() const noexcept { return (int32) (ByteOrder::swapIfLittleEndian ((uint16) *data) << 16); }
inline void setAsInt32LE (int32 newValue) noexcept { *data = ByteOrder::swapIfBigEndian ((uint16) (newValue >> 16)); }
inline void setAsInt32BE (int32 newValue) noexcept { *data = ByteOrder::swapIfLittleEndian ((uint16) (newValue >> 16)); }
inline void clear() noexcept { *data = 0; }
inline void clearMultiple (int num) noexcept { zeromem (data, (size_t) (num * bytesPerSample)); }
template <class SourceType> inline void copyFromLE (SourceType& source) noexcept { setAsInt32LE (source.getAsInt32()); }
template <class SourceType> inline void copyFromBE (SourceType& source) noexcept { setAsInt32BE (source.getAsInt32()); }
inline void copyFromSameType (Int16& source) noexcept { *data = *source.data; }
uint16* data;
enum { bytesPerSample = 2, maxValue = 0x7fff, resolution = (1 << 16), isFloat = 0 };
};
class Int24
{
public:
inline Int24 (void* d) noexcept : data (static_cast<char*> (d)) {}
inline void advance() noexcept { data += 3; }
inline void skip (int numSamples) noexcept { data += 3 * numSamples; }
inline float getAsFloatLE() const noexcept { return (float) (ByteOrder::littleEndian24Bit (data) * (1.0 / (1.0 + (double) maxValue))); }
inline float getAsFloatBE() const noexcept { return (float) (ByteOrder::bigEndian24Bit (data) * (1.0 / (1.0 + (double) maxValue))); }
inline void setAsFloatLE (float newValue) noexcept { ByteOrder::littleEndian24BitToChars (jlimit ((int) -maxValue, (int) maxValue, roundToInt (newValue * (1.0 + (double) maxValue))), data); }
inline void setAsFloatBE (float newValue) noexcept { ByteOrder::bigEndian24BitToChars (jlimit ((int) -maxValue, (int) maxValue, roundToInt (newValue * (1.0 + (double) maxValue))), data); }
inline int32 getAsInt32LE() const noexcept { return (int32) (((unsigned int) ByteOrder::littleEndian24Bit (data)) << 8); }
inline int32 getAsInt32BE() const noexcept { return (int32) (((unsigned int) ByteOrder::bigEndian24Bit (data)) << 8); }
inline void setAsInt32LE (int32 newValue) noexcept { ByteOrder::littleEndian24BitToChars (newValue >> 8, data); }
inline void setAsInt32BE (int32 newValue) noexcept { ByteOrder::bigEndian24BitToChars (newValue >> 8, data); }
inline void clear() noexcept { data[0] = 0; data[1] = 0; data[2] = 0; }
inline void clearMultiple (int num) noexcept { zeromem (data, (size_t) (num * bytesPerSample)); }
template <class SourceType> inline void copyFromLE (SourceType& source) noexcept { setAsInt32LE (source.getAsInt32()); }
template <class SourceType> inline void copyFromBE (SourceType& source) noexcept { setAsInt32BE (source.getAsInt32()); }
inline void copyFromSameType (Int24& source) noexcept { data[0] = source.data[0]; data[1] = source.data[1]; data[2] = source.data[2]; }
char* data;
enum { bytesPerSample = 3, maxValue = 0x7fffff, resolution = (1 << 8), isFloat = 0 };
};
class Int32
{
public:
inline Int32 (void* d) noexcept : data (static_cast<uint32*> (d)) {}
inline void advance() noexcept { ++data; }
inline void skip (int numSamples) noexcept { data += numSamples; }
inline float getAsFloatLE() const noexcept { return (float) ((1.0 / (1.0 + (double) maxValue)) * (int32) ByteOrder::swapIfBigEndian (*data)); }
inline float getAsFloatBE() const noexcept { return (float) ((1.0 / (1.0 + (double) maxValue)) * (int32) ByteOrder::swapIfLittleEndian (*data)); }
inline void setAsFloatLE (float newValue) noexcept { *data = ByteOrder::swapIfBigEndian ((uint32) (int32) ((double) maxValue * jlimit (-1.0, 1.0, (double) newValue))); }
inline void setAsFloatBE (float newValue) noexcept { *data = ByteOrder::swapIfLittleEndian ((uint32) (int32) ((double) maxValue * jlimit (-1.0, 1.0, (double) newValue))); }
inline int32 getAsInt32LE() const noexcept { return (int32) ByteOrder::swapIfBigEndian (*data); }
inline int32 getAsInt32BE() const noexcept { return (int32) ByteOrder::swapIfLittleEndian (*data); }
inline void setAsInt32LE (int32 newValue) noexcept { *data = ByteOrder::swapIfBigEndian ((uint32) newValue); }
inline void setAsInt32BE (int32 newValue) noexcept { *data = ByteOrder::swapIfLittleEndian ((uint32) newValue); }
inline void clear() noexcept { *data = 0; }
inline void clearMultiple (int num) noexcept { zeromem (data, (size_t) (num * bytesPerSample)); }
template <class SourceType> inline void copyFromLE (SourceType& source) noexcept { setAsInt32LE (source.getAsInt32()); }
template <class SourceType> inline void copyFromBE (SourceType& source) noexcept { setAsInt32BE (source.getAsInt32()); }
inline void copyFromSameType (Int32& source) noexcept { *data = *source.data; }
uint32* data;
enum { bytesPerSample = 4, maxValue = 0x7fffffff, resolution = 1, isFloat = 0 };
};
/** A 32-bit integer type, of which only the bottom 24 bits are used. */
class Int24in32 : public Int32
{
public:
inline Int24in32 (void* d) noexcept : Int32 (d) {}
inline float getAsFloatLE() const noexcept { return (float) ((1.0 / (1.0 + (double) maxValue)) * (int32) ByteOrder::swapIfBigEndian (*data)); }
inline float getAsFloatBE() const noexcept { return (float) ((1.0 / (1.0 + (double) maxValue)) * (int32) ByteOrder::swapIfLittleEndian (*data)); }
inline void setAsFloatLE (float newValue) noexcept { *data = ByteOrder::swapIfBigEndian ((uint32) ((double) maxValue * jlimit (-1.0, 1.0, (double) newValue))); }
inline void setAsFloatBE (float newValue) noexcept { *data = ByteOrder::swapIfLittleEndian ((uint32) ((double) maxValue * jlimit (-1.0, 1.0, (double) newValue))); }
inline int32 getAsInt32LE() const noexcept { return (int32) ByteOrder::swapIfBigEndian (*data) << 8; }
inline int32 getAsInt32BE() const noexcept { return (int32) ByteOrder::swapIfLittleEndian (*data) << 8; }
inline void setAsInt32LE (int32 newValue) noexcept { *data = ByteOrder::swapIfBigEndian ((uint32) newValue >> 8); }
inline void setAsInt32BE (int32 newValue) noexcept { *data = ByteOrder::swapIfLittleEndian ((uint32) newValue >> 8); }
template <class SourceType> inline void copyFromLE (SourceType& source) noexcept { setAsInt32LE (source.getAsInt32()); }
template <class SourceType> inline void copyFromBE (SourceType& source) noexcept { setAsInt32BE (source.getAsInt32()); }
inline void copyFromSameType (Int24in32& source) noexcept { *data = *source.data; }
enum { bytesPerSample = 4, maxValue = 0x7fffff, resolution = (1 << 8), isFloat = 0 };
};
class Float32
{
public:
inline Float32 (void* d) noexcept : data (static_cast<float*> (d)) {}
inline void advance() noexcept { ++data; }
inline void skip (int numSamples) noexcept { data += numSamples; }
#if JUCE_BIG_ENDIAN
inline float getAsFloatBE() const noexcept { return *data; }
inline void setAsFloatBE (float newValue) noexcept { *data = newValue; }
inline float getAsFloatLE() const noexcept { union { uint32 asInt; float asFloat; } n; n.asInt = ByteOrder::swap (*(uint32*) data); return n.asFloat; }
inline void setAsFloatLE (float newValue) noexcept { union { uint32 asInt; float asFloat; } n; n.asFloat = newValue; *(uint32*) data = ByteOrder::swap (n.asInt); }
#else
inline float getAsFloatLE() const noexcept { return *data; }
inline void setAsFloatLE (float newValue) noexcept { *data = newValue; }
inline float getAsFloatBE() const noexcept { union { uint32 asInt; float asFloat; } n; n.asInt = ByteOrder::swap (*(uint32*) data); return n.asFloat; }
inline void setAsFloatBE (float newValue) noexcept { union { uint32 asInt; float asFloat; } n; n.asFloat = newValue; *(uint32*) data = ByteOrder::swap (n.asInt); }
#endif
inline int32 getAsInt32LE() const noexcept { return (int32) roundToInt (jlimit (-1.0, 1.0, (double) getAsFloatLE()) * (double) maxValue); }
inline int32 getAsInt32BE() const noexcept { return (int32) roundToInt (jlimit (-1.0, 1.0, (double) getAsFloatBE()) * (double) maxValue); }
inline void setAsInt32LE (int32 newValue) noexcept { setAsFloatLE ((float) (newValue * (1.0 / (1.0 + (double) maxValue)))); }
inline void setAsInt32BE (int32 newValue) noexcept { setAsFloatBE ((float) (newValue * (1.0 / (1.0 + (double) maxValue)))); }
inline void clear() noexcept { *data = 0; }
inline void clearMultiple (int num) noexcept { zeromem (data, (size_t) (num * bytesPerSample)); }
template <class SourceType> inline void copyFromLE (SourceType& source) noexcept { setAsFloatLE (source.getAsFloat()); }
template <class SourceType> inline void copyFromBE (SourceType& source) noexcept { setAsFloatBE (source.getAsFloat()); }
inline void copyFromSameType (Float32& source) noexcept { *data = *source.data; }
float* data;
enum { bytesPerSample = 4, maxValue = 0x7fffffff, resolution = (1 << 8), isFloat = 1 };
};
//==============================================================================
class NonInterleaved
{
public:
inline NonInterleaved() = default;
inline NonInterleaved (const NonInterleaved&) = default;
inline NonInterleaved (const int) noexcept {}
inline void copyFrom (const NonInterleaved&) noexcept {}
template <class SampleFormatType> inline void advanceData (SampleFormatType& s) noexcept { s.advance(); }
template <class SampleFormatType> inline void advanceDataBy (SampleFormatType& s, int numSamples) noexcept { s.skip (numSamples); }
template <class SampleFormatType> inline void clear (SampleFormatType& s, int numSamples) noexcept { s.clearMultiple (numSamples); }
template <class SampleFormatType> static int getNumBytesBetweenSamples (const SampleFormatType&) noexcept { return SampleFormatType::bytesPerSample; }
enum { isInterleavedType = 0, numInterleavedChannels = 1 };
};
class Interleaved
{
public:
inline Interleaved() noexcept {}
inline Interleaved (const Interleaved& other) = default;
inline Interleaved (const int numInterleavedChans) noexcept : numInterleavedChannels (numInterleavedChans) {}
inline void copyFrom (const Interleaved& other) noexcept { numInterleavedChannels = other.numInterleavedChannels; }
template <class SampleFormatType> inline void advanceData (SampleFormatType& s) noexcept { s.skip (numInterleavedChannels); }
template <class SampleFormatType> inline void advanceDataBy (SampleFormatType& s, int numSamples) noexcept { s.skip (numInterleavedChannels * numSamples); }
template <class SampleFormatType> inline void clear (SampleFormatType& s, int numSamples) noexcept { while (--numSamples >= 0) { s.clear(); s.skip (numInterleavedChannels); } }
template <class SampleFormatType> inline int getNumBytesBetweenSamples (const SampleFormatType&) const noexcept { return numInterleavedChannels * SampleFormatType::bytesPerSample; }
int numInterleavedChannels = 1;
enum { isInterleavedType = 1 };
};
//==============================================================================
class NonConst
{
public:
using VoidType = void;
static void* toVoidPtr (VoidType* v) noexcept { return v; }
enum { isConst = 0 };
};
class Const
{
public:
using VoidType = const void;
static void* toVoidPtr (VoidType* v) noexcept { return const_cast<void*> (v); }
enum { isConst = 1 };
};
#endif
//==============================================================================
/**
A pointer to a block of audio data with a particular encoding.
This object can be used to read and write from blocks of encoded audio samples. To create one, you specify
the audio format as a series of template parameters, e.g.
@code
// this creates a pointer for reading from a const array of 16-bit little-endian packed samples.
AudioData::Pointer <AudioData::Int16,
AudioData::LittleEndian,
AudioData::NonInterleaved,
AudioData::Const> pointer (someRawAudioData);
// These methods read the sample that is being pointed to
float firstSampleAsFloat = pointer.getAsFloat();
int32 firstSampleAsInt = pointer.getAsInt32();
++pointer; // moves the pointer to the next sample.
pointer += 3; // skips the next 3 samples.
@endcode
The convertSamples() method lets you copy a range of samples from one format to another, automatically
converting its format.
@see AudioData::Converter
*/
template <typename SampleFormat,
typename Endianness,
typename InterleavingType,
typename Constness>
class Pointer : private InterleavingType // (inherited for EBCO)
{
public:
//==============================================================================
/** Creates a non-interleaved pointer from some raw data in the appropriate format.
This constructor is only used if you've specified the AudioData::NonInterleaved option -
for interleaved formats, use the constructor that also takes a number of channels.
*/
Pointer (typename Constness::VoidType* sourceData) noexcept
: data (Constness::toVoidPtr (sourceData))
{
// If you're using interleaved data, call the other constructor! If you're using non-interleaved data,
// you should pass NonInterleaved as the template parameter for the interleaving type!
static_assert (InterleavingType::isInterleavedType == 0, "Incorrect constructor for interleaved data");
}
/** Creates a pointer from some raw data in the appropriate format with the specified number of interleaved channels.
For non-interleaved data, use the other constructor.
*/
Pointer (typename Constness::VoidType* sourceData, int numInterleaved) noexcept
: InterleavingType (numInterleaved), data (Constness::toVoidPtr (sourceData))
{
}
/** Creates a copy of another pointer. */
Pointer (const Pointer& other) noexcept
: InterleavingType (other), data (other.data)
{
}
Pointer& operator= (const Pointer& other) noexcept
{
InterleavingType::operator= (other);
data = other.data;
return *this;
}
//==============================================================================
/** Returns the value of the first sample as a floating point value.
The value will be in the range -1.0 to 1.0 for integer formats. For floating point
formats, the value could be outside that range, although -1 to 1 is the standard range.
*/
inline float getAsFloat() const noexcept { return Endianness::getAsFloat (data); }
/** Sets the value of the first sample as a floating point value.
(This method can only be used if the AudioData::NonConst option was used).
The value should be in the range -1.0 to 1.0 - for integer formats, values outside that
range will be clipped. For floating point formats, any value passed in here will be
written directly, although -1 to 1 is the standard range.
*/
inline void setAsFloat (float newValue) noexcept
{
// trying to write to a const pointer! For a writeable one, use AudioData::NonConst instead!
static_assert (Constness::isConst == 0, "Attempt to write to a const pointer");
Endianness::setAsFloat (data, newValue);
}
/** Returns the value of the first sample as a 32-bit integer.
The value returned will be in the range -0x80000000 to 0x7fffffff, and shorter values will be
shifted to fill this range (e.g. if you're reading from 24-bit data, the values will be shifted up
by 8 bits when returned here). If the source data is floating point, values beyond -1.0 to 1.0 will
be clipped so that -1.0 maps onto -0x7fffffff and 1.0 maps to 0x7fffffff.
*/
inline int32 getAsInt32() const noexcept { return Endianness::getAsInt32 (data); }
/** Sets the value of the first sample as a 32-bit integer.
This will be mapped to the range of the format that is being written - see getAsInt32().
*/
inline void setAsInt32 (int32 newValue) noexcept
{
// trying to write to a const pointer! For a writeable one, use AudioData::NonConst instead!
static_assert (Constness::isConst == 0, "Attempt to write to a const pointer");
Endianness::setAsInt32 (data, newValue);
}
/** Moves the pointer along to the next sample. */
inline Pointer& operator++() noexcept { advance(); return *this; }
/** Moves the pointer back to the previous sample. */
inline Pointer& operator--() noexcept { this->advanceDataBy (data, -1); return *this; }
/** Adds a number of samples to the pointer's position. */
Pointer& operator+= (int samplesToJump) noexcept { this->advanceDataBy (data, samplesToJump); return *this; }
/** Writes a stream of samples into this pointer from another pointer.
This will copy the specified number of samples, converting between formats appropriately.
*/
void convertSamples (Pointer source, int numSamples) const noexcept
{
// trying to write to a const pointer! For a writeable one, use AudioData::NonConst instead!
static_assert (Constness::isConst == 0, "Attempt to write to a const pointer");
for (Pointer dest (*this); --numSamples >= 0;)
{
dest.data.copyFromSameType (source.data);
dest.advance();
source.advance();
}
}
/** Writes a stream of samples into this pointer from another pointer.
This will copy the specified number of samples, converting between formats appropriately.
*/
template <class OtherPointerType>
void convertSamples (OtherPointerType source, int numSamples) const noexcept
{
// trying to write to a const pointer! For a writeable one, use AudioData::NonConst instead!
static_assert (Constness::isConst == 0, "Attempt to write to a const pointer");
Pointer dest (*this);
if (source.getRawData() != getRawData() || source.getNumBytesBetweenSamples() >= getNumBytesBetweenSamples())
{
while (--numSamples >= 0)
{
Endianness::copyFrom (dest.data, source);
dest.advance();
++source;
}
}
else // copy backwards if we're increasing the sample width..
{
dest += numSamples;
source += numSamples;
while (--numSamples >= 0)
Endianness::copyFrom ((--dest).data, --source);
}
}
/** Sets a number of samples to zero. */
void clearSamples (int numSamples) const noexcept
{
Pointer dest (*this);
dest.clear (dest.data, numSamples);
}
/** Scans a block of data, returning the lowest and highest levels as floats */
Range<float> findMinAndMax (size_t numSamples) const noexcept
{
if (numSamples == 0)
return Range<float>();
Pointer dest (*this);
if (isFloatingPoint())
{
float mn = dest.getAsFloat();
dest.advance();
float mx = mn;
while (--numSamples > 0)
{
const float v = dest.getAsFloat();
dest.advance();
if (mx < v) mx = v;
if (v < mn) mn = v;
}
return Range<float> (mn, mx);
}
int32 mn = dest.getAsInt32();
dest.advance();
int32 mx = mn;
while (--numSamples > 0)
{
const int v = dest.getAsInt32();
dest.advance();
if (mx < v) mx = v;
if (v < mn) mn = v;
}
return Range<float> ((float) mn * (float) (1.0 / (1.0 + (double) Int32::maxValue)),
(float) mx * (float) (1.0 / (1.0 + (double) Int32::maxValue)));
}
/** Scans a block of data, returning the lowest and highest levels as floats */
void findMinAndMax (size_t numSamples, float& minValue, float& maxValue) const noexcept
{
Range<float> r (findMinAndMax (numSamples));
minValue = r.getStart();
maxValue = r.getEnd();
}
/** Returns true if the pointer is using a floating-point format. */
static bool isFloatingPoint() noexcept { return (bool) SampleFormat::isFloat; }
/** Returns true if the format is big-endian. */
static bool isBigEndian() noexcept { return (bool) Endianness::isBigEndian; }
/** Returns the number of bytes in each sample (ignoring the number of interleaved channels). */
static int getBytesPerSample() noexcept { return (int) SampleFormat::bytesPerSample; }
/** Returns the number of interleaved channels in the format. */
int getNumInterleavedChannels() const noexcept { return (int) this->numInterleavedChannels; }
/** Returns the number of bytes between the start address of each sample. */
int getNumBytesBetweenSamples() const noexcept { return InterleavingType::getNumBytesBetweenSamples (data); }
/** Returns the accuracy of this format when represented as a 32-bit integer.
This is the smallest number above 0 that can be represented in the sample format, converted to
a 32-bit range. E.g. if the format is 8-bit, its resolution is 0x01000000; if the format is 24-bit,
its resolution is 0x100.
*/
static int get32BitResolution() noexcept { return (int) SampleFormat::resolution; }
/** Returns a pointer to the underlying data. */
const void* getRawData() const noexcept { return data.data; }
private:
//==============================================================================
SampleFormat data;
inline void advance() noexcept { this->advanceData (data); }
Pointer operator++ (int); // private to force you to use the more efficient pre-increment!
Pointer operator-- (int);
};
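/* A minimal usage sketch (not part of the original header), assuming src24 points to packed
   24-bit little-endian samples and dstFloats to a float buffer of at least numSamples values:
   @code
   AudioData::Pointer<AudioData::Int24,   AudioData::LittleEndian, AudioData::NonInterleaved, AudioData::Const>    source (src24);
   AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst> dest (dstFloats);
   dest.convertSamples (source, numSamples); // widens each 24-bit sample to a native-endian float
   @endcode
*/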
//==============================================================================
/** A base class for objects that are used to convert between two different sample formats.
The AudioData::ConverterInstance implements this base class and can be templated, so
you can create an instance that converts between two particular formats, and then
store this in the abstract base class.
@see AudioData::ConverterInstance
*/
class Converter
{
public:
virtual ~Converter() = default;
/** Converts a sequence of samples from the converter's source format into the dest format. */
virtual void convertSamples (void* destSamples, const void* sourceSamples, int numSamples) const = 0;
/** Converts a sequence of samples from the converter's source format into the dest format.
This method takes sub-channel indexes, which can be used with interleaved formats in order to choose a
particular sub-channel of the data to be used.
*/
virtual void convertSamples (void* destSamples, int destSubChannel,
const void* sourceSamples, int sourceSubChannel, int numSamples) const = 0;
};
//==============================================================================
/**
A class that converts between two templated AudioData::Pointer types, and which
implements the AudioData::Converter interface.
This can be used as a concrete instance of the AudioData::Converter abstract class.
@see AudioData::Converter
*/
template <class SourceSampleType, class DestSampleType>
class ConverterInstance : public Converter
{
public:
ConverterInstance (int numSourceChannels = 1, int numDestChannels = 1)
: sourceChannels (numSourceChannels), destChannels (numDestChannels)
{}
void convertSamples (void* dest, const void* source, int numSamples) const override
{
SourceSampleType s (source, sourceChannels);
DestSampleType d (dest, destChannels);
d.convertSamples (s, numSamples);
}
void convertSamples (void* dest, int destSubChannel,
const void* source, int sourceSubChannel, int numSamples) const override
{
jassert (destSubChannel < destChannels && sourceSubChannel < sourceChannels);
SourceSampleType s (addBytesToPointer (source, sourceSubChannel * SourceSampleType::getBytesPerSample()), sourceChannels);
DestSampleType d (addBytesToPointer (dest, destSubChannel * DestSampleType::getBytesPerSample()), destChannels);
d.convertSamples (s, numSamples);
}
private:
JUCE_DECLARE_NON_COPYABLE (ConverterInstance)
const int sourceChannels, destChannels;
};
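/* A hedged usage sketch (not from the original source): converting one sub-channel of stereo
   interleaved 16-bit little-endian data into a contiguous float buffer. The buffer names,
   channel counts and numSamples are assumptions for illustration.
   @code
   using SourcePtr = AudioData::Pointer<AudioData::Int16,   AudioData::LittleEndian, AudioData::Interleaved,    AudioData::Const>;
   using DestPtr   = AudioData::Pointer<AudioData::Float32, AudioData::NativeEndian, AudioData::NonInterleaved, AudioData::NonConst>;

   AudioData::ConverterInstance<SourcePtr, DestPtr> converter (2, 1); // 2 source channels, 1 dest channel
   converter.convertSamples (leftChannelFloats, 0, interleavedInt16, 0, numSamples); // sub-channel 0 = left
   @endcode
*/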
//==============================================================================
/** A struct that contains a SampleFormat and Endianness to be used with the source and
destination types when calling the interleaveSamples() and deinterleaveSamples() helpers.
@see interleaveSamples, deinterleaveSamples
*/
template <typename DataFormatIn, typename EndiannessIn>
struct Format
{
using DataFormat = DataFormatIn;
using Endianness = EndiannessIn;
};
private:
template <bool IsInterleaved, bool IsConst, typename...>
struct ChannelDataSubtypes;
template <bool IsInterleaved, bool IsConst, typename DataFormat, typename Endianness>
struct ChannelDataSubtypes<IsInterleaved, IsConst, DataFormat, Endianness>
{
using ElementType = std::remove_pointer_t<decltype (DataFormat::data)>;
using ChannelType = std::conditional_t<IsConst, const ElementType*, ElementType*>;
using DataType = std::conditional_t<IsInterleaved, ChannelType, ChannelType*>;
using PointerType = Pointer<DataFormat,
Endianness,
std::conditional_t<IsInterleaved, Interleaved, NonInterleaved>,
std::conditional_t<IsConst, Const, NonConst>>;
};
template <bool IsInterleaved, bool IsConst, typename DataFormat, typename Endianness>
struct ChannelDataSubtypes<IsInterleaved, IsConst, Format<DataFormat, Endianness>>
{
using Subtypes = ChannelDataSubtypes<IsInterleaved, IsConst, DataFormat, Endianness>;
using DataType = typename Subtypes::DataType;
using PointerType = typename Subtypes::PointerType;
};
template <bool IsInterleaved, bool IsConst, typename... Format>
struct ChannelData
{
using Subtypes = ChannelDataSubtypes<IsInterleaved, IsConst, Format...>;
using DataType = typename Subtypes::DataType;
using PointerType = typename Subtypes::PointerType;
DataType data;
int channels;
};
public:
//==============================================================================
/** A sequence of interleaved samples used as the source for the deinterleaveSamples() method. */
template <typename... Format> using InterleavedSource = ChannelData<true, true, Format...>;
/** A sequence of interleaved samples used as the destination for the interleaveSamples() method. */
template <typename... Format> using InterleavedDest = ChannelData<true, false, Format...>;
/** A sequence of non-interleaved samples used as the source for the interleaveSamples() method. */
template <typename... Format> using NonInterleavedSource = ChannelData<false, true, Format...>;
/** A sequence of non-interleaved samples used as the destination for the deinterleaveSamples() method. */
template <typename... Format> using NonInterleavedDest = ChannelData<false, false, Format...>;
/** A helper function for converting a sequence of samples from a non-interleaved source
to an interleaved destination.
When calling this method you need to specify the source and destination data format and endianness
from the AudioData SampleFormat and Endianness types and provide the data and number of channels
for each. For example, to convert a floating-point stream of big endian samples to an interleaved,
native endian stream of 16-bit integer samples you would do the following:
@code
using SourceFormat = AudioData::Format<AudioData::Float32, AudioData::BigEndian>;
using DestFormat = AudioData::Format<AudioData::Int16, AudioData::NativeEndian>;
AudioData::interleaveSamples (AudioData::NonInterleavedSource<SourceFormat> { sourceData, numSourceChannels },
AudioData::InterleavedDest<DestFormat> { destData, numDestChannels },
numSamples);
@endcode
*/
template <typename... SourceFormat, typename... DestFormat>
static void interleaveSamples (NonInterleavedSource<SourceFormat...> source,
InterleavedDest<DestFormat...> dest,
int numSamples)
{
using SourceType = typename decltype (source)::PointerType;
using DestType = typename decltype (dest) ::PointerType;
for (int i = 0; i < dest.channels; ++i)
{
const DestType destType (addBytesToPointer (dest.data, i * DestType::getBytesPerSample()), dest.channels);
if (i < source.channels)
{
if (*source.data != nullptr)
{
destType.convertSamples (SourceType { *source.data }, numSamples);
++source.data;
}
}
else
{
destType.clearSamples (numSamples);
}
}
}
/** A helper function for converting a sequence of samples from an interleaved source
to a non-interleaved destination.
When calling this method you need to specify the source and destination data format and endianness
from the AudioData SampleFormat and Endianness types and provide the data and number of channels
for each. For example, to convert a floating-point stream of big endian samples to a non-interleaved,
native endian stream of 16-bit integer samples you would do the following:
@code
using SourceFormat = AudioData::Format<AudioData::Float32, AudioData::BigEndian>;
using DestFormat = AudioData::Format<AudioData::Int16, AudioData::NativeEndian>;
AudioData::deinterleaveSamples (AudioData::InterleavedSource<SourceFormat> { sourceData, numSourceChannels },
AudioData::NonInterleavedDest<DestFormat> { destData, numDestChannels },
numSamples);
@endcode
*/
template <typename... SourceFormat, typename... DestFormat>
static void deinterleaveSamples (InterleavedSource<SourceFormat...> source,
NonInterleavedDest<DestFormat...> dest,
int numSamples)
{
using SourceType = typename decltype (source)::PointerType;
using DestType = typename decltype (dest) ::PointerType;
for (int i = 0; i < dest.channels; ++i)
{
if (auto* targetChan = dest.data[i])
{
const DestType destType (targetChan);
if (i < source.channels)
destType.convertSamples (SourceType (addBytesToPointer (source.data, i * SourceType::getBytesPerSample()), source.channels), numSamples);
else
destType.clearSamples (numSamples);
}
}
}
};
//==============================================================================
#ifndef DOXYGEN
/**
A set of routines to convert buffers of 32-bit floating point data to and from
various integer formats.
Note that these functions are deprecated - the AudioData class provides a much more
flexible set of conversion classes now.
@tags{Audio}
*/
class [[deprecated]] JUCE_API AudioDataConverters
{
public:
//==============================================================================
static void convertFloatToInt16LE (const float* source, void* dest, int numSamples, int destBytesPerSample = 2);
static void convertFloatToInt16BE (const float* source, void* dest, int numSamples, int destBytesPerSample = 2);
static void convertFloatToInt24LE (const float* source, void* dest, int numSamples, int destBytesPerSample = 3);
static void convertFloatToInt24BE (const float* source, void* dest, int numSamples, int destBytesPerSample = 3);
static void convertFloatToInt32LE (const float* source, void* dest, int numSamples, int destBytesPerSample = 4);
static void convertFloatToInt32BE (const float* source, void* dest, int numSamples, int destBytesPerSample = 4);
static void convertFloatToFloat32LE (const float* source, void* dest, int numSamples, int destBytesPerSample = 4);
static void convertFloatToFloat32BE (const float* source, void* dest, int numSamples, int destBytesPerSample = 4);
//==============================================================================
static void convertInt16LEToFloat (const void* source, float* dest, int numSamples, int srcBytesPerSample = 2);
static void convertInt16BEToFloat (const void* source, float* dest, int numSamples, int srcBytesPerSample = 2);
static void convertInt24LEToFloat (const void* source, float* dest, int numSamples, int srcBytesPerSample = 3);
static void convertInt24BEToFloat (const void* source, float* dest, int numSamples, int srcBytesPerSample = 3);
static void convertInt32LEToFloat (const void* source, float* dest, int numSamples, int srcBytesPerSample = 4);
static void convertInt32BEToFloat (const void* source, float* dest, int numSamples, int srcBytesPerSample = 4);
static void convertFloat32LEToFloat (const void* source, float* dest, int numSamples, int srcBytesPerSample = 4);
static void convertFloat32BEToFloat (const void* source, float* dest, int numSamples, int srcBytesPerSample = 4);
//==============================================================================
enum DataFormat
{
int16LE,
int16BE,
int24LE,
int24BE,
int32LE,
int32BE,
float32LE,
float32BE,
};
static void convertFloatToFormat (DataFormat destFormat,
const float* source, void* dest, int numSamples);
static void convertFormatToFloat (DataFormat sourceFormat,
const void* source, float* dest, int numSamples);
//==============================================================================
static void interleaveSamples (const float** source, float* dest,
int numSamples, int numChannels);
static void deinterleaveSamples (const float* source, float** dest,
int numSamples, int numChannels);
private:
AudioDataConverters();
};
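/* Illustrative only (not part of the original header): packing a float buffer into little-endian
   16-bit integers with the deprecated helpers. floatSource, packedDest and numSamples are assumed
   names; new code should prefer the AudioData classes above.
   @code
   AudioDataConverters::convertFloatToFormat (AudioDataConverters::int16LE,
                                              floatSource, packedDest, numSamples);
   @endcode
*/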
#endif
} // namespace juce

+ 0  - 99  libs/juce7/source/modules/juce_audio_basics/buffers/juce_AudioProcessLoadMeasurer.cpp

@@ -1,99 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
AudioProcessLoadMeasurer::AudioProcessLoadMeasurer() = default;
AudioProcessLoadMeasurer::~AudioProcessLoadMeasurer() = default;
void AudioProcessLoadMeasurer::reset()
{
reset (0, 0);
}
void AudioProcessLoadMeasurer::reset (double sampleRate, int blockSize)
{
const SpinLock::ScopedLockType lock (mutex);
cpuUsageProportion = 0;
xruns = 0;
samplesPerBlock = blockSize;
msPerSample = (sampleRate > 0.0 && blockSize > 0) ? 1000.0 / sampleRate : 0;
}
void AudioProcessLoadMeasurer::registerBlockRenderTime (double milliseconds)
{
const SpinLock::ScopedTryLockType lock (mutex);
if (lock.isLocked())
registerRenderTimeLocked (milliseconds, samplesPerBlock);
}
void AudioProcessLoadMeasurer::registerRenderTime (double milliseconds, int numSamples)
{
const SpinLock::ScopedTryLockType lock (mutex);
if (lock.isLocked())
registerRenderTimeLocked (milliseconds, numSamples);
}
void AudioProcessLoadMeasurer::registerRenderTimeLocked (double milliseconds, int numSamples)
{
if (msPerSample == 0)
return;
const auto maxMilliseconds = numSamples * msPerSample;
const auto usedProportion = milliseconds / maxMilliseconds;
const auto filterAmount = 0.2;
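// One-pole smoothing (exponential moving average): each block moves the reported load 20% of
// the way towards the newest measurement, so momentary spikes are damped rather than reported raw.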
const auto proportion = cpuUsageProportion.load();
cpuUsageProportion = proportion + filterAmount * (usedProportion - proportion);
if (milliseconds > maxMilliseconds)
++xruns;
}
double AudioProcessLoadMeasurer::getLoadAsProportion() const { return jlimit (0.0, 1.0, cpuUsageProportion.load()); }
double AudioProcessLoadMeasurer::getLoadAsPercentage() const { return 100.0 * getLoadAsProportion(); }
int AudioProcessLoadMeasurer::getXRunCount() const { return xruns; }
AudioProcessLoadMeasurer::ScopedTimer::ScopedTimer (AudioProcessLoadMeasurer& p)
: ScopedTimer (p, p.samplesPerBlock)
{
}
AudioProcessLoadMeasurer::ScopedTimer::ScopedTimer (AudioProcessLoadMeasurer& p, int numSamplesInBlock)
: owner (p), startTime (Time::getMillisecondCounterHiRes()), samplesInBlock (numSamplesInBlock)
{
// numSamplesInBlock should never be zero. Did you remember to call AudioProcessLoadMeasurer::reset(),
// passing the expected samples per block?
jassert (numSamplesInBlock);
}
AudioProcessLoadMeasurer::ScopedTimer::~ScopedTimer()
{
owner.registerRenderTime (Time::getMillisecondCounterHiRes() - startTime, samplesInBlock);
}
} // namespace juce

+ 0  - 109  libs/juce7/source/modules/juce_audio_basics/buffers/juce_AudioProcessLoadMeasurer.h

@@ -1,109 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
Maintains an ongoing measurement of the proportion of time which is being
spent inside an audio callback.
@tags{Audio}
*/
class JUCE_API AudioProcessLoadMeasurer
{
public:
/** Constructor. */
AudioProcessLoadMeasurer();
/** Destructor. */
~AudioProcessLoadMeasurer();
//==============================================================================
/** Resets the state. */
void reset();
/** Resets the counter, in preparation for use with the given sample rate and block size. */
void reset (double sampleRate, int blockSize);
/** Returns the current load as a proportion 0 to 1.0 */
double getLoadAsProportion() const;
/** Returns the current load as a percentage 0 to 100.0 */
double getLoadAsPercentage() const;
/** Returns the number of over- (or under-) runs recorded since the state was reset. */
int getXRunCount() const;
//==============================================================================
/** This class measures the time between its construction and destruction and
adds it to an AudioProcessLoadMeasurer.
e.g.
@code
{
AudioProcessLoadMeasurer::ScopedTimer timer (myProcessLoadMeasurer);
myCallback->doTheCallback();
}
@endcode
@tags{Audio}
*/
struct JUCE_API ScopedTimer
{
ScopedTimer (AudioProcessLoadMeasurer&);
ScopedTimer (AudioProcessLoadMeasurer&, int numSamplesInBlock);
~ScopedTimer();
private:
AudioProcessLoadMeasurer& owner;
double startTime;
int samplesInBlock;
JUCE_DECLARE_NON_COPYABLE (ScopedTimer)
};
/** Can be called manually to add the time of a callback to the stats.
Normally you would never need to call this - it's simpler and more robust to
use a ScopedTimer to measure the time using an RAII pattern.
*/
void registerBlockRenderTime (double millisecondsTaken);
/** Can be called manually to add the time of a callback to the stats.
Normally you would never need to call this - it's simpler and more robust to
use a ScopedTimer to measure the time using an RAII pattern.
*/
void registerRenderTime (double millisecondsTaken, int numSamples);
private:
void registerRenderTimeLocked (double, int);
SpinLock mutex;
int samplesPerBlock = 0;
double msPerSample = 0;
std::atomic<double> cpuUsageProportion { 0 };
std::atomic<int> xruns { 0 };
};
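/* A minimal lifecycle sketch (illustrative, not from the original header): the measurer is reset
   whenever the device configuration changes, each callback is timed with a ScopedTimer, and the
   smoothed figure is polled elsewhere (e.g. from a GUI meter). Names outside this class are assumed.
   @code
   loadMeasurer.reset (sampleRate, blockSize);   // e.g. when the audio device starts

   {
       AudioProcessLoadMeasurer::ScopedTimer timer (loadMeasurer, buffer.getNumSamples());
       renderNextBlock (buffer);                 // the real per-block work
   }

   auto loadPercent = loadMeasurer.getLoadAsPercentage();
   @endcode
*/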
} // namespace juce

+ 0  - 1279  libs/juce7/source/modules/juce_audio_basics/buffers/juce_AudioSampleBuffer.h  (diff suppressed: file too large)


+ 0  - 1712  libs/juce7/source/modules/juce_audio_basics/buffers/juce_FloatVectorOperations.cpp  (diff suppressed: file too large)


+ 0  - 269  libs/juce7/source/modules/juce_audio_basics/buffers/juce_FloatVectorOperations.h

@@ -1,269 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
#ifndef JUCE_SNAP_TO_ZERO
#if JUCE_INTEL
#define JUCE_SNAP_TO_ZERO(n) if (! (n < -1.0e-8f || n > 1.0e-8f)) n = 0;
#else
#define JUCE_SNAP_TO_ZERO(n) ignoreUnused (n)
#endif
#endif
class ScopedNoDenormals;
//==============================================================================
/**
A collection of simple vector operations on arrays of floating point numbers,
accelerated with SIMD instructions where possible, usually accessed from
the FloatVectorOperations class.
@code
float data[64];
// The following two function calls are equivalent:
FloatVectorOperationsBase<float, int>::clear (data, 64);
FloatVectorOperations::clear (data, 64);
@endcode
@see FloatVectorOperations
@tags{Audio}
*/
template <typename FloatType, typename CountType>
struct FloatVectorOperationsBase
{
/** Clears a vector of floating point numbers. */
static void JUCE_CALLTYPE clear (FloatType* dest, CountType numValues) noexcept;
/** Copies a repeated value into a vector of floating point numbers. */
static void JUCE_CALLTYPE fill (FloatType* dest, FloatType valueToFill, CountType numValues) noexcept;
/** Copies a vector of floating point numbers. */
static void JUCE_CALLTYPE copy (FloatType* dest, const FloatType* src, CountType numValues) noexcept;
/** Copies a vector of floating point numbers, multiplying each value by a given multiplier */
static void JUCE_CALLTYPE copyWithMultiply (FloatType* dest, const FloatType* src, FloatType multiplier, CountType numValues) noexcept;
/** Adds a fixed value to the destination values. */
static void JUCE_CALLTYPE add (FloatType* dest, FloatType amountToAdd, CountType numValues) noexcept;
/** Adds a fixed value to each source value and stores it in the destination array. */
static void JUCE_CALLTYPE add (FloatType* dest, const FloatType* src, FloatType amount, CountType numValues) noexcept;
/** Adds the source values to the destination values. */
static void JUCE_CALLTYPE add (FloatType* dest, const FloatType* src, CountType numValues) noexcept;
/** Adds each source1 value to the corresponding source2 value and stores the result in the destination array. */
static void JUCE_CALLTYPE add (FloatType* dest, const FloatType* src1, const FloatType* src2, CountType num) noexcept;
/** Subtracts the source values from the destination values. */
static void JUCE_CALLTYPE subtract (FloatType* dest, const FloatType* src, CountType numValues) noexcept;
/** Subtracts each source2 value from the corresponding source1 value and stores the result in the destination array. */
static void JUCE_CALLTYPE subtract (FloatType* dest, const FloatType* src1, const FloatType* src2, CountType num) noexcept;
/** Multiplies each source value by the given multiplier, then adds it to the destination value. */
static void JUCE_CALLTYPE addWithMultiply (FloatType* dest, const FloatType* src, FloatType multiplier, CountType numValues) noexcept;
/** Multiplies each source1 value by the corresponding source2 value, then adds it to the destination value. */
static void JUCE_CALLTYPE addWithMultiply (FloatType* dest, const FloatType* src1, const FloatType* src2, CountType num) noexcept;
/** Multiplies each source value by the given multiplier, then subtracts it from the destination value. */
static void JUCE_CALLTYPE subtractWithMultiply (FloatType* dest, const FloatType* src, FloatType multiplier, CountType numValues) noexcept;
/** Multiplies each source1 value by the corresponding source2 value, then subtracts it from the destination value. */
static void JUCE_CALLTYPE subtractWithMultiply (FloatType* dest, const FloatType* src1, const FloatType* src2, CountType num) noexcept;
/** Multiplies the destination values by the source values. */
static void JUCE_CALLTYPE multiply (FloatType* dest, const FloatType* src, CountType numValues) noexcept;
/** Multiplies each source1 value by the corresponding source2 value, then stores it in the destination array. */
static void JUCE_CALLTYPE multiply (FloatType* dest, const FloatType* src1, const FloatType* src2, CountType numValues) noexcept;
/** Multiplies each of the destination values by a fixed multiplier. */
static void JUCE_CALLTYPE multiply (FloatType* dest, FloatType multiplier, CountType numValues) noexcept;
/** Multiplies each of the source values by a fixed multiplier and stores the result in the destination array. */
static void JUCE_CALLTYPE multiply (FloatType* dest, const FloatType* src, FloatType multiplier, CountType num) noexcept;
/** Copies a source vector to a destination, negating each value. */
static void JUCE_CALLTYPE negate (FloatType* dest, const FloatType* src, CountType numValues) noexcept;
/** Copies a source vector to a destination, taking the absolute of each value. */
static void JUCE_CALLTYPE abs (FloatType* dest, const FloatType* src, CountType numValues) noexcept;
/** Each element of dest will be the minimum of the corresponding element of the source array and the given comp value. */
static void JUCE_CALLTYPE min (FloatType* dest, const FloatType* src, FloatType comp, CountType num) noexcept;
/** Each element of dest will be the minimum of the corresponding source1 and source2 values. */
static void JUCE_CALLTYPE min (FloatType* dest, const FloatType* src1, const FloatType* src2, CountType num) noexcept;
/** Each element of dest will be the maximum of the corresponding element of the source array and the given comp value. */
static void JUCE_CALLTYPE max (FloatType* dest, const FloatType* src, FloatType comp, CountType num) noexcept;
/** Each element of dest will be the maximum of the corresponding source1 and source2 values. */
static void JUCE_CALLTYPE max (FloatType* dest, const FloatType* src1, const FloatType* src2, CountType num) noexcept;
/** Each element of dest is calculated by hard clipping the corresponding src element so that it is in the range specified by the arguments low and high. */
static void JUCE_CALLTYPE clip (FloatType* dest, const FloatType* src, FloatType low, FloatType high, CountType num) noexcept;
/** Finds the minimum and maximum values in the given array. */
static Range<FloatType> JUCE_CALLTYPE findMinAndMax (const FloatType* src, CountType numValues) noexcept;
/** Finds the minimum value in the given array. */
static FloatType JUCE_CALLTYPE findMinimum (const FloatType* src, CountType numValues) noexcept;
/** Finds the maximum value in the given array. */
static FloatType JUCE_CALLTYPE findMaximum (const FloatType* src, CountType numValues) noexcept;
};
#if ! DOXYGEN
namespace detail
{
template <typename...>
struct NameForwarder;
template <typename Head>
struct NameForwarder<Head> : Head {};
template <typename Head, typename... Tail>
struct NameForwarder<Head, Tail...> : Head, NameForwarder<Tail...>
{
using Head::clear;
using NameForwarder<Tail...>::clear;
using Head::fill;
using NameForwarder<Tail...>::fill;
using Head::copy;
using NameForwarder<Tail...>::copy;
using Head::copyWithMultiply;
using NameForwarder<Tail...>::copyWithMultiply;
using Head::add;
using NameForwarder<Tail...>::add;
using Head::subtract;
using NameForwarder<Tail...>::subtract;
using Head::addWithMultiply;
using NameForwarder<Tail...>::addWithMultiply;
using Head::subtractWithMultiply;
using NameForwarder<Tail...>::subtractWithMultiply;
using Head::multiply;
using NameForwarder<Tail...>::multiply;
using Head::negate;
using NameForwarder<Tail...>::negate;
using Head::abs;
using NameForwarder<Tail...>::abs;
using Head::min;
using NameForwarder<Tail...>::min;
using Head::max;
using NameForwarder<Tail...>::max;
using Head::clip;
using NameForwarder<Tail...>::clip;
using Head::findMinAndMax;
using NameForwarder<Tail...>::findMinAndMax;
using Head::findMinimum;
using NameForwarder<Tail...>::findMinimum;
using Head::findMaximum;
using NameForwarder<Tail...>::findMaximum;
};
} // namespace detail
#endif
//==============================================================================
/**
A collection of simple vector operations on arrays of floating point numbers,
accelerated with SIMD instructions where possible and providing all methods
from FloatVectorOperationsBase.
@see FloatVectorOperationsBase
@tags{Audio}
*/
class JUCE_API FloatVectorOperations : public detail::NameForwarder<FloatVectorOperationsBase<float, int>,
FloatVectorOperationsBase<float, size_t>,
FloatVectorOperationsBase<double, int>,
FloatVectorOperationsBase<double, size_t>>
{
public:
static void JUCE_CALLTYPE convertFixedToFloat (float* dest, const int* src, float multiplier, int num) noexcept;
static void JUCE_CALLTYPE convertFixedToFloat (float* dest, const int* src, float multiplier, size_t num) noexcept;
/** This method enables or disables the SSE/NEON flush-to-zero mode. */
static void JUCE_CALLTYPE enableFlushToZeroMode (bool shouldEnable) noexcept;
/** On Intel CPUs, this method enables the SSE flush-to-zero and denormalised-are-zero modes.
This effectively sets the DAZ and FZ bits of the MXCSR register. On arm CPUs this will
enable flush to zero mode.
It's a convenient thing to call before audio processing code where you really want to
avoid denormalisation performance hits.
*/
static void JUCE_CALLTYPE disableDenormalisedNumberSupport (bool shouldDisable = true) noexcept;
/** This method returns true if denormals are currently disabled. */
static bool JUCE_CALLTYPE areDenormalsDisabled() noexcept;
private:
friend ScopedNoDenormals;
static intptr_t JUCE_CALLTYPE getFpStatusRegister() noexcept;
static void JUCE_CALLTYPE setFpStatusRegister (intptr_t) noexcept;
};
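/* A small illustrative sketch (not part of the original header): mixing one channel into a bus at
   half gain and then clearing the source, using the forwarded base-class methods. The buffer names
   and numSamples are assumed.
   @code
   FloatVectorOperations::addWithMultiply (mixBus, channelData, 0.5f, numSamples);
   FloatVectorOperations::clear (channelData, numSamples);
   @endcode
*/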
//==============================================================================
/**
Helper class providing an RAII-based mechanism for temporarily disabling
denormals on your CPU.
@tags{Audio}
*/
class ScopedNoDenormals
{
public:
ScopedNoDenormals() noexcept;
~ScopedNoDenormals() noexcept;
private:
#if JUCE_USE_SSE_INTRINSICS || (JUCE_USE_ARM_NEON || defined (__arm64__) || defined (__aarch64__))
intptr_t fpsr;
#endif
};
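/* Typical usage (illustrative): declare an instance at the top of an audio callback so denormal
   support is disabled for the duration of the block and restored when the scope exits. The
   processBlock() signature shown here is an assumption for the example.
   @code
   void processBlock (AudioBuffer<float>& buffer)
   {
       ScopedNoDenormals noDenormals;
       // ... per-sample DSP that might otherwise hit denormal slow paths ...
   }
   @endcode
*/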
} // namespace juce

+ 0  - 99  libs/juce7/source/modules/juce_audio_basics/juce_audio_basics.cpp

@@ -1,99 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#ifdef JUCE_AUDIO_BASICS_H_INCLUDED
/* When you add this cpp file to your project, you mustn't include it in a file where you've
already included any other headers - just put it inside a file on its own, possibly with your config
flags preceding it, but don't include anything else. That also includes avoiding any automatic prefix
header files that the compiler may be using.
*/
#error "Incorrect use of JUCE cpp file"
#endif
#include "juce_audio_basics.h"
#if JUCE_MINGW && ! defined (alloca)
#define alloca __builtin_alloca
#endif
#if JUCE_USE_SSE_INTRINSICS
#include <emmintrin.h>
#endif
#ifndef JUCE_USE_VDSP_FRAMEWORK
#define JUCE_USE_VDSP_FRAMEWORK 1
#endif
#if (JUCE_MAC || JUCE_IOS) && JUCE_USE_VDSP_FRAMEWORK
#include <Accelerate/Accelerate.h>
#else
#undef JUCE_USE_VDSP_FRAMEWORK
#endif
#if JUCE_USE_ARM_NEON
#include <arm_neon.h>
#endif
#include "buffers/juce_AudioDataConverters.cpp"
#include "buffers/juce_FloatVectorOperations.cpp"
#include "buffers/juce_AudioChannelSet.cpp"
#include "buffers/juce_AudioProcessLoadMeasurer.cpp"
#include "utilities/juce_IIRFilter.cpp"
#include "utilities/juce_LagrangeInterpolator.cpp"
#include "utilities/juce_WindowedSincInterpolator.cpp"
#include "utilities/juce_Interpolators.cpp"
#include "utilities/juce_SmoothedValue.cpp"
#include "midi/juce_MidiBuffer.cpp"
#include "midi/juce_MidiFile.cpp"
#include "midi/juce_MidiKeyboardState.cpp"
#include "midi/juce_MidiMessage.cpp"
#include "midi/juce_MidiMessageSequence.cpp"
#include "midi/juce_MidiRPN.cpp"
#include "mpe/juce_MPEValue.cpp"
#include "mpe/juce_MPENote.cpp"
#include "mpe/juce_MPEZoneLayout.cpp"
#include "mpe/juce_MPEInstrument.cpp"
#include "mpe/juce_MPEMessages.cpp"
#include "mpe/juce_MPESynthesiserBase.cpp"
#include "mpe/juce_MPESynthesiserVoice.cpp"
#include "mpe/juce_MPESynthesiser.cpp"
#include "mpe/juce_MPEUtils.cpp"
#include "sources/juce_BufferingAudioSource.cpp"
#include "sources/juce_ChannelRemappingAudioSource.cpp"
#include "sources/juce_IIRFilterAudioSource.cpp"
#include "sources/juce_MemoryAudioSource.cpp"
#include "sources/juce_MixerAudioSource.cpp"
#include "sources/juce_ResamplingAudioSource.cpp"
#include "sources/juce_ReverbAudioSource.cpp"
#include "sources/juce_ToneGeneratorAudioSource.cpp"
#include "synthesisers/juce_Synthesiser.cpp"
#include "midi/ump/juce_UMP.h"
#include "midi/ump/juce_UMPUtils.cpp"
#include "midi/ump/juce_UMPView.cpp"
#include "midi/ump/juce_UMPSysEx7.cpp"
#include "midi/ump/juce_UMPMidi1ToMidi2DefaultTranslator.cpp"
#if JUCE_UNIT_TESTS
#include "utilities/juce_ADSR_test.cpp"
#include "midi/ump/juce_UMP_test.cpp"
#endif

+ 0  - 123  libs/juce7/source/modules/juce_audio_basics/juce_audio_basics.h

@@ -1,123 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
/*******************************************************************************
The block below describes the properties of this module, and is read by
the Projucer to automatically generate project code that uses it.
For details about the syntax and how to create or use a module, see the
JUCE Module Format.md file.
BEGIN_JUCE_MODULE_DECLARATION
ID: juce_audio_basics
vendor: juce
version: 7.0.1
name: JUCE audio and MIDI data classes
description: Classes for audio buffer manipulation, midi message handling, synthesis, etc.
website: http://www.juce.com/juce
license: ISC
minimumCppStandard: 14
dependencies: juce_core
OSXFrameworks: Accelerate
iOSFrameworks: Accelerate
END_JUCE_MODULE_DECLARATION
*******************************************************************************/
#pragma once
#define JUCE_AUDIO_BASICS_H_INCLUDED
#include <juce_core/juce_core.h>
//==============================================================================
#undef Complex // apparently some C libraries actually define these symbols (!)
#undef Factor
//==============================================================================
#if JUCE_MINGW && ! defined (__SSE2__)
#define JUCE_USE_SSE_INTRINSICS 0
#endif
#ifndef JUCE_USE_SSE_INTRINSICS
#define JUCE_USE_SSE_INTRINSICS 1
#endif
#if ! JUCE_INTEL
#undef JUCE_USE_SSE_INTRINSICS
#endif
#if __ARM_NEON__ && ! (JUCE_USE_VDSP_FRAMEWORK || defined (JUCE_USE_ARM_NEON))
#define JUCE_USE_ARM_NEON 1
#endif
#if TARGET_IPHONE_SIMULATOR
#ifdef JUCE_USE_ARM_NEON
#undef JUCE_USE_ARM_NEON
#endif
#define JUCE_USE_ARM_NEON 0
#endif
//==============================================================================
#include "buffers/juce_AudioDataConverters.h"
#include "buffers/juce_FloatVectorOperations.h"
#include "buffers/juce_AudioSampleBuffer.h"
#include "buffers/juce_AudioChannelSet.h"
#include "buffers/juce_AudioProcessLoadMeasurer.h"
#include "utilities/juce_Decibels.h"
#include "utilities/juce_IIRFilter.h"
#include "utilities/juce_GenericInterpolator.h"
#include "utilities/juce_Interpolators.h"
#include "utilities/juce_SmoothedValue.h"
#include "utilities/juce_Reverb.h"
#include "utilities/juce_ADSR.h"
#include "midi/juce_MidiMessage.h"
#include "midi/juce_MidiBuffer.h"
#include "midi/juce_MidiMessageSequence.h"
#include "midi/juce_MidiFile.h"
#include "midi/juce_MidiKeyboardState.h"
#include "midi/juce_MidiRPN.h"
#include "mpe/juce_MPEValue.h"
#include "mpe/juce_MPENote.h"
#include "mpe/juce_MPEZoneLayout.h"
#include "mpe/juce_MPEInstrument.h"
#include "mpe/juce_MPEMessages.h"
#include "mpe/juce_MPESynthesiserBase.h"
#include "mpe/juce_MPESynthesiserVoice.h"
#include "mpe/juce_MPESynthesiser.h"
#include "mpe/juce_MPEUtils.h"
#include "sources/juce_AudioSource.h"
#include "sources/juce_PositionableAudioSource.h"
#include "sources/juce_BufferingAudioSource.h"
#include "sources/juce_ChannelRemappingAudioSource.h"
#include "sources/juce_IIRFilterAudioSource.h"
#include "sources/juce_MemoryAudioSource.h"
#include "sources/juce_MixerAudioSource.h"
#include "sources/juce_ResamplingAudioSource.h"
#include "sources/juce_ReverbAudioSource.h"
#include "sources/juce_ToneGeneratorAudioSource.h"
#include "synthesisers/juce_Synthesiser.h"
#include "audio_play_head/juce_AudioPlayHead.h"

+ 0  - 23  libs/juce7/source/modules/juce_audio_basics/juce_audio_basics.mm

@@ -1,23 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#include "juce_audio_basics.cpp"

+ 0
- 320
libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiBuffer.cpp View File

@@ -1,320 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
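// Storage layout used throughout this file: MidiBuffer keeps all of its events in one
// packed byte array, where each event is an int32 sample position, followed by a uint16
// byte count, followed by that many bytes of raw MIDI data. The helpers below read
// those fields back out of the packed stream.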
namespace MidiBufferHelpers
{
inline int getEventTime (const void* d) noexcept
{
return readUnaligned<int32> (d);
}
inline uint16 getEventDataSize (const void* d) noexcept
{
return readUnaligned<uint16> (static_cast<const char*> (d) + sizeof (int32));
}
inline uint16 getEventTotalSize (const void* d) noexcept
{
return (uint16) (getEventDataSize (d) + sizeof (int32) + sizeof (uint16));
}
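// Works out how many bytes of the incoming data form a single MIDI event: sysex
// (0xf0/0xf7) runs until the terminating 0xf7, meta events (0xff) carry a
// variable-length size field, and ordinary status bytes map to a fixed message
// length. Returns 0 if the data does not start with a status byte.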
static int findActualEventLength (const uint8* data, int maxBytes) noexcept
{
auto byte = (unsigned int) *data;
if (byte == 0xf0 || byte == 0xf7)
{
int i = 1;
while (i < maxBytes)
if (data[i++] == 0xf7)
break;
return i;
}
if (byte == 0xff)
{
if (maxBytes == 1)
return 1;
const auto var = MidiMessage::readVariableLengthValue (data + 1, maxBytes - 1);
return jmin (maxBytes, var.value + 2 + var.bytesUsed);
}
if (byte >= 0x80)
return jmin (maxBytes, MidiMessage::getMessageLengthFromFirstByte ((uint8) byte));
return 0;
}
static uint8* findEventAfter (uint8* d, uint8* endData, int samplePosition) noexcept
{
while (d < endData && getEventTime (d) <= samplePosition)
d += getEventTotalSize (d);
return d;
}
}
//==============================================================================
MidiBufferIterator& MidiBufferIterator::operator++() noexcept
{
data += sizeof (int32) + sizeof (uint16) + size_t (MidiBufferHelpers::getEventDataSize (data));
return *this;
}
MidiBufferIterator MidiBufferIterator::operator++ (int) noexcept
{
auto copy = *this;
++(*this);
return copy;
}
MidiBufferIterator::reference MidiBufferIterator::operator*() const noexcept
{
return { data + sizeof (int32) + sizeof (uint16),
MidiBufferHelpers::getEventDataSize (data),
MidiBufferHelpers::getEventTime (data) };
}
//==============================================================================
MidiBuffer::MidiBuffer (const MidiMessage& message) noexcept
{
addEvent (message, 0);
}
void MidiBuffer::swapWith (MidiBuffer& other) noexcept { data.swapWith (other.data); }
void MidiBuffer::clear() noexcept { data.clearQuick(); }
void MidiBuffer::ensureSize (size_t minimumNumBytes) { data.ensureStorageAllocated ((int) minimumNumBytes); }
bool MidiBuffer::isEmpty() const noexcept { return data.size() == 0; }
void MidiBuffer::clear (int startSample, int numSamples)
{
auto start = MidiBufferHelpers::findEventAfter (data.begin(), data.end(), startSample - 1);
auto end = MidiBufferHelpers::findEventAfter (start, data.end(), startSample + numSamples - 1);
data.removeRange ((int) (start - data.begin()), (int) (end - start));
}
bool MidiBuffer::addEvent (const MidiMessage& m, int sampleNumber)
{
return addEvent (m.getRawData(), m.getRawDataSize(), sampleNumber);
}
bool MidiBuffer::addEvent (const void* newData, int maxBytes, int sampleNumber)
{
auto numBytes = MidiBufferHelpers::findActualEventLength (static_cast<const uint8*> (newData), maxBytes);
if (numBytes <= 0)
return true;
if (std::numeric_limits<uint16>::max() < numBytes)
{
// This method only supports messages smaller than (1 << 16) bytes
return false;
}
auto newItemSize = (size_t) numBytes + sizeof (int32) + sizeof (uint16);
auto offset = (int) (MidiBufferHelpers::findEventAfter (data.begin(), data.end(), sampleNumber) - data.begin());
data.insertMultiple (offset, 0, (int) newItemSize);
auto* d = data.begin() + offset;
writeUnaligned<int32> (d, sampleNumber);
d += sizeof (int32);
writeUnaligned<uint16> (d, static_cast<uint16> (numBytes));
d += sizeof (uint16);
memcpy (d, newData, (size_t) numBytes);
return true;
}
void MidiBuffer::addEvents (const MidiBuffer& otherBuffer,
int startSample, int numSamples, int sampleDeltaToAdd)
{
for (auto i = otherBuffer.findNextSamplePosition (startSample); i != otherBuffer.cend(); ++i)
{
const auto metadata = *i;
if (metadata.samplePosition >= startSample + numSamples && numSamples >= 0)
break;
addEvent (metadata.data, metadata.numBytes, metadata.samplePosition + sampleDeltaToAdd);
}
}
int MidiBuffer::getNumEvents() const noexcept
{
int n = 0;
auto end = data.end();
for (auto d = data.begin(); d < end; ++n)
d += MidiBufferHelpers::getEventTotalSize (d);
return n;
}
int MidiBuffer::getFirstEventTime() const noexcept
{
return data.size() > 0 ? MidiBufferHelpers::getEventTime (data.begin()) : 0;
}
int MidiBuffer::getLastEventTime() const noexcept
{
if (data.size() == 0)
return 0;
auto endData = data.end();
for (auto d = data.begin();;)
{
auto nextOne = d + MidiBufferHelpers::getEventTotalSize (d);
if (nextOne >= endData)
return MidiBufferHelpers::getEventTime (d);
d = nextOne;
}
}
MidiBufferIterator MidiBuffer::findNextSamplePosition (int samplePosition) const noexcept
{
return std::find_if (cbegin(), cend(), [&] (const MidiMessageMetadata& metadata) noexcept
{
return metadata.samplePosition >= samplePosition;
});
}
//==============================================================================
JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wdeprecated-declarations")
JUCE_BEGIN_IGNORE_WARNINGS_MSVC (4996)
MidiBuffer::Iterator::Iterator (const MidiBuffer& b) noexcept
: buffer (b), iterator (b.data.begin())
{
}
void MidiBuffer::Iterator::setNextSamplePosition (int samplePosition) noexcept
{
iterator = buffer.findNextSamplePosition (samplePosition);
}
bool MidiBuffer::Iterator::getNextEvent (const uint8*& midiData, int& numBytes, int& samplePosition) noexcept
{
if (iterator == buffer.cend())
return false;
const auto metadata = *iterator++;
midiData = metadata.data;
numBytes = metadata.numBytes;
samplePosition = metadata.samplePosition;
return true;
}
bool MidiBuffer::Iterator::getNextEvent (MidiMessage& result, int& samplePosition) noexcept
{
if (iterator == buffer.cend())
return false;
const auto metadata = *iterator++;
result = metadata.getMessage();
samplePosition = metadata.samplePosition;
return true;
}
JUCE_END_IGNORE_WARNINGS_MSVC
JUCE_END_IGNORE_WARNINGS_GCC_LIKE
//==============================================================================
//==============================================================================
#if JUCE_UNIT_TESTS
struct MidiBufferTest : public UnitTest
{
MidiBufferTest()
: UnitTest ("MidiBuffer", UnitTestCategories::midi)
{}
void runTest() override
{
beginTest ("Clear messages");
{
const auto message = MidiMessage::noteOn (1, 64, 0.5f);
const auto testBuffer = [&]
{
MidiBuffer buffer;
buffer.addEvent (message, 0);
buffer.addEvent (message, 10);
buffer.addEvent (message, 20);
buffer.addEvent (message, 30);
return buffer;
}();
{
auto buffer = testBuffer;
buffer.clear (10, 0);
expectEquals (buffer.getNumEvents(), 4);
}
{
auto buffer = testBuffer;
buffer.clear (10, 1);
expectEquals (buffer.getNumEvents(), 3);
}
{
auto buffer = testBuffer;
buffer.clear (10, 10);
expectEquals (buffer.getNumEvents(), 3);
}
{
auto buffer = testBuffer;
buffer.clear (10, 20);
expectEquals (buffer.getNumEvents(), 2);
}
{
auto buffer = testBuffer;
buffer.clear (10, 30);
expectEquals (buffer.getNumEvents(), 1);
}
{
auto buffer = testBuffer;
buffer.clear (10, 300);
expectEquals (buffer.getNumEvents(), 1);
}
}
}
};
static MidiBufferTest midiBufferTest;
#endif
} // namespace juce
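A minimal usage sketch of the buffer implemented above, with the function name chosen purely for illustration; it queues two messages and then walks the time-sorted contents without copying the underlying bytes:

    static void appendAndLogEvents (juce::MidiBuffer& midi, int numSamples)
    {
        // Queue a note-on at the start of the block and the matching note-off at the end.
        midi.addEvent (juce::MidiMessage::noteOn  (1, 64, (juce::uint8) 100), 0);
        midi.addEvent (juce::MidiMessage::noteOff (1, 64), numSamples - 1);

        // Range-for iteration yields MidiMessageMetadata views into the packed storage.
        for (const juce::MidiMessageMetadata metadata : midi)
            juce::Logger::writeToLog (metadata.getMessage().getDescription());
    }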

+ 0
- 345
libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiBuffer.h View File

@@ -1,345 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
A view of MIDI message data stored in a contiguous buffer.
Instances of this class do *not* own the midi data bytes that they point to.
Instead, they expect the midi data to live in a separate buffer that outlives
the MidiMessageMetadata instance.
@tags{Audio}
*/
struct MidiMessageMetadata final
{
MidiMessageMetadata() noexcept = default;
MidiMessageMetadata (const uint8* dataIn, int numBytesIn, int positionIn) noexcept
: data (dataIn), numBytes (numBytesIn), samplePosition (positionIn)
{
}
/** Constructs a new MidiMessage instance from the data that this object is viewing.
Note that MidiMessage owns its data storage, whereas MidiMessageMetadata does not.
*/
MidiMessage getMessage() const { return MidiMessage (data, numBytes, samplePosition); }
/** Pointer to the first byte of a MIDI message. */
const uint8* data = nullptr;
/** The number of bytes in the MIDI message. */
int numBytes = 0;
/** The MIDI message's timestamp. */
int samplePosition = 0;
};
//==============================================================================
/**
An iterator to move over contiguous raw MIDI data, which allows iterating
over a MidiBuffer using C++11 range-for syntax.
In the following example, we log all three-byte messages in a midi buffer.
@code
void processBlock (AudioBuffer<float>&, MidiBuffer& midiBuffer) override
{
for (const MidiMessageMetadata metadata : midiBuffer)
if (metadata.numBytes == 3)
Logger::writeToLog (metadata.getMessage().getDescription());
}
@endcode
@tags{Audio}
*/
class JUCE_API MidiBufferIterator
{
using Ptr = const uint8*;
public:
MidiBufferIterator() = default;
/** Constructs an iterator pointing at the message starting at the byte `dataIn`.
`dataIn` must point to the start of a valid MIDI message. If it does not,
calling other member functions on the iterator will result in undefined
behaviour.
*/
explicit MidiBufferIterator (const uint8* dataIn) noexcept
: data (dataIn)
{
}
using difference_type = std::iterator_traits<Ptr>::difference_type;
using value_type = MidiMessageMetadata;
using reference = MidiMessageMetadata;
using pointer = void;
using iterator_category = std::input_iterator_tag;
/** Make this iterator point to the next message in the buffer. */
MidiBufferIterator& operator++() noexcept;
/** Create a copy of this object, make this iterator point to the next message in
the buffer, then return the copy.
*/
MidiBufferIterator operator++ (int) noexcept;
/** Return true if this iterator points to the same message as another
iterator instance, otherwise return false.
*/
bool operator== (const MidiBufferIterator& other) const noexcept { return data == other.data; }
/** Return false if this iterator points to the same message as another
iterator instance, otherwise returns true.
*/
bool operator!= (const MidiBufferIterator& other) const noexcept { return ! operator== (other); }
/** Return an instance of MidiMessageMetadata which describes the message to which
the iterator is currently pointing.
*/
reference operator*() const noexcept;
private:
Ptr data = nullptr;
};
//==============================================================================
/**
Holds a sequence of time-stamped midi events.
Analogous to the AudioBuffer, this holds a set of midi events with
integer time-stamps. The buffer is kept sorted in order of the time-stamps.
If you're working with a sequence of midi events that may need to be manipulated
or read/written to a midi file, then MidiMessageSequence is probably a more
appropriate container. MidiBuffer is designed for lower-level streams of raw
midi data.
@see MidiMessage
@tags{Audio}
*/
class JUCE_API MidiBuffer
{
public:
//==============================================================================
/** Creates an empty MidiBuffer. */
MidiBuffer() noexcept = default;
/** Creates a MidiBuffer containing a single midi message. */
explicit MidiBuffer (const MidiMessage& message) noexcept;
//==============================================================================
/** Removes all events from the buffer. */
void clear() noexcept;
/** Removes all events between two times from the buffer.
All events for which (start <= event position < start + numSamples) will
be removed.
*/
void clear (int start, int numSamples);
/** Returns true if the buffer is empty.
To actually retrieve the events, use a MidiBufferIterator object
*/
bool isEmpty() const noexcept;
/** Counts the number of events in the buffer.
This is actually quite a slow operation, as it has to iterate through all
the events, so you might prefer to call isEmpty() if that's all you need
to know.
*/
int getNumEvents() const noexcept;
/** Adds an event to the buffer.
The sample number will be used to determine the position of the event in
the buffer, which is always kept sorted. The MidiMessage's timestamp is
ignored.
If an event is added whose sample position is the same as one or more events
already in the buffer, the new event will be placed after the existing ones.
To retrieve events, use a MidiBufferIterator object.
Returns true on success, or false on failure.
*/
bool addEvent (const MidiMessage& midiMessage, int sampleNumber);
/** Adds an event to the buffer from raw midi data.
The sample number will be used to determine the position of the event in
the buffer, which is always kept sorted.
If an event is added whose sample position is the same as one or more events
already in the buffer, the new event will be placed after the existing ones.
The event data will be inspected to calculate the number of bytes in length that
the midi event really takes up, so maxBytesOfMidiData may be longer than the data
that actually gets stored. E.g. if you pass in a note-on and a length of 4 bytes,
it'll actually only store 3 bytes. If the midi data is invalid, it might not
add an event at all.
To retrieve events, use a MidiBufferIterator object.
Returns true on success, or false on failure.
*/
bool addEvent (const void* rawMidiData,
int maxBytesOfMidiData,
int sampleNumber);
/** Adds some events from another buffer to this one.
@param otherBuffer the buffer containing the events you want to add
@param startSample the lowest sample number in the source buffer for which
events should be added. Any source events whose timestamp is
less than this will be ignored
@param numSamples the valid range of samples from the source buffer for which
events should be added - i.e. events in the source buffer whose
timestamp is greater than or equal to (startSample + numSamples)
will be ignored. If this value is less than 0, all events after
startSample will be taken.
@param sampleDeltaToAdd a value which will be added to the source timestamps of the events
that are added to this buffer
*/
void addEvents (const MidiBuffer& otherBuffer,
int startSample,
int numSamples,
int sampleDeltaToAdd);
/** Returns the sample number of the first event in the buffer.
If the buffer's empty, this will just return 0.
*/
int getFirstEventTime() const noexcept;
/** Returns the sample number of the last event in the buffer.
If the buffer's empty, this will just return 0.
*/
int getLastEventTime() const noexcept;
//==============================================================================
/** Exchanges the contents of this buffer with another one.
This is a quick operation, because no memory allocating or copying is done, it
just swaps the internal state of the two buffers.
*/
void swapWith (MidiBuffer&) noexcept;
/** Preallocates some memory for the buffer to use.
This helps to avoid needing to reallocate space when the buffer has messages
added to it.
*/
void ensureSize (size_t minimumNumBytes);
/** Get a read-only iterator pointing to the beginning of this buffer. */
MidiBufferIterator begin() const noexcept { return cbegin(); }
/** Get a read-only iterator pointing one past the end of this buffer. */
MidiBufferIterator end() const noexcept { return cend(); }
/** Get a read-only iterator pointing to the beginning of this buffer. */
MidiBufferIterator cbegin() const noexcept { return MidiBufferIterator (data.begin()); }
/** Get a read-only iterator pointing one past the end of this buffer. */
MidiBufferIterator cend() const noexcept { return MidiBufferIterator (data.end()); }
/** Get an iterator pointing to the first event with a timestamp greater-than or
equal-to `samplePosition`.
*/
MidiBufferIterator findNextSamplePosition (int samplePosition) const noexcept;
//==============================================================================
#ifndef DOXYGEN
/** This class is now deprecated in favour of MidiBufferIterator.
Used to iterate through the events in a MidiBuffer.
Note that altering the buffer while an iterator is using it will produce
undefined behaviour.
@see MidiBuffer
*/
class [[deprecated]] JUCE_API Iterator
{
public:
//==============================================================================
/** Creates an Iterator for this MidiBuffer. */
Iterator (const MidiBuffer& b) noexcept;
//==============================================================================
/** Repositions the iterator so that the next event retrieved will be the first
one whose sample position is at greater than or equal to the given position.
*/
void setNextSamplePosition (int samplePosition) noexcept;
/** Retrieves a copy of the next event from the buffer.
@param result on return, this will be the message. The MidiMessage's timestamp
is set to the same value as samplePosition.
@param samplePosition on return, this will be the position of the event, as a
sample index in the buffer
@returns true if an event was found, or false if the iterator has reached
the end of the buffer
*/
bool getNextEvent (MidiMessage& result,
int& samplePosition) noexcept;
/** Retrieves the next event from the buffer.
@param midiData on return, this pointer will be set to a block of data containing
the midi message. Note that to make it fast, this is a pointer
directly into the MidiBuffer's internal data, so is only valid
temporarily until the MidiBuffer is altered.
@param numBytesOfMidiData on return, this is the number of bytes of data used by the
midi message
@param samplePosition on return, this will be the position of the event, as a
sample index in the buffer
@returns true if an event was found, or false if the iterator has reached
the end of the buffer
*/
bool getNextEvent (const uint8* &midiData,
int& numBytesOfMidiData,
int& samplePosition) noexcept;
private:
//==============================================================================
const MidiBuffer& buffer;
MidiBufferIterator iterator;
};
#endif
/** The raw data holding this buffer.
Obviously access to this data is provided at your own risk. Its internal format could
change in future, so don't write code that relies on it!
*/
Array<uint8> data;
private:
JUCE_LEAK_DETECTOR (MidiBuffer)
};
} // namespace juce
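Since the nested Iterator above is deprecated, a sketch of the two equivalent iteration styles, using an illustrative drainBuffer() helper:

    static void drainBuffer (const juce::MidiBuffer& midi)
    {
        auto handle = [] (const juce::MidiMessage& m, int samplePosition)
        {
            juce::Logger::writeToLog (juce::String (samplePosition) + ": " + m.getDescription());
        };

        // Deprecated style (still compiles, but triggers deprecation warnings):
        juce::MidiBuffer::Iterator it (midi);
        juce::MidiMessage message;
        int position = 0;
        while (it.getNextEvent (message, position))
            handle (message, position);

        // Equivalent range-for style built on MidiBufferIterator:
        for (const juce::MidiMessageMetadata metadata : midi)
            handle (metadata.getMessage(), metadata.samplePosition);
    }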

+ 0
- 188
libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiDataConcatenator.h View File

@@ -1,188 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
Helper class that takes chunks of incoming midi bytes, packages them into
messages, and dispatches them to a midi callback.
@tags{Audio}
*/
class MidiDataConcatenator
{
public:
MidiDataConcatenator (int initialBufferSize)
: pendingSysexData ((size_t) initialBufferSize)
{
}
void reset()
{
currentMessageLen = 0;
pendingSysexSize = 0;
pendingSysexTime = 0;
}
template <typename UserDataType, typename CallbackType>
void pushMidiData (const void* inputData, int numBytes, double time,
UserDataType* input, CallbackType& callback)
{
auto d = static_cast<const uint8*> (inputData);
while (numBytes > 0)
{
auto nextByte = *d;
if (pendingSysexSize != 0 || nextByte == 0xf0)
{
processSysex (d, numBytes, time, input, callback);
currentMessageLen = 0;
continue;
}
++d;
--numBytes;
if (isRealtimeMessage (nextByte))
{
callback.handleIncomingMidiMessage (input, MidiMessage (nextByte, time));
// These can be embedded in the middle of a normal message, so we won't
// reset the currentMessageLen here.
continue;
}
if (isInitialByte (nextByte))
{
currentMessage[0] = nextByte;
currentMessageLen = 1;
}
else if (currentMessageLen > 0 && currentMessageLen < 3)
{
currentMessage[currentMessageLen++] = nextByte;
}
else
{
// message is too long or invalid MIDI - abandon it and start again with the next byte
currentMessageLen = 0;
continue;
}
auto expectedLength = MidiMessage::getMessageLengthFromFirstByte (currentMessage[0]);
if (expectedLength == currentMessageLen)
{
callback.handleIncomingMidiMessage (input, MidiMessage (currentMessage, expectedLength, time));
currentMessageLen = 1; // reset, but leave the first byte to use as the running status byte
}
}
}
private:
template <typename UserDataType, typename CallbackType>
void processSysex (const uint8*& d, int& numBytes, double time,
UserDataType* input, CallbackType& callback)
{
if (*d == 0xf0)
{
pendingSysexSize = 0;
pendingSysexTime = time;
}
pendingSysexData.ensureSize ((size_t) (pendingSysexSize + numBytes), false);
auto totalMessage = static_cast<uint8*> (pendingSysexData.getData());
auto dest = totalMessage + pendingSysexSize;
do
{
if (pendingSysexSize > 0 && isStatusByte (*d))
{
if (*d == 0xf7)
{
*dest++ = *d++;
++pendingSysexSize;
--numBytes;
break;
}
if (*d >= 0xfa || *d == 0xf8)
{
callback.handleIncomingMidiMessage (input, MidiMessage (*d, time));
++d;
--numBytes;
}
else
{
pendingSysexSize = 0;
int used = 0;
const MidiMessage m (d, numBytes, used, 0, time);
if (used > 0)
{
callback.handleIncomingMidiMessage (input, m);
numBytes -= used;
d += used;
}
break;
}
}
else
{
*dest++ = *d++;
++pendingSysexSize;
--numBytes;
}
}
while (numBytes > 0);
if (pendingSysexSize > 0)
{
if (totalMessage [pendingSysexSize - 1] == 0xf7)
{
callback.handleIncomingMidiMessage (input, MidiMessage (totalMessage, pendingSysexSize, pendingSysexTime));
pendingSysexSize = 0;
}
else
{
callback.handlePartialSysexMessage (input, totalMessage, pendingSysexSize, pendingSysexTime);
}
}
}
static bool isRealtimeMessage (uint8 byte) { return byte >= 0xf8 && byte <= 0xfe; }
static bool isStatusByte (uint8 byte) { return byte >= 0x80; }
static bool isInitialByte (uint8 byte) { return isStatusByte (byte) && byte != 0xf7; }
uint8 currentMessage[3];
int currentMessageLen = 0;
MemoryBlock pendingSysexData;
double pendingSysexTime = 0;
int pendingSysexSize = 0;
JUCE_DECLARE_NON_COPYABLE (MidiDataConcatenator)
};
} // namespace juce
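The callback type accepted by pushMidiData() is duck-typed rather than a fixed interface; a minimal sketch of a callback that satisfies it, with the names below invented for illustration and the user-data type left as void:

    struct LoggingCallback
    {
        void handleIncomingMidiMessage (void*, const juce::MidiMessage& message)
        {
            juce::Logger::writeToLog (message.getDescription());
        }

        void handlePartialSysexMessage (void*, const juce::uint8*, int numBytesSoFar, double)
        {
            juce::Logger::writeToLog ("partial sysex: " + juce::String (numBytesSoFar) + " bytes so far");
        }
    };

    static void feedBytes (const void* bytes, int numBytes, double timeStamp)
    {
        juce::MidiDataConcatenator concatenator (2048);   // initial sysex buffer size in bytes
        LoggingCallback callback;
        concatenator.pushMidiData (bytes, numBytes, timeStamp, (void*) nullptr, callback);
    }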

+ 0
- 794
libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiFile.cpp View File

@@ -1,794 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
namespace MidiFileHelpers
{
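// Writes a standard MIDI variable-length quantity: the value is split into 7-bit
// groups, most significant group first, and every byte except the last has its top
// bit set. For example, 0x0fffffff comes out as the four bytes ff ff ff 7f.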
static void writeVariableLengthInt (OutputStream& out, uint32 v)
{
auto buffer = v & 0x7f;
while ((v >>= 7) != 0)
{
buffer <<= 8;
buffer |= ((v & 0x7f) | 0x80);
}
for (;;)
{
out.writeByte ((char) buffer);
if (buffer & 0x80)
buffer >>= 8;
else
break;
}
}
template <typename Integral>
struct ReadTrait;
template <>
struct ReadTrait<uint32> { static constexpr auto read = ByteOrder::bigEndianInt; };
template <>
struct ReadTrait<uint16> { static constexpr auto read = ByteOrder::bigEndianShort; };
template <typename Integral>
Optional<Integral> tryRead (const uint8*& data, size_t& remaining)
{
using Trait = ReadTrait<Integral>;
constexpr auto size = sizeof (Integral);
if (remaining < size)
return {};
const Optional<Integral> result { Trait::read (data) };
data += size;
remaining -= size;
return result;
}
struct HeaderDetails
{
size_t bytesRead = 0;
short timeFormat = 0;
short fileType = 0;
short numberOfTracks = 0;
};
static Optional<HeaderDetails> parseMidiHeader (const uint8* const initialData,
const size_t maxSize)
{
auto* data = initialData;
auto remaining = maxSize;
auto ch = tryRead<uint32> (data, remaining);
if (! ch.hasValue())
return {};
if (*ch != ByteOrder::bigEndianInt ("MThd"))
{
auto ok = false;
if (*ch == ByteOrder::bigEndianInt ("RIFF"))
{
for (int i = 0; i < 8; ++i)
{
ch = tryRead<uint32> (data, remaining);
if (! ch.hasValue())
return {};
if (*ch == ByteOrder::bigEndianInt ("MThd"))
{
ok = true;
break;
}
}
}
if (! ok)
return {};
}
const auto bytesRemaining = tryRead<uint32> (data, remaining);
if (! bytesRemaining.hasValue() || *bytesRemaining > remaining)
return {};
const auto optFileType = tryRead<uint16> (data, remaining);
if (! optFileType.hasValue() || 2 < *optFileType)
return {};
const auto optNumTracks = tryRead<uint16> (data, remaining);
if (! optNumTracks.hasValue() || (*optFileType == 0 && *optNumTracks != 1))
return {};
const auto optTimeFormat = tryRead<uint16> (data, remaining);
if (! optTimeFormat.hasValue())
return {};
HeaderDetails result;
result.fileType = (short) *optFileType;
result.timeFormat = (short) *optTimeFormat;
result.numberOfTracks = (short) *optNumTracks;
result.bytesRead = maxSize - remaining;
return { result };
}
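// A negative timeFormat means SMPTE timing: the upper byte holds the (negated)
// frames-per-second and the lower byte the ticks per frame, so one tick lasts
// 1 / (fps * ticksPerFrame) seconds. A positive timeFormat is ticks per
// quarter-note and is resolved against the tempo events below.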
static double convertTicksToSeconds (double time,
const MidiMessageSequence& tempoEvents,
int timeFormat)
{
if (timeFormat < 0)
return time / (-(timeFormat >> 8) * (timeFormat & 0xff));
double lastTime = 0, correctedTime = 0;
auto tickLen = 1.0 / (timeFormat & 0x7fff);
auto secsPerTick = 0.5 * tickLen;
auto numEvents = tempoEvents.getNumEvents();
for (int i = 0; i < numEvents; ++i)
{
auto& m = tempoEvents.getEventPointer(i)->message;
auto eventTime = m.getTimeStamp();
if (eventTime >= time)
break;
correctedTime += (eventTime - lastTime) * secsPerTick;
lastTime = eventTime;
if (m.isTempoMetaEvent())
secsPerTick = tickLen * m.getTempoSecondsPerQuarterNote();
while (i + 1 < numEvents)
{
auto& m2 = tempoEvents.getEventPointer(i + 1)->message;
if (m2.getTimeStamp() != eventTime)
break;
if (m2.isTempoMetaEvent())
secsPerTick = tickLen * m2.getTempoSecondsPerQuarterNote();
++i;
}
}
return correctedTime + (time - lastTime) * secsPerTick;
}
template <typename MethodType>
static void findAllMatchingEvents (const OwnedArray<MidiMessageSequence>& tracks,
MidiMessageSequence& results,
MethodType method)
{
for (auto* track : tracks)
{
auto numEvents = track->getNumEvents();
for (int j = 0; j < numEvents; ++j)
{
auto& m = track->getEventPointer(j)->message;
if ((m.*method)())
results.addEvent (m);
}
}
}
static MidiMessageSequence readTrack (const uint8* data, int size)
{
double time = 0;
uint8 lastStatusByte = 0;
MidiMessageSequence result;
while (size > 0)
{
const auto delay = MidiMessage::readVariableLengthValue (data, (int) size);
if (! delay.isValid())
break;
data += delay.bytesUsed;
size -= delay.bytesUsed;
time += delay.value;
if (size <= 0)
break;
int messSize = 0;
const MidiMessage mm (data, size, messSize, lastStatusByte, time);
if (messSize <= 0)
break;
size -= messSize;
data += messSize;
result.addEvent (mm);
auto firstByte = *(mm.getRawData());
if ((firstByte & 0xf0) != 0xf0)
lastStatusByte = firstByte;
}
return result;
}
}
//==============================================================================
MidiFile::MidiFile() : timeFormat ((short) (unsigned short) 0xe728) {}
MidiFile::MidiFile (const MidiFile& other) : timeFormat (other.timeFormat)
{
tracks.addCopiesOf (other.tracks);
}
MidiFile& MidiFile::operator= (const MidiFile& other)
{
tracks.clear();
tracks.addCopiesOf (other.tracks);
timeFormat = other.timeFormat;
return *this;
}
MidiFile::MidiFile (MidiFile&& other)
: tracks (std::move (other.tracks)),
timeFormat (other.timeFormat)
{
}
MidiFile& MidiFile::operator= (MidiFile&& other)
{
tracks = std::move (other.tracks);
timeFormat = other.timeFormat;
return *this;
}
void MidiFile::clear()
{
tracks.clear();
}
//==============================================================================
int MidiFile::getNumTracks() const noexcept
{
return tracks.size();
}
const MidiMessageSequence* MidiFile::getTrack (int index) const noexcept
{
return tracks[index];
}
void MidiFile::addTrack (const MidiMessageSequence& trackSequence)
{
tracks.add (new MidiMessageSequence (trackSequence));
}
//==============================================================================
short MidiFile::getTimeFormat() const noexcept
{
return timeFormat;
}
void MidiFile::setTicksPerQuarterNote (int ticks) noexcept
{
timeFormat = (short) ticks;
}
void MidiFile::setSmpteTimeFormat (int framesPerSecond, int subframeResolution) noexcept
{
timeFormat = (short) (((-framesPerSecond) << 8) | subframeResolution);
}
//==============================================================================
void MidiFile::findAllTempoEvents (MidiMessageSequence& results) const
{
MidiFileHelpers::findAllMatchingEvents (tracks, results, &MidiMessage::isTempoMetaEvent);
}
void MidiFile::findAllTimeSigEvents (MidiMessageSequence& results) const
{
MidiFileHelpers::findAllMatchingEvents (tracks, results, &MidiMessage::isTimeSignatureMetaEvent);
}
void MidiFile::findAllKeySigEvents (MidiMessageSequence& results) const
{
MidiFileHelpers::findAllMatchingEvents (tracks, results, &MidiMessage::isKeySignatureMetaEvent);
}
double MidiFile::getLastTimestamp() const
{
double t = 0.0;
for (auto* ms : tracks)
t = jmax (t, ms->getEndTime());
return t;
}
//==============================================================================
bool MidiFile::readFrom (InputStream& sourceStream,
bool createMatchingNoteOffs,
int* fileType)
{
clear();
MemoryBlock data;
const int maxSensibleMidiFileSize = 200 * 1024 * 1024;
// (put a sanity-check on the file size, as midi files are generally small)
if (! sourceStream.readIntoMemoryBlock (data, maxSensibleMidiFileSize))
return false;
auto size = data.getSize();
auto d = static_cast<const uint8*> (data.getData());
const auto optHeader = MidiFileHelpers::parseMidiHeader (d, size);
if (! optHeader.hasValue())
return false;
const auto header = *optHeader;
timeFormat = header.timeFormat;
d += header.bytesRead;
size -= (size_t) header.bytesRead;
for (int track = 0; track < header.numberOfTracks; ++track)
{
const auto optChunkType = MidiFileHelpers::tryRead<uint32> (d, size);
if (! optChunkType.hasValue())
return false;
const auto optChunkSize = MidiFileHelpers::tryRead<uint32> (d, size);
if (! optChunkSize.hasValue())
return false;
const auto chunkSize = *optChunkSize;
if (size < chunkSize)
return false;
if (*optChunkType == ByteOrder::bigEndianInt ("MTrk"))
readNextTrack (d, (int) chunkSize, createMatchingNoteOffs);
size -= chunkSize;
d += chunkSize;
}
const auto successful = (size == 0);
if (successful && fileType != nullptr)
*fileType = header.fileType;
return successful;
}
void MidiFile::readNextTrack (const uint8* data, int size, bool createMatchingNoteOffs)
{
auto sequence = MidiFileHelpers::readTrack (data, size);
// sort so that we put all the note-offs before note-ons that have the same time
std::stable_sort (sequence.list.begin(), sequence.list.end(),
[] (const MidiMessageSequence::MidiEventHolder* a,
const MidiMessageSequence::MidiEventHolder* b)
{
auto t1 = a->message.getTimeStamp();
auto t2 = b->message.getTimeStamp();
if (t1 < t2) return true;
if (t2 < t1) return false;
return a->message.isNoteOff() && b->message.isNoteOn();
});
if (createMatchingNoteOffs)
sequence.updateMatchedPairs();
addTrack (sequence);
}
//==============================================================================
void MidiFile::convertTimestampTicksToSeconds()
{
MidiMessageSequence tempoEvents;
findAllTempoEvents (tempoEvents);
findAllTimeSigEvents (tempoEvents);
if (timeFormat != 0)
{
for (auto* ms : tracks)
{
for (int j = ms->getNumEvents(); --j >= 0;)
{
auto& m = ms->getEventPointer(j)->message;
m.setTimeStamp (MidiFileHelpers::convertTicksToSeconds (m.getTimeStamp(), tempoEvents, timeFormat));
}
}
}
}
//==============================================================================
bool MidiFile::writeTo (OutputStream& out, int midiFileType) const
{
jassert (midiFileType >= 0 && midiFileType <= 2);
if (! out.writeIntBigEndian ((int) ByteOrder::bigEndianInt ("MThd"))) return false;
if (! out.writeIntBigEndian (6)) return false;
if (! out.writeShortBigEndian ((short) midiFileType)) return false;
if (! out.writeShortBigEndian ((short) tracks.size())) return false;
if (! out.writeShortBigEndian (timeFormat)) return false;
for (auto* ms : tracks)
if (! writeTrack (out, *ms))
return false;
out.flush();
return true;
}
bool MidiFile::writeTrack (OutputStream& mainOut, const MidiMessageSequence& ms) const
{
MemoryOutputStream out;
int lastTick = 0;
uint8 lastStatusByte = 0;
bool endOfTrackEventWritten = false;
for (int i = 0; i < ms.getNumEvents(); ++i)
{
auto& mm = ms.getEventPointer(i)->message;
if (mm.isEndOfTrackMetaEvent())
endOfTrackEventWritten = true;
auto tick = roundToInt (mm.getTimeStamp());
auto delta = jmax (0, tick - lastTick);
MidiFileHelpers::writeVariableLengthInt (out, (uint32) delta);
lastTick = tick;
auto* data = mm.getRawData();
auto dataSize = mm.getRawDataSize();
auto statusByte = data[0];
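// Running status: if this voice message has the same status byte as the previous
// one, the status byte is omitted and only the data bytes are written.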
if (statusByte == lastStatusByte
&& (statusByte & 0xf0) != 0xf0
&& dataSize > 1
&& i > 0)
{
++data;
--dataSize;
}
else if (statusByte == 0xf0) // Write sysex message with length bytes.
{
out.writeByte ((char) statusByte);
++data;
--dataSize;
MidiFileHelpers::writeVariableLengthInt (out, (uint32) dataSize);
}
out.write (data, (size_t) dataSize);
lastStatusByte = statusByte;
}
if (! endOfTrackEventWritten)
{
out.writeByte (0); // (tick delta)
auto m = MidiMessage::endOfTrack();
out.write (m.getRawData(), (size_t) m.getRawDataSize());
}
if (! mainOut.writeIntBigEndian ((int) ByteOrder::bigEndianInt ("MTrk"))) return false;
if (! mainOut.writeIntBigEndian ((int) out.getDataSize())) return false;
mainOut << out;
return true;
}
//==============================================================================
//==============================================================================
#if JUCE_UNIT_TESTS
struct MidiFileTest : public UnitTest
{
MidiFileTest()
: UnitTest ("MidiFile", UnitTestCategories::midi)
{}
void runTest() override
{
beginTest ("ReadTrack respects running status");
{
const auto sequence = parseSequence ([] (OutputStream& os)
{
MidiFileHelpers::writeVariableLengthInt (os, 100);
writeBytes (os, { 0x90, 0x40, 0x40 });
MidiFileHelpers::writeVariableLengthInt (os, 200);
writeBytes (os, { 0x40, 0x40 });
MidiFileHelpers::writeVariableLengthInt (os, 300);
writeBytes (os, { 0xff, 0x2f, 0x00 });
});
expectEquals (sequence.getNumEvents(), 3);
expect (sequence.getEventPointer (0)->message.isNoteOn());
expect (sequence.getEventPointer (1)->message.isNoteOn());
expect (sequence.getEventPointer (2)->message.isEndOfTrackMetaEvent());
}
beginTest ("ReadTrack returns available messages if input is truncated");
{
{
const auto sequence = parseSequence ([] (OutputStream& os)
{
// Incomplete delta time
writeBytes (os, { 0xff });
});
expectEquals (sequence.getNumEvents(), 0);
}
{
const auto sequence = parseSequence ([] (OutputStream& os)
{
// Complete delta with no following event
MidiFileHelpers::writeVariableLengthInt (os, 0xffff);
});
expectEquals (sequence.getNumEvents(), 0);
}
{
const auto sequence = parseSequence ([] (OutputStream& os)
{
// Complete delta with malformed following event
MidiFileHelpers::writeVariableLengthInt (os, 0xffff);
writeBytes (os, { 0x90, 0x40 });
});
expectEquals (sequence.getNumEvents(), 1);
expect (sequence.getEventPointer (0)->message.isNoteOff());
expectEquals (sequence.getEventPointer (0)->message.getNoteNumber(), 0x40);
expectEquals (sequence.getEventPointer (0)->message.getVelocity(), (uint8) 0x00);
}
}
beginTest ("Header parsing works");
{
{
// No data
const auto header = parseHeader ([] (OutputStream&) {});
expect (! header.hasValue());
}
{
// Invalid initial byte
const auto header = parseHeader ([] (OutputStream& os)
{
writeBytes (os, { 0xff });
});
expect (! header.hasValue());
}
{
// Type block, but no header data
const auto header = parseHeader ([] (OutputStream& os)
{
writeBytes (os, { 'M', 'T', 'h', 'd' });
});
expect (! header.hasValue());
}
{
// Well-formed header, but track type is 0 and channels != 1
const auto header = parseHeader ([] (OutputStream& os)
{
writeBytes (os, { 'M', 'T', 'h', 'd', 0, 0, 0, 6, 0, 0, 0, 16, 0, 1 });
});
expect (! header.hasValue());
}
{
// Well-formed header, but track type is 5
const auto header = parseHeader ([] (OutputStream& os)
{
writeBytes (os, { 'M', 'T', 'h', 'd', 0, 0, 0, 6, 0, 5, 0, 16, 0, 1 });
});
expect (! header.hasValue());
}
{
// Well-formed header
const auto header = parseHeader ([] (OutputStream& os)
{
writeBytes (os, { 'M', 'T', 'h', 'd', 0, 0, 0, 6, 0, 1, 0, 16, 0, 1 });
});
expect (header.hasValue());
expectEquals (header->fileType, (short) 1);
expectEquals (header->numberOfTracks, (short) 16);
expectEquals (header->timeFormat, (short) 1);
expectEquals ((int) header->bytesRead, 14);
}
}
beginTest ("Read from stream");
{
{
// Empty input
const auto file = parseFile ([] (OutputStream&) {});
expect (! file.hasValue());
}
{
// Malformed header
const auto file = parseFile ([] (OutputStream& os)
{
writeBytes (os, { 'M', 'T', 'h', 'd' });
});
expect (! file.hasValue());
}
{
// Header, no channels
const auto file = parseFile ([] (OutputStream& os)
{
writeBytes (os, { 'M', 'T', 'h', 'd', 0, 0, 0, 6, 0, 1, 0, 0, 0, 1 });
});
expect (file.hasValue());
expectEquals (file->getNumTracks(), 0);
}
{
// Header, one malformed channel
const auto file = parseFile ([] (OutputStream& os)
{
writeBytes (os, { 'M', 'T', 'h', 'd', 0, 0, 0, 6, 0, 1, 0, 1, 0, 1 });
writeBytes (os, { 'M', 'T', 'r', '?' });
});
expect (! file.hasValue());
}
{
// Header, one channel with malformed message
const auto file = parseFile ([] (OutputStream& os)
{
writeBytes (os, { 'M', 'T', 'h', 'd', 0, 0, 0, 6, 0, 1, 0, 1, 0, 1 });
writeBytes (os, { 'M', 'T', 'r', 'k', 0, 0, 0, 1, 0xff });
});
expect (file.hasValue());
expectEquals (file->getNumTracks(), 1);
expectEquals (file->getTrack (0)->getNumEvents(), 0);
}
{
// Header, one channel with incorrect length message
const auto file = parseFile ([] (OutputStream& os)
{
writeBytes (os, { 'M', 'T', 'h', 'd', 0, 0, 0, 6, 0, 1, 0, 1, 0, 1 });
writeBytes (os, { 'M', 'T', 'r', 'k', 0x0f, 0, 0, 0, 0xff });
});
expect (! file.hasValue());
}
{
// Header, one channel, all well-formed
const auto file = parseFile ([] (OutputStream& os)
{
writeBytes (os, { 'M', 'T', 'h', 'd', 0, 0, 0, 6, 0, 1, 0, 1, 0, 1 });
writeBytes (os, { 'M', 'T', 'r', 'k', 0, 0, 0, 4 });
MidiFileHelpers::writeVariableLengthInt (os, 0x0f);
writeBytes (os, { 0x80, 0x00, 0x00 });
});
expect (file.hasValue());
expectEquals (file->getNumTracks(), 1);
auto& track = *file->getTrack (0);
expectEquals (track.getNumEvents(), 1);
expect (track.getEventPointer (0)->message.isNoteOff());
expectEquals (track.getEventPointer (0)->message.getTimeStamp(), (double) 0x0f);
}
}
}
template <typename Fn>
static MidiMessageSequence parseSequence (Fn&& fn)
{
MemoryOutputStream os;
fn (os);
return MidiFileHelpers::readTrack (reinterpret_cast<const uint8*> (os.getData()),
(int) os.getDataSize());
}
template <typename Fn>
static Optional<MidiFileHelpers::HeaderDetails> parseHeader (Fn&& fn)
{
MemoryOutputStream os;
fn (os);
return MidiFileHelpers::parseMidiHeader (reinterpret_cast<const uint8*> (os.getData()),
os.getDataSize());
}
template <typename Fn>
static Optional<MidiFile> parseFile (Fn&& fn)
{
MemoryOutputStream os;
fn (os);
MemoryInputStream is (os.getData(), os.getDataSize(), false);
MidiFile mf;
int fileType = 0;
if (mf.readFrom (is, true, &fileType))
return mf;
return {};
}
static void writeBytes (OutputStream& os, const std::vector<uint8>& bytes)
{
for (const auto& byte : bytes)
os.writeByte ((char) byte);
}
};
static MidiFileTest midiFileTests;
#endif
} // namespace juce

+ 0
- 198
libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiFile.h View File

@@ -1,198 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
Reads/writes standard midi format files.
To read a midi file, create a MidiFile object and call its readFrom() method. You
can then get the individual midi tracks from it using the getTrack() method.
To write a file, create a MidiFile object, add some MidiMessageSequence objects
to it using the addTrack() method, and then call its writeTo() method to stream
it out.
@see MidiMessageSequence
@tags{Audio}
*/
class JUCE_API MidiFile
{
public:
//==============================================================================
/** Creates an empty MidiFile object. */
MidiFile();
/** Creates a copy of another MidiFile. */
MidiFile (const MidiFile&);
/** Copies from another MidiFile object */
MidiFile& operator= (const MidiFile&);
/** Moves the contents of another MidiFile. */
MidiFile (MidiFile&&);
/** Moves the contents from another MidiFile object */
MidiFile& operator= (MidiFile&&);
//==============================================================================
/** Returns the number of tracks in the file.
@see getTrack, addTrack
*/
int getNumTracks() const noexcept;
/** Returns a pointer to one of the tracks in the file.
@returns a pointer to the track, or nullptr if the index is out-of-range
@see getNumTracks, addTrack
*/
const MidiMessageSequence* getTrack (int index) const noexcept;
/** Adds a midi track to the file.
This will make its own internal copy of the sequence that is passed-in.
@see getNumTracks, getTrack
*/
void addTrack (const MidiMessageSequence& trackSequence);
/** Removes all midi tracks from the file.
@see getNumTracks
*/
void clear();
/** Returns the raw time format code that will be written to a stream.
After reading a midi file, this method will return the time-format that
was read from the file's header. It can be changed using the setTicksPerQuarterNote()
or setSmpteTimeFormat() methods.
If the value returned is positive, it indicates the number of midi ticks
per quarter-note - see setTicksPerQuarterNote().
If it's negative, the upper byte indicates the frames-per-second (but negative), and
the lower byte is the number of ticks per frame - see setSmpteTimeFormat().
*/
short getTimeFormat() const noexcept;
/** Sets the time format to use when this file is written to a stream.
If this is called, the file will be written as bars/beats using the
specified resolution, rather than SMPTE absolute times, as would be
used if setSmpteTimeFormat() had been called instead.
@param ticksPerQuarterNote e.g. 96, 960
@see setSmpteTimeFormat
*/
void setTicksPerQuarterNote (int ticksPerQuarterNote) noexcept;
/** Sets the time format to use when this file is written to a stream.
If this is called, the file will be written using absolute times, rather
than bars/beats as would be the case if setTicksPerQuarterNote() had been called
instead.
@param framesPerSecond must be 24, 25, 29 or 30
@param subframeResolution the sub-second resolution, e.g. 4 (midi time code),
8, 10, 80 (SMPTE bit resolution), or 100. For millisecond
timing, setSmpteTimeFormat (25, 40)
@see setTicksPerQuarterNote
*/
void setSmpteTimeFormat (int framesPerSecond,
int subframeResolution) noexcept;
//==============================================================================
/** Makes a list of all the tempo-change meta-events from all tracks in the midi file.
Useful for finding the positions of all the tempo changes in a file.
@param tempoChangeEvents a list to which all the events will be added
*/
void findAllTempoEvents (MidiMessageSequence& tempoChangeEvents) const;
/** Makes a list of all the time-signature meta-events from all tracks in the midi file.
Useful for finding the positions of all the time-signature changes in a file.
@param timeSigEvents a list to which all the events will be added
*/
void findAllTimeSigEvents (MidiMessageSequence& timeSigEvents) const;
/** Makes a list of all the key-signature meta-events from all tracks in the midi file.
@param keySigEvents a list to which all the events will be added
*/
void findAllKeySigEvents (MidiMessageSequence& keySigEvents) const;
/** Returns the latest timestamp in any of the tracks.
(Useful for finding the length of the file).
*/
double getLastTimestamp() const;
//==============================================================================
/** Reads a midi file format stream.
After calling this, you can get the tracks that were read from the file by using the
getNumTracks() and getTrack() methods.
The timestamps of the midi events in the tracks will represent their positions in
terms of midi ticks. To convert them to seconds, use the convertTimestampTicksToSeconds()
method.
@param sourceStream the source stream
@param createMatchingNoteOffs if true, any missing note-offs for previous note-ons will
be automatically added at the end of the file by calling
MidiMessageSequence::updateMatchedPairs on each track.
@param midiFileType if not nullptr, the integer at this address will be set
to 0, 1, or 2 depending on the type of the midi file
@returns true if the stream was read successfully
*/
bool readFrom (InputStream& sourceStream,
bool createMatchingNoteOffs = true,
int* midiFileType = nullptr);
/** Writes the midi tracks as a standard midi file.
The midiFileType value is written as the file's format type, which can be 0, 1
or 2 - see the midi file spec for more info about that.
@param destStream the destination stream
@param midiFileType the type of midi file
@returns true if the operation succeeded.
*/
bool writeTo (OutputStream& destStream, int midiFileType = 1) const;
/** Converts the timestamp of all the midi events from midi ticks to seconds.
This will use the midi time format and tempo/time signature info in the
tracks to convert all the timestamps to absolute values in seconds.
*/
void convertTimestampTicksToSeconds();
private:
//==============================================================================
OwnedArray<MidiMessageSequence> tracks;
short timeFormat;
void readNextTrack (const uint8*, int, bool);
bool writeTrack (OutputStream&, const MidiMessageSequence&) const;
JUCE_LEAK_DETECTOR (MidiFile)
};
} // namespace juce
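A minimal sketch of the read path described above, assuming a readable file and using an illustrative dumpMidiFile() helper:

    static void dumpMidiFile (const juce::File& fileToRead)
    {
        juce::FileInputStream stream (fileToRead);
        juce::MidiFile midiFile;

        if (! stream.openedOk() || ! midiFile.readFrom (stream))
            return;

        // Convert the per-track tick timestamps into seconds using the file's tempo map.
        midiFile.convertTimestampTicksToSeconds();

        for (int i = 0; i < midiFile.getNumTracks(); ++i)
            if (auto* track = midiFile.getTrack (i))
                juce::Logger::writeToLog ("track " + juce::String (i) + ": "
                                          + juce::String (track->getNumEvents()) + " events, ends at "
                                          + juce::String (track->getEndTime()) + " s");
    }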

+ 0
- 173
libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiKeyboardState.cpp View File

@@ -1,173 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
MidiKeyboardState::MidiKeyboardState()
{
zerostruct (noteStates);
}
//==============================================================================
void MidiKeyboardState::reset()
{
const ScopedLock sl (lock);
zerostruct (noteStates);
eventsToAdd.clear();
}
bool MidiKeyboardState::isNoteOn (const int midiChannel, const int n) const noexcept
{
jassert (midiChannel > 0 && midiChannel <= 16);
return isPositiveAndBelow (n, 128)
&& (noteStates[n] & (1 << (midiChannel - 1))) != 0;
}
bool MidiKeyboardState::isNoteOnForChannels (const int midiChannelMask, const int n) const noexcept
{
return isPositiveAndBelow (n, 128)
&& (noteStates[n] & midiChannelMask) != 0;
}
void MidiKeyboardState::noteOn (const int midiChannel, const int midiNoteNumber, const float velocity)
{
jassert (midiChannel > 0 && midiChannel <= 16);
jassert (isPositiveAndBelow (midiNoteNumber, 128));
const ScopedLock sl (lock);
if (isPositiveAndBelow (midiNoteNumber, 128))
{
const int timeNow = (int) Time::getMillisecondCounter();
eventsToAdd.addEvent (MidiMessage::noteOn (midiChannel, midiNoteNumber, velocity), timeNow);
eventsToAdd.clear (0, timeNow - 500);
noteOnInternal (midiChannel, midiNoteNumber, velocity);
}
}
void MidiKeyboardState::noteOnInternal (const int midiChannel, const int midiNoteNumber, const float velocity)
{
if (isPositiveAndBelow (midiNoteNumber, 128))
{
noteStates[midiNoteNumber] = static_cast<uint16> (noteStates[midiNoteNumber] | (1 << (midiChannel - 1)));
listeners.call ([&] (Listener& l) { l.handleNoteOn (this, midiChannel, midiNoteNumber, velocity); });
}
}
void MidiKeyboardState::noteOff (const int midiChannel, const int midiNoteNumber, const float velocity)
{
const ScopedLock sl (lock);
if (isNoteOn (midiChannel, midiNoteNumber))
{
const int timeNow = (int) Time::getMillisecondCounter();
eventsToAdd.addEvent (MidiMessage::noteOff (midiChannel, midiNoteNumber), timeNow);
eventsToAdd.clear (0, timeNow - 500);
noteOffInternal (midiChannel, midiNoteNumber, velocity);
}
}
void MidiKeyboardState::noteOffInternal (const int midiChannel, const int midiNoteNumber, const float velocity)
{
if (isNoteOn (midiChannel, midiNoteNumber))
{
noteStates[midiNoteNumber] = static_cast<uint16> (noteStates[midiNoteNumber] & ~(1 << (midiChannel - 1)));
listeners.call ([&] (Listener& l) { l.handleNoteOff (this, midiChannel, midiNoteNumber, velocity); });
}
}
void MidiKeyboardState::allNotesOff (const int midiChannel)
{
const ScopedLock sl (lock);
if (midiChannel <= 0)
{
for (int i = 1; i <= 16; ++i)
allNotesOff (i);
}
else
{
for (int i = 0; i < 128; ++i)
noteOff (midiChannel, i, 0.0f);
}
}
void MidiKeyboardState::processNextMidiEvent (const MidiMessage& message)
{
if (message.isNoteOn())
{
noteOnInternal (message.getChannel(), message.getNoteNumber(), message.getFloatVelocity());
}
else if (message.isNoteOff())
{
noteOffInternal (message.getChannel(), message.getNoteNumber(), message.getFloatVelocity());
}
else if (message.isAllNotesOff())
{
for (int i = 0; i < 128; ++i)
noteOffInternal (message.getChannel(), i, 0.0f);
}
}
void MidiKeyboardState::processNextMidiBuffer (MidiBuffer& buffer,
const int startSample,
const int numSamples,
const bool injectIndirectEvents)
{
const ScopedLock sl (lock);
for (const auto metadata : buffer)
processNextMidiEvent (metadata.getMessage());
if (injectIndirectEvents)
{
const int firstEventToAdd = eventsToAdd.getFirstEventTime();
const double scaleFactor = numSamples / (double) (eventsToAdd.getLastEventTime() + 1 - firstEventToAdd);
for (const auto metadata : eventsToAdd)
{
const auto pos = jlimit (0, numSamples - 1, roundToInt ((metadata.samplePosition - firstEventToAdd) * scaleFactor));
buffer.addEvent (metadata.getMessage(), startSample + pos);
}
}
eventsToAdd.clear();
}
//==============================================================================
void MidiKeyboardState::addListener (Listener* listener)
{
const ScopedLock sl (lock);
listeners.add (listener);
}
void MidiKeyboardState::removeListener (Listener* listener)
{
const ScopedLock sl (lock);
listeners.remove (listener);
}
} // namespace juce
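A sketch of how this state is typically driven from an audio callback; the helper name and parameters here are illustrative:

    static void mergeKeyboardEvents (juce::MidiKeyboardState& keyboardState,
                                     juce::MidiBuffer& midi, int numSamples)
    {
        // Updates the per-channel key states from the incoming events, then injects MIDI
        // for any noteOn()/noteOff() calls made since the last block (e.g. from an on-screen
        // keyboard), spread across the first numSamples sample positions of the buffer.
        keyboardState.processNextMidiBuffer (midi, 0, numSamples, true);
    }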

+ 0
- 195
libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiKeyboardState.h View File

@@ -1,195 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
Represents a piano keyboard, keeping track of which keys are currently pressed.
This object can parse a stream of midi events, using them to update its idea
of which keys are pressed for each individual midi channel.
When keys go up or down, it can broadcast these events to listener objects.
It also allows key up/down events to be triggered with its noteOn() and noteOff()
methods, and midi messages for these events will be merged into the
midi stream that gets processed by processNextMidiBuffer().
@tags{Audio}
*/
class JUCE_API MidiKeyboardState
{
public:
//==============================================================================
MidiKeyboardState();
//==============================================================================
/** Resets the state of the object.
All internal data for all the channels is reset, but no events are sent as a
result.
If you want to release any keys that are currently down, and to send out note-up
midi messages for this, use the allNotesOff() method instead.
*/
void reset();
/** Returns true if the given midi key is currently held down for the given midi channel.
The channel number must be between 1 and 16. If you want to see if any notes are
on for a range of channels, use the isNoteOnForChannels() method.
*/
bool isNoteOn (int midiChannel, int midiNoteNumber) const noexcept;
/** Returns true if the given midi key is currently held down on any of a set of midi channels.
The channel mask has a bit set for each midi channel you want to test for - bit
0 = midi channel 1, bit 1 = midi channel 2, etc.
If a note is on for at least one of the specified channels, this returns true.
*/
bool isNoteOnForChannels (int midiChannelMask, int midiNoteNumber) const noexcept;
/** Turns a specified note on.
This will cause a suitable midi note-on event to be injected into the midi buffer during the
next call to processNextMidiBuffer().
It will also trigger a synchronous callback to the listeners to tell them that the key has
gone down.
*/
void noteOn (int midiChannel, int midiNoteNumber, float velocity);
/** Turns a specified note off.
This will cause a suitable midi note-off event to be injected into the midi buffer during the
next call to processNextMidiBuffer().
It will also trigger a synchronous callback to the listeners to tell them that the key has
gone up.
But if the note isn't actually down for the given channel, this method will in fact do nothing.
*/
void noteOff (int midiChannel, int midiNoteNumber, float velocity);
/** This will turn off any currently-down notes for the given midi channel.
If you pass 0 for the midi channel, it will in fact turn off all notes on all channels.
Calling this method will make calls to noteOff(), so can trigger synchronous callbacks
and events being added to the midi stream.
*/
void allNotesOff (int midiChannel);
//==============================================================================
/** Looks at a key-up/down event and uses it to update the state of this object.
To process a buffer full of midi messages, use the processNextMidiBuffer() method
instead.
*/
void processNextMidiEvent (const MidiMessage& message);
/** Scans a midi stream for up/down events and adds its own events to it.
This will look for any up/down events and use them to update the internal state,
synchronously making suitable callbacks to the listeners.
If injectIndirectEvents is true, then midi events to produce the recent noteOn()
and noteOff() calls will be added into the buffer.
Only the section of the buffer whose timestamps are between startSample and
(startSample + numSamples) will be affected, and any events added will be placed
between these times.
If you're going to use this method, you'll need to keep calling it regularly for
it to work satisfactorily.
To process a single midi event at a time, use the processNextMidiEvent() method
instead.
*/
void processNextMidiBuffer (MidiBuffer& buffer,
int startSample,
int numSamples,
bool injectIndirectEvents);
//==============================================================================
/** Receives events from a MidiKeyboardState object. */
class JUCE_API Listener
{
public:
//==============================================================================
virtual ~Listener() = default;
//==============================================================================
/** Called when one of the MidiKeyboardState's keys is pressed.
This will be called synchronously when the state is either processing a
buffer in its MidiKeyboardState::processNextMidiBuffer() method, or
when a note is being played with its MidiKeyboardState::noteOn() method.
Note that this callback could happen from an audio callback thread, so be
careful not to block, and avoid any UI activity in the callback.
*/
virtual void handleNoteOn (MidiKeyboardState* source,
int midiChannel, int midiNoteNumber, float velocity) = 0;
/** Called when one of the MidiKeyboardState's keys is released.
This will be called synchronously when the state is either processing a
buffer in its MidiKeyboardState::processNextMidiBuffer() method, or
when a note is being played with its MidiKeyboardState::noteOff() method.
Note that this callback could happen from an audio callback thread, so be
careful not to block, and avoid any UI activity in the callback.
*/
virtual void handleNoteOff (MidiKeyboardState* source,
int midiChannel, int midiNoteNumber, float velocity) = 0;
};
/** Registers a listener for callbacks when keys go up or down.
@see removeListener
*/
void addListener (Listener* listener);
/** Deregisters a listener.
@see addListener
*/
void removeListener (Listener* listener);
private:
//==============================================================================
CriticalSection lock;
std::atomic<uint16> noteStates[128];
MidiBuffer eventsToAdd;
ListenerList<Listener> listeners;
void noteOnInternal (int midiChannel, int midiNoteNumber, float velocity);
void noteOffInternal (int midiChannel, int midiNoteNumber, float velocity);
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MidiKeyboardState)
};
using MidiKeyboardStateListener = MidiKeyboardState::Listener;
} // namespace juce
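A short sketch of the Listener interface above (illustrative only; the class name is an assumption):

    // Hedged sketch: a listener that just logs key events.
    struct KeyLogger : public juce::MidiKeyboardState::Listener
    {
        void handleNoteOn (juce::MidiKeyboardState*, int channel, int note, float velocity) override
        {
            DBG ("note on  ch " << channel << " note " << note << " vel " << velocity);
        }

        void handleNoteOff (juce::MidiKeyboardState*, int channel, int note, float velocity) override
        {
            DBG ("note off ch " << channel << " note " << note << " vel " << velocity);
        }
    };

    // juce::MidiKeyboardState state;
    // KeyLogger logger;
    // state.addListener (&logger);   // callbacks may arrive on the audio thread
    // state.noteOn (1, 60, 0.8f);    // triggers handleNoteOn synchronously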

+ 0
- 1331
libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiMessage.cpp
File diff suppressed because it is too large


+ 0
- 986
libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiMessage.h

@@ -1,986 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
Encapsulates a MIDI message.
@see MidiMessageSequence, MidiOutput, MidiInput
@tags{Audio}
*/
class JUCE_API MidiMessage
{
public:
//==============================================================================
/** Creates a 3-byte short midi message.
@param byte1 message byte 1
@param byte2 message byte 2
@param byte3 message byte 3
@param timeStamp the time to give the midi message - this value doesn't
use any particular units, so will be application-specific
*/
MidiMessage (int byte1, int byte2, int byte3, double timeStamp = 0) noexcept;
/** Creates a 2-byte short midi message.
@param byte1 message byte 1
@param byte2 message byte 2
@param timeStamp the time to give the midi message - this value doesn't
use any particular units, so will be application-specific
*/
MidiMessage (int byte1, int byte2, double timeStamp = 0) noexcept;
/** Creates a 1-byte short midi message.
@param byte1 message byte 1
@param timeStamp the time to give the midi message - this value doesn't
use any particular units, so will be application-specific
*/
MidiMessage (int byte1, double timeStamp = 0) noexcept;
/** Creates a midi message from a list of bytes. */
template <typename... Data>
MidiMessage (int byte1, int byte2, int byte3, Data... otherBytes) : size (3 + sizeof... (otherBytes))
{
// this checks that the length matches the data..
jassert (size > 3 || byte1 >= 0xf0 || getMessageLengthFromFirstByte ((uint8) byte1) == size);
const uint8 data[] = { (uint8) byte1, (uint8) byte2, (uint8) byte3, static_cast<uint8> (otherBytes)... };
memcpy (allocateSpace (size), data, (size_t) size);
}
/** Creates a midi message from a block of data. */
MidiMessage (const void* data, int numBytes, double timeStamp = 0);
/** Reads the next midi message from some data.
This will read as many bytes from a data stream as it needs to make a
complete message, and will return the number of bytes it used. This lets
you read a sequence of midi messages from a file or stream.
@param data the data to read from
@param maxBytesToUse the maximum number of bytes it's allowed to read
@param numBytesUsed returns the number of bytes that were actually needed
@param lastStatusByte in a sequence of midi messages, the initial byte
can be dropped from a message if it's the same as the
first byte of the previous message, so this lets you
supply the byte to use if the first byte of the message
has in fact been dropped.
@param timeStamp the time to give the midi message - this value doesn't
use any particular units, so will be application-specific
@param sysexHasEmbeddedLength when reading sysexes, this flag indicates whether
to expect the data to begin with a variable-length
field indicating its size
*/
MidiMessage (const void* data, int maxBytesToUse,
int& numBytesUsed, uint8 lastStatusByte,
double timeStamp = 0,
bool sysexHasEmbeddedLength = true);
/** Creates an empty sysex message.
Since the MidiMessage has to contain a valid message, this default constructor
just initialises it with an empty sysex message.
*/
MidiMessage() noexcept;
/** Creates a copy of another midi message. */
MidiMessage (const MidiMessage&);
/** Creates a copy of another midi message, with a different timestamp. */
MidiMessage (const MidiMessage&, double newTimeStamp);
/** Destructor. */
~MidiMessage() noexcept;
/** Copies this message from another one. */
MidiMessage& operator= (const MidiMessage& other);
/** Move constructor */
MidiMessage (MidiMessage&&) noexcept;
/** Move assignment operator */
MidiMessage& operator= (MidiMessage&&) noexcept;
//==============================================================================
/** Returns a pointer to the raw midi data.
@see getRawDataSize
*/
const uint8* getRawData() const noexcept { return getData(); }
/** Returns the number of bytes of data in the message.
@see getRawData
*/
int getRawDataSize() const noexcept { return size; }
//==============================================================================
/** Returns a human-readable description of the midi message as a string,
for example "Note On C#3 Velocity 120 Channel 1".
*/
String getDescription() const;
//==============================================================================
/** Returns the timestamp associated with this message.
The exact meaning of this time and its units will vary, as messages are used in
a variety of different contexts.
If you're getting the message from a midi file, this could be a time in seconds, or
a number of ticks - see MidiFile::convertTimestampTicksToSeconds().
If the message is being used in a MidiBuffer, it might indicate the number of
audio samples from the start of the buffer.
If the message was created by a MidiInput, see MidiInputCallback::handleIncomingMidiMessage()
for details of the way that it initialises this value.
@see setTimeStamp, addToTimeStamp
*/
double getTimeStamp() const noexcept { return timeStamp; }
/** Changes the message's associated timestamp.
The units for the timestamp will be application-specific - see the notes for getTimeStamp().
@see addToTimeStamp, getTimeStamp
*/
void setTimeStamp (double newTimestamp) noexcept { timeStamp = newTimestamp; }
/** Adds a value to the message's timestamp.
The units for the timestamp will be application-specific.
*/
void addToTimeStamp (double delta) noexcept { timeStamp += delta; }
/** Return a copy of this message with a new timestamp.
The units for the timestamp will be application-specific - see the notes for getTimeStamp().
*/
MidiMessage withTimeStamp (double newTimestamp) const;
//==============================================================================
/** Returns the midi channel associated with the message.
@returns a value 1 to 16 if the message has a channel, or 0 if it hasn't (e.g.
if it's a sysex)
@see isForChannel, setChannel
*/
int getChannel() const noexcept;
/** Returns true if the message applies to the given midi channel.
@param channelNumber the channel number to look for, in the range 1 to 16
@see getChannel, setChannel
*/
bool isForChannel (int channelNumber) const noexcept;
/** Changes the message's midi channel.
This won't do anything for non-channel messages like sysexes.
@param newChannelNumber the channel number to change it to, in the range 1 to 16
*/
void setChannel (int newChannelNumber) noexcept;
//==============================================================================
/** Returns true if this is a system-exclusive message.
*/
bool isSysEx() const noexcept;
/** Returns a pointer to the sysex data inside the message.
If this event isn't a sysex event, it'll return 0.
@see getSysExDataSize
*/
const uint8* getSysExData() const noexcept;
/** Returns the size of the sysex data.
This value excludes the 0xf0 header byte and the 0xf7 at the end.
@see getSysExData
*/
int getSysExDataSize() const noexcept;
//==============================================================================
/** Returns true if this message is a 'key-down' event.
@param returnTrueForVelocity0 if true, then if this event is a note-on with
velocity 0, it will still be considered to be a note-on and the
method will return true. If returnTrueForVelocity0 is false, then
if this is a note-on event with velocity 0, it'll be regarded as
a note-off, and the method will return false
@see isNoteOff, getNoteNumber, getVelocity, noteOn
*/
bool isNoteOn (bool returnTrueForVelocity0 = false) const noexcept;
/** Creates a key-down message (using a floating-point velocity).
@param channel the midi channel, in the range 1 to 16
@param noteNumber the key number, 0 to 127
@param velocity in the range 0 to 1.0
@see isNoteOn
*/
static MidiMessage noteOn (int channel, int noteNumber, float velocity) noexcept;
/** Creates a key-down message (using an integer velocity).
@param channel the midi channel, in the range 1 to 16
@param noteNumber the key number, 0 to 127
@param velocity in the range 0 to 127
@see isNoteOn
*/
static MidiMessage noteOn (int channel, int noteNumber, uint8 velocity) noexcept;
/** Returns true if this message is a 'key-up' event.
If returnTrueForNoteOnVelocity0 is true, then this will also return true
for a note-on event with a velocity of 0.
@see isNoteOn, getNoteNumber, getVelocity, noteOff
*/
bool isNoteOff (bool returnTrueForNoteOnVelocity0 = true) const noexcept;
/** Creates a key-up message.
@param channel the midi channel, in the range 1 to 16
@param noteNumber the key number, 0 to 127
@param velocity in the range 0 to 1.0
@see isNoteOff
*/
static MidiMessage noteOff (int channel, int noteNumber, float velocity) noexcept;
/** Creates a key-up message.
@param channel the midi channel, in the range 1 to 16
@param noteNumber the key number, 0 to 127
@param velocity in the range 0 to 127
@see isNoteOff
*/
static MidiMessage noteOff (int channel, int noteNumber, uint8 velocity) noexcept;
/** Creates a key-up message.
@param channel the midi channel, in the range 1 to 16
@param noteNumber the key number, 0 to 127
@see isNoteOff
*/
static MidiMessage noteOff (int channel, int noteNumber) noexcept;
/** Returns true if this message is a 'key-down' or 'key-up' event.
@see isNoteOn, isNoteOff
*/
bool isNoteOnOrOff() const noexcept;
/** Returns the midi note number for note-on and note-off messages.
If the message isn't a note-on or off, the value returned is undefined.
@see isNoteOff, getMidiNoteName, getMidiNoteInHertz, setNoteNumber
*/
int getNoteNumber() const noexcept;
/** Changes the midi note number of a note-on or note-off message.
If the message isn't a note on or off, this will do nothing.
*/
void setNoteNumber (int newNoteNumber) noexcept;
//==============================================================================
/** Returns the velocity of a note-on or note-off message.
The value returned will be in the range 0 to 127.
If the message isn't a note-on or off event, it will return 0.
@see getFloatVelocity
*/
uint8 getVelocity() const noexcept;
/** Returns the velocity of a note-on or note-off message.
The value returned will be in the range 0 to 1.0
If the message isn't a note-on or off event, it will return 0.
@see getVelocity, setVelocity
*/
float getFloatVelocity() const noexcept;
/** Changes the velocity of a note-on or note-off message.
If the message isn't a note on or off, this will do nothing.
@param newVelocity the new velocity, in the range 0 to 1.0
@see getFloatVelocity, multiplyVelocity
*/
void setVelocity (float newVelocity) noexcept;
/** Multiplies the velocity of a note-on or note-off message by a given amount.
If the message isn't a note on or off, this will do nothing.
@param scaleFactor the value by which to multiply the velocity
@see setVelocity
*/
void multiplyVelocity (float scaleFactor) noexcept;
//==============================================================================
/** Returns true if this message is a 'sustain pedal down' controller message. */
bool isSustainPedalOn() const noexcept;
/** Returns true if this message is a 'sustain pedal up' controller message. */
bool isSustainPedalOff() const noexcept;
/** Returns true if this message is a 'sostenuto pedal down' controller message. */
bool isSostenutoPedalOn() const noexcept;
/** Returns true if this message is a 'sostenuto pedal up' controller message. */
bool isSostenutoPedalOff() const noexcept;
/** Returns true if this message is a 'soft pedal down' controller message. */
bool isSoftPedalOn() const noexcept;
/** Returns true if this message is a 'soft pedal up' controller message. */
bool isSoftPedalOff() const noexcept;
//==============================================================================
/** Returns true if the message is a program (patch) change message.
@see getProgramChangeNumber, getGMInstrumentName
*/
bool isProgramChange() const noexcept;
/** Returns the new program number of a program change message.
If the message isn't a program change, the value returned is undefined.
@see isProgramChange, getGMInstrumentName
*/
int getProgramChangeNumber() const noexcept;
/** Creates a program-change message.
@param channel the midi channel, in the range 1 to 16
@param programNumber the midi program number, 0 to 127
@see isProgramChange, getGMInstrumentName
*/
static MidiMessage programChange (int channel, int programNumber) noexcept;
//==============================================================================
/** Returns true if the message is a pitch-wheel move.
@see getPitchWheelValue, pitchWheel
*/
bool isPitchWheel() const noexcept;
/** Returns the pitch wheel position from a pitch-wheel move message.
The value returned is a 14-bit number from 0 to 0x3fff, indicating the wheel position.
If called for messages which aren't pitch wheel events, the number returned will be
nonsense.
@see isPitchWheel
*/
int getPitchWheelValue() const noexcept;
/** Creates a pitch-wheel move message.
@param channel the midi channel, in the range 1 to 16
@param position the wheel position, in the range 0 to 16383
@see isPitchWheel
*/
static MidiMessage pitchWheel (int channel, int position) noexcept;
//==============================================================================
/** Returns true if the message is an aftertouch event.
For aftertouch events, use the getNoteNumber() method to find out the key
that it applies to, and getAfterTouchValue() to find out the amount. Use
getChannel() to find out the channel.
@see getAftertouchValue, getNoteNumber
*/
bool isAftertouch() const noexcept;
/** Returns the amount of aftertouch from an aftertouch message.
The value returned is in the range 0 to 127, and will be nonsense for messages
other than aftertouch messages.
@see isAftertouch
*/
int getAfterTouchValue() const noexcept;
/** Creates an aftertouch message.
@param channel the midi channel, in the range 1 to 16
@param noteNumber the key number, 0 to 127
@param aftertouchAmount the amount of aftertouch, 0 to 127
@see isAftertouch
*/
static MidiMessage aftertouchChange (int channel,
int noteNumber,
int aftertouchAmount) noexcept;
/** Returns true if the message is a channel-pressure change event.
This is like aftertouch, but common to the whole channel rather than a specific
note. Use getChannelPressureValue() to find out the pressure, and getChannel()
to find out the channel.
@see channelPressureChange
*/
bool isChannelPressure() const noexcept;
/** Returns the pressure from a channel pressure change message.
@returns the pressure, in the range 0 to 127
@see isChannelPressure, channelPressureChange
*/
int getChannelPressureValue() const noexcept;
/** Creates a channel-pressure change event.
@param channel the midi channel: 1 to 16
@param pressure the pressure, 0 to 127
@see isChannelPressure
*/
static MidiMessage channelPressureChange (int channel, int pressure) noexcept;
//==============================================================================
/** Returns true if this is a midi controller message.
@see getControllerNumber, getControllerValue, controllerEvent
*/
bool isController() const noexcept;
/** Returns the controller number of a controller message.
The name of the controller can be looked up using the getControllerName() method.
Note that the value returned is invalid for messages that aren't controller changes.
@see isController, getControllerName, getControllerValue
*/
int getControllerNumber() const noexcept;
/** Returns the controller value from a controller message.
A value 0 to 127 is returned to indicate the new controller position.
Note that the value returned is invalid for messages that aren't controller changes.
@see isController, getControllerNumber
*/
int getControllerValue() const noexcept;
/** Returns true if this message is a controller message and if it has the specified
controller type.
*/
bool isControllerOfType (int controllerType) const noexcept;
/** Creates a controller message.
@param channel the midi channel, in the range 1 to 16
@param controllerType the type of controller
@param value the controller value
@see isController
*/
static MidiMessage controllerEvent (int channel,
int controllerType,
int value) noexcept;
/** Checks whether this message is an all-notes-off message.
@see allNotesOff
*/
bool isAllNotesOff() const noexcept;
/** Checks whether this message is an all-sound-off message.
@see allSoundOff
*/
bool isAllSoundOff() const noexcept;
/** Checks whether this message is a reset all controllers message.
@see allControllersOff
*/
bool isResetAllControllers() const noexcept;
/** Creates an all-notes-off message.
@param channel the midi channel, in the range 1 to 16
@see isAllNotesOff
*/
static MidiMessage allNotesOff (int channel) noexcept;
/** Creates an all-sound-off message.
@param channel the midi channel, in the range 1 to 16
@see isAllSoundOff
*/
static MidiMessage allSoundOff (int channel) noexcept;
/** Creates an all-controllers-off message.
@param channel the midi channel, in the range 1 to 16
*/
static MidiMessage allControllersOff (int channel) noexcept;
//==============================================================================
/** Returns true if this event is a meta-event.
Meta-events are things like tempo changes, track names, etc.
@see getMetaEventType, isTrackMetaEvent, isEndOfTrackMetaEvent,
isTextMetaEvent, isTrackNameEvent, isTempoMetaEvent, isTimeSignatureMetaEvent,
isKeySignatureMetaEvent, isMidiChannelMetaEvent
*/
bool isMetaEvent() const noexcept;
/** Returns a meta-event's type number.
If the message isn't a meta-event, this will return -1.
@see isMetaEvent, isTrackMetaEvent, isEndOfTrackMetaEvent,
isTextMetaEvent, isTrackNameEvent, isTempoMetaEvent, isTimeSignatureMetaEvent,
isKeySignatureMetaEvent, isMidiChannelMetaEvent
*/
int getMetaEventType() const noexcept;
/** Returns a pointer to the data in a meta-event.
@see isMetaEvent, getMetaEventLength
*/
const uint8* getMetaEventData() const noexcept;
/** Returns the length of the data for a meta-event.
@see isMetaEvent, getMetaEventData
*/
int getMetaEventLength() const noexcept;
//==============================================================================
/** Returns true if this is a 'track' meta-event. */
bool isTrackMetaEvent() const noexcept;
/** Returns true if this is an 'end-of-track' meta-event. */
bool isEndOfTrackMetaEvent() const noexcept;
/** Creates an end-of-track meta-event.
@see isEndOfTrackMetaEvent
*/
static MidiMessage endOfTrack() noexcept;
/** Returns true if this is a 'track name' meta-event.
You can use the getTextFromTextMetaEvent() method to get the track's name.
*/
bool isTrackNameEvent() const noexcept;
/** Returns true if this is a 'text' meta-event.
@see getTextFromTextMetaEvent
*/
bool isTextMetaEvent() const noexcept;
/** Returns the text from a text meta-event.
@see isTextMetaEvent
*/
String getTextFromTextMetaEvent() const;
/** Creates a text meta-event. */
static MidiMessage textMetaEvent (int type, StringRef text);
//==============================================================================
/** Returns true if this is a 'tempo' meta-event.
@see getTempoMetaEventTickLength, getTempoSecondsPerQuarterNote
*/
bool isTempoMetaEvent() const noexcept;
/** Returns the tick length from a tempo meta-event.
@param timeFormat the 16-bit time format value from the midi file's header.
@returns the tick length (in seconds).
@see isTempoMetaEvent
*/
double getTempoMetaEventTickLength (short timeFormat) const noexcept;
/** Calculates the seconds-per-quarter-note from a tempo meta-event.
@see isTempoMetaEvent, getTempoMetaEventTickLength
*/
double getTempoSecondsPerQuarterNote() const noexcept;
/** Creates a tempo meta-event.
@see isTempoMetaEvent
*/
static MidiMessage tempoMetaEvent (int microsecondsPerQuarterNote) noexcept;
//==============================================================================
/** Returns true if this is a 'time-signature' meta-event.
@see getTimeSignatureInfo
*/
bool isTimeSignatureMetaEvent() const noexcept;
/** Returns the time-signature values from a time-signature meta-event.
@see isTimeSignatureMetaEvent
*/
void getTimeSignatureInfo (int& numerator, int& denominator) const noexcept;
/** Creates a time-signature meta-event.
@see isTimeSignatureMetaEvent
*/
static MidiMessage timeSignatureMetaEvent (int numerator, int denominator);
//==============================================================================
/** Returns true if this is a 'key-signature' meta-event.
@see getKeySignatureNumberOfSharpsOrFlats, isKeySignatureMajorKey
*/
bool isKeySignatureMetaEvent() const noexcept;
/** Returns the key from a key-signature meta-event.
This method must only be called if isKeySignatureMetaEvent() is true.
A positive number here indicates the number of sharps in the key signature,
and a negative number indicates a number of flats. So e.g. 3 = F# + C# + G#,
-2 = Bb + Eb
@see isKeySignatureMetaEvent, isKeySignatureMajorKey
*/
int getKeySignatureNumberOfSharpsOrFlats() const noexcept;
/** Returns true if this key-signature event is major, or false if it's minor.
This method must only be called if isKeySignatureMetaEvent() is true.
*/
bool isKeySignatureMajorKey() const noexcept;
/** Creates a key-signature meta-event.
@param numberOfSharpsOrFlats if positive, this indicates the number of sharps
in the key; if negative, the number of flats
@param isMinorKey if true, the key is minor; if false, it is major
@see isKeySignatureMetaEvent
*/
static MidiMessage keySignatureMetaEvent (int numberOfSharpsOrFlats, bool isMinorKey);
//==============================================================================
/** Returns true if this is a 'channel' meta-event.
A channel meta-event specifies the midi channel that should be used
for subsequent meta-events.
@see getMidiChannelMetaEventChannel
*/
bool isMidiChannelMetaEvent() const noexcept;
/** Returns the channel number from a channel meta-event.
@returns the channel, in the range 1 to 16.
@see isMidiChannelMetaEvent
*/
int getMidiChannelMetaEventChannel() const noexcept;
/** Creates a midi channel meta-event.
@param channel the midi channel, in the range 1 to 16
@see isMidiChannelMetaEvent
*/
static MidiMessage midiChannelMetaEvent (int channel) noexcept;
//==============================================================================
/** Returns true if this is an active-sense message. */
bool isActiveSense() const noexcept;
//==============================================================================
/** Returns true if this is a midi start event.
@see midiStart
*/
bool isMidiStart() const noexcept;
/** Creates a midi start event. */
static MidiMessage midiStart() noexcept;
/** Returns true if this is a midi continue event.
@see midiContinue
*/
bool isMidiContinue() const noexcept;
/** Creates a midi continue event. */
static MidiMessage midiContinue() noexcept;
/** Returns true if this is a midi stop event.
@see midiStop
*/
bool isMidiStop() const noexcept;
/** Creates a midi stop event. */
static MidiMessage midiStop() noexcept;
/** Returns true if this is a midi clock event.
@see midiClock, songPositionPointer
*/
bool isMidiClock() const noexcept;
/** Creates a midi clock event. */
static MidiMessage midiClock() noexcept;
/** Returns true if this is a song-position-pointer message.
@see getSongPositionPointerMidiBeat, songPositionPointer
*/
bool isSongPositionPointer() const noexcept;
/** Returns the midi beat-number of a song-position-pointer message.
@see isSongPositionPointer, songPositionPointer
*/
int getSongPositionPointerMidiBeat() const noexcept;
/** Creates a song-position-pointer message.
The position is a number of midi beats from the start of the song, where 1 midi
beat is 6 midi clocks, and there are 24 midi clocks in a quarter-note. So there
are 4 midi beats in a quarter-note.
@see isSongPositionPointer, getSongPositionPointerMidiBeat
*/
static MidiMessage songPositionPointer (int positionInMidiBeats) noexcept;
//==============================================================================
/** Returns true if this is a quarter-frame midi timecode message.
@see quarterFrame, getQuarterFrameSequenceNumber, getQuarterFrameValue
*/
bool isQuarterFrame() const noexcept;
/** Returns the sequence number of a quarter-frame midi timecode message.
This will be a value between 0 and 7.
@see isQuarterFrame, getQuarterFrameValue, quarterFrame
*/
int getQuarterFrameSequenceNumber() const noexcept;
/** Returns the value from a quarter-frame message.
This will be the lower nybble of the message's data-byte, a value between 0 and 15
*/
int getQuarterFrameValue() const noexcept;
/** Creates a quarter-frame MTC message.
@param sequenceNumber a value 0 to 7 for the upper nybble of the message's data byte
@param value a value 0 to 15 for the lower nybble of the message's data byte
*/
static MidiMessage quarterFrame (int sequenceNumber, int value) noexcept;
/** SMPTE timecode types.
Used by the getFullFrameParameters() and fullFrame() methods.
*/
enum SmpteTimecodeType
{
fps24 = 0,
fps25 = 1,
fps30drop = 2,
fps30 = 3
};
/** Returns true if this is a full-frame midi timecode message. */
bool isFullFrame() const noexcept;
/** Extracts the timecode information from a full-frame midi timecode message.
You should only call this on messages where you've used isFullFrame() to
check that they're the right kind.
*/
void getFullFrameParameters (int& hours,
int& minutes,
int& seconds,
int& frames,
SmpteTimecodeType& timecodeType) const noexcept;
/** Creates a full-frame MTC message. */
static MidiMessage fullFrame (int hours,
int minutes,
int seconds,
int frames,
SmpteTimecodeType timecodeType);
//==============================================================================
/** Types of MMC command.
@see isMidiMachineControlMessage, getMidiMachineControlCommand, midiMachineControlCommand
*/
enum MidiMachineControlCommand
{
mmc_stop = 1,
mmc_play = 2,
mmc_deferredplay = 3,
mmc_fastforward = 4,
mmc_rewind = 5,
mmc_recordStart = 6,
mmc_recordStop = 7,
mmc_pause = 9
};
/** Checks whether this is an MMC message.
If it is, you can use the getMidiMachineControlCommand() to find out its type.
*/
bool isMidiMachineControlMessage() const noexcept;
/** For an MMC message, this returns its type.
Make sure it's actually an MMC message with isMidiMachineControlMessage() before
calling this method.
*/
MidiMachineControlCommand getMidiMachineControlCommand() const noexcept;
/** Creates an MMC message. */
static MidiMessage midiMachineControlCommand (MidiMachineControlCommand command);
/** Checks whether this is an MMC "goto" message.
If it is, the parameters passed-in are set to the time that the message contains.
@see midiMachineControlGoto
*/
bool isMidiMachineControlGoto (int& hours,
int& minutes,
int& seconds,
int& frames) const noexcept;
/** Creates an MMC "goto" message.
This message tells the device to go to a specific frame.
@see isMidiMachineControlGoto
*/
static MidiMessage midiMachineControlGoto (int hours,
int minutes,
int seconds,
int frames);
//==============================================================================
/** Creates a master-volume change message.
@param volume the volume, 0 to 1.0
*/
static MidiMessage masterVolume (float volume);
//==============================================================================
/** Creates a system-exclusive message.
The data passed in is wrapped with header and tail bytes of 0xf0 and 0xf7.
*/
static MidiMessage createSysExMessage (const void* sysexData,
int dataSize);
//==============================================================================
#ifndef DOXYGEN
/** Reads a midi variable-length integer.
The `data` argument indicates the data to read the number from,
and `numBytesUsed` is used as an out-parameter to indicate the number
of bytes that were read.
*/
[[deprecated ("This signature has been deprecated in favour of the safer readVariableLengthValue.")]]
static int readVariableLengthVal (const uint8* data, int& numBytesUsed) noexcept;
#endif
/** Holds information about a variable-length value which was parsed
from a stream of bytes.
A valid value requires that `bytesUsed` is greater than 0.
*/
struct VariableLengthValue
{
VariableLengthValue() = default;
VariableLengthValue (int valueIn, int bytesUsedIn)
: value (valueIn), bytesUsed (bytesUsedIn) {}
bool isValid() const noexcept { return bytesUsed > 0; }
int value = 0;
int bytesUsed = 0;
};
/** Reads a midi variable-length integer, with protection against buffer overflow.
@param data the data to read the number from
@param maxBytesToUse the number of bytes in the region following `data`
@returns a struct containing the parsed value, and the number
of bytes that were read. If parsing fails, both the
`value` and `bytesUsed` fields will be set to 0 and
`isValid()` will return false
*/
static VariableLengthValue readVariableLengthValue (const uint8* data,
int maxBytesToUse) noexcept;
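    // Illustrative example (not in the original header): the two bytes { 0x81, 0x48 }
    // encode the value (1 << 7) + 0x48 = 200, so readVariableLengthValue (bytes, 2)
    // would yield { value = 200, bytesUsed = 2 }.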
/** Based on the first byte of a short midi message, this uses a lookup table
to return the message length (either 1, 2, or 3 bytes).
The value passed in must be 0x80 or higher.
*/
static int getMessageLengthFromFirstByte (uint8 firstByte) noexcept;
//==============================================================================
/** Returns the name of a midi note number.
E.g "C", "D#", etc.
@param noteNumber the midi note number, 0 to 127
@param useSharps if true, sharpened notes are used, e.g. "C#", otherwise
they'll be flattened, e.g. "Db"
@param includeOctaveNumber if true, the octave number will be appended to the string,
e.g. "C#4"
@param octaveNumForMiddleC if an octave number is being appended, this indicates the
number that will be used for middle C's octave
@see getMidiNoteInHertz
*/
static String getMidiNoteName (int noteNumber,
bool useSharps,
bool includeOctaveNumber,
int octaveNumForMiddleC);
/** Returns the frequency of a midi note number.
The frequencyOfA parameter is an optional frequency for 'A', normally 440-444Hz for concert pitch.
@see getMidiNoteName
*/
static double getMidiNoteInHertz (int noteNumber, double frequencyOfA = 440.0) noexcept;
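    // (For reference: under standard equal-tempered tuning, which this note assumes the
    //  implementation follows, the conversion is frequencyOfA * std::pow (2.0, (noteNumber - 69) / 12.0),
    //  so note 69 maps to the reference 'A' frequency.)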
/** Returns true if the given midi note number is a black key. */
static bool isMidiNoteBlack (int noteNumber) noexcept;
/** Returns the standard name of a GM instrument, or nullptr if unknown for this index.
@param midiInstrumentNumber the program number 0 to 127
@see getProgramChangeNumber
*/
static const char* getGMInstrumentName (int midiInstrumentNumber);
/** Returns the name of a bank of GM instruments, or nullptr if unknown for this bank number.
@param midiBankNumber the bank, 0 to 15
*/
static const char* getGMInstrumentBankName (int midiBankNumber);
/** Returns the standard name of a channel 10 percussion sound, or nullptr if unknown for this note number.
@param midiNoteNumber the key number, 35 to 81
*/
static const char* getRhythmInstrumentName (int midiNoteNumber);
/** Returns the name of a controller type number, or nullptr if unknown for this controller number.
@see getControllerNumber
*/
static const char* getControllerName (int controllerNumber);
/** Converts a floating-point value between 0 and 1 to a MIDI 7-bit value between 0 and 127. */
static uint8 floatValueToMidiByte (float valueBetween0and1) noexcept;
/** Converts a pitchbend value in semitones to a MIDI 14-bit pitchwheel position value. */
static uint16 pitchbendToPitchwheelPos (float pitchbendInSemitones,
float pitchbendRangeInSemitones) noexcept;
private:
//==============================================================================
#ifndef DOXYGEN
union PackedData
{
uint8* allocatedData;
uint8 asBytes[sizeof (uint8*)];
};
PackedData packedData;
double timeStamp = 0;
int size;
#endif
inline bool isHeapAllocated() const noexcept { return size > (int) sizeof (packedData); }
inline uint8* getData() const noexcept { return isHeapAllocated() ? packedData.allocatedData : const_cast<uint8*>(packedData.asBytes); }
uint8* allocateSpace (int);
};
} // namespace juce
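A brief usage sketch for the class above (not part of the diff; the logged text is indicative only):

    // Hedged sketch: build a couple of messages and query them.
    juce::MidiMessage on   = juce::MidiMessage::noteOn  (1, 60, 0.8f);   // channel 1, middle C
    juce::MidiMessage off  = juce::MidiMessage::noteOff (1, 60, 0.0f);
    juce::MidiMessage bend = juce::MidiMessage::pitchWheel (1, 0x2000);  // 0x2000 = centre of the 14-bit range

    jassert (on.isNoteOn() && off.isNoteOff() && bend.isPitchWheel());
    DBG (on.getDescription());                                           // human-readable summary of the message
    DBG (juce::MidiMessage::getMidiNoteName (on.getNoteNumber(),
                                             true, true, 3));            // "C3" when middle C is labelled octave 3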

+ 0
- 870
libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiMessageSequence.cpp

@@ -1,870 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
MidiMessageSequence::MidiEventHolder::MidiEventHolder (const MidiMessage& mm) : message (mm) {}
MidiMessageSequence::MidiEventHolder::MidiEventHolder (MidiMessage&& mm) : message (std::move (mm)) {}
//==============================================================================
MidiMessageSequence::MidiMessageSequence()
{
}
MidiMessageSequence::MidiMessageSequence (const MidiMessageSequence& other)
{
list.addCopiesOf (other.list);
for (int i = 0; i < list.size(); ++i)
{
auto noteOffIndex = other.getIndexOfMatchingKeyUp (i);
if (noteOffIndex >= 0)
list.getUnchecked(i)->noteOffObject = list.getUnchecked (noteOffIndex);
}
}
MidiMessageSequence& MidiMessageSequence::operator= (const MidiMessageSequence& other)
{
MidiMessageSequence otherCopy (other);
swapWith (otherCopy);
return *this;
}
MidiMessageSequence::MidiMessageSequence (MidiMessageSequence&& other) noexcept
: list (std::move (other.list))
{
}
MidiMessageSequence& MidiMessageSequence::operator= (MidiMessageSequence&& other) noexcept
{
list = std::move (other.list);
return *this;
}
void MidiMessageSequence::swapWith (MidiMessageSequence& other) noexcept
{
list.swapWith (other.list);
}
void MidiMessageSequence::clear()
{
list.clear();
}
int MidiMessageSequence::getNumEvents() const noexcept
{
return list.size();
}
MidiMessageSequence::MidiEventHolder* MidiMessageSequence::getEventPointer (int index) const noexcept
{
return list[index];
}
MidiMessageSequence::MidiEventHolder** MidiMessageSequence::begin() noexcept { return list.begin(); }
MidiMessageSequence::MidiEventHolder* const* MidiMessageSequence::begin() const noexcept { return list.begin(); }
MidiMessageSequence::MidiEventHolder** MidiMessageSequence::end() noexcept { return list.end(); }
MidiMessageSequence::MidiEventHolder* const* MidiMessageSequence::end() const noexcept { return list.end(); }
double MidiMessageSequence::getTimeOfMatchingKeyUp (int index) const noexcept
{
if (auto* meh = list[index])
if (auto* noteOff = meh->noteOffObject)
return noteOff->message.getTimeStamp();
return 0;
}
int MidiMessageSequence::getIndexOfMatchingKeyUp (int index) const noexcept
{
if (auto* meh = list[index])
{
if (auto* noteOff = meh->noteOffObject)
{
for (int i = index; i < list.size(); ++i)
if (list.getUnchecked(i) == noteOff)
return i;
jassertfalse; // we've somehow got a pointer to a note-off object that isn't in the sequence
}
}
return -1;
}
int MidiMessageSequence::getIndexOf (const MidiEventHolder* event) const noexcept
{
return list.indexOf (event);
}
int MidiMessageSequence::getNextIndexAtTime (double timeStamp) const noexcept
{
auto numEvents = list.size();
int i;
for (i = 0; i < numEvents; ++i)
if (list.getUnchecked(i)->message.getTimeStamp() >= timeStamp)
break;
return i;
}
//==============================================================================
double MidiMessageSequence::getStartTime() const noexcept
{
return getEventTime (0);
}
double MidiMessageSequence::getEndTime() const noexcept
{
return getEventTime (list.size() - 1);
}
double MidiMessageSequence::getEventTime (const int index) const noexcept
{
if (auto* meh = list[index])
return meh->message.getTimeStamp();
return 0;
}
//==============================================================================
MidiMessageSequence::MidiEventHolder* MidiMessageSequence::addEvent (MidiEventHolder* newEvent, double timeAdjustment)
{
newEvent->message.addToTimeStamp (timeAdjustment);
auto time = newEvent->message.getTimeStamp();
int i;
for (i = list.size(); --i >= 0;)
if (list.getUnchecked(i)->message.getTimeStamp() <= time)
break;
list.insert (i + 1, newEvent);
return newEvent;
}
MidiMessageSequence::MidiEventHolder* MidiMessageSequence::addEvent (const MidiMessage& newMessage, double timeAdjustment)
{
return addEvent (new MidiEventHolder (newMessage), timeAdjustment);
}
MidiMessageSequence::MidiEventHolder* MidiMessageSequence::addEvent (MidiMessage&& newMessage, double timeAdjustment)
{
return addEvent (new MidiEventHolder (std::move (newMessage)), timeAdjustment);
}
void MidiMessageSequence::deleteEvent (int index, bool deleteMatchingNoteUp)
{
if (isPositiveAndBelow (index, list.size()))
{
if (deleteMatchingNoteUp)
deleteEvent (getIndexOfMatchingKeyUp (index), false);
list.remove (index);
}
}
void MidiMessageSequence::addSequence (const MidiMessageSequence& other, double timeAdjustment)
{
for (auto* m : other)
{
auto newOne = new MidiEventHolder (m->message);
newOne->message.addToTimeStamp (timeAdjustment);
list.add (newOne);
}
sort();
}
void MidiMessageSequence::addSequence (const MidiMessageSequence& other,
double timeAdjustment,
double firstAllowableTime,
double endOfAllowableDestTimes)
{
for (auto* m : other)
{
auto t = m->message.getTimeStamp() + timeAdjustment;
if (t >= firstAllowableTime && t < endOfAllowableDestTimes)
{
auto newOne = new MidiEventHolder (m->message);
newOne->message.setTimeStamp (t);
list.add (newOne);
}
}
sort();
}
void MidiMessageSequence::sort() noexcept
{
std::stable_sort (list.begin(), list.end(),
[] (const MidiEventHolder* a, const MidiEventHolder* b) { return a->message.getTimeStamp() < b->message.getTimeStamp(); });
}
void MidiMessageSequence::updateMatchedPairs() noexcept
{
for (int i = 0; i < list.size(); ++i)
{
auto* meh = list.getUnchecked(i);
auto& m1 = meh->message;
if (m1.isNoteOn())
{
meh->noteOffObject = nullptr;
auto note = m1.getNoteNumber();
auto chan = m1.getChannel();
auto len = list.size();
for (int j = i + 1; j < len; ++j)
{
auto* meh2 = list.getUnchecked(j);
auto& m = meh2->message;
if (m.getNoteNumber() == note && m.getChannel() == chan)
{
if (m.isNoteOff())
{
meh->noteOffObject = meh2;
break;
}
if (m.isNoteOn())
{
auto newEvent = new MidiEventHolder (MidiMessage::noteOff (chan, note));
list.insert (j, newEvent);
newEvent->message.setTimeStamp (m.getTimeStamp());
meh->noteOffObject = newEvent;
break;
}
}
}
}
}
}
void MidiMessageSequence::addTimeToMessages (double delta) noexcept
{
if (delta != 0)
for (auto* m : list)
m->message.addToTimeStamp (delta);
}
//==============================================================================
void MidiMessageSequence::extractMidiChannelMessages (const int channelNumberToExtract,
MidiMessageSequence& destSequence,
const bool alsoIncludeMetaEvents) const
{
for (auto* meh : list)
if (meh->message.isForChannel (channelNumberToExtract)
|| (alsoIncludeMetaEvents && meh->message.isMetaEvent()))
destSequence.addEvent (meh->message);
}
void MidiMessageSequence::extractSysExMessages (MidiMessageSequence& destSequence) const
{
for (auto* meh : list)
if (meh->message.isSysEx())
destSequence.addEvent (meh->message);
}
void MidiMessageSequence::deleteMidiChannelMessages (const int channelNumberToRemove)
{
for (int i = list.size(); --i >= 0;)
if (list.getUnchecked(i)->message.isForChannel (channelNumberToRemove))
list.remove(i);
}
void MidiMessageSequence::deleteSysExMessages()
{
for (int i = list.size(); --i >= 0;)
if (list.getUnchecked(i)->message.isSysEx())
list.remove(i);
}
//==============================================================================
class OptionalPitchWheel
{
Optional<int> value;
public:
void emit (int channel, Array<MidiMessage>& out) const
{
if (value.hasValue())
out.add (MidiMessage::pitchWheel (channel, *value));
}
void set (int v)
{
value = v;
}
};
class OptionalControllerValues
{
Optional<char> values[128];
public:
void emit (int channel, Array<MidiMessage>& out) const
{
for (auto it = std::begin (values); it != std::end (values); ++it)
if (it->hasValue())
out.add (MidiMessage::controllerEvent (channel, (int) std::distance (std::begin (values), it), **it));
}
void set (int controller, int value)
{
values[controller] = (char) value;
}
};
class OptionalProgramChange
{
Optional<char> value, bankLSB, bankMSB;
public:
void emit (int channel, double time, Array<MidiMessage>& out) const
{
if (! value.hasValue())
return;
if (bankLSB.hasValue() && bankMSB.hasValue())
{
out.add (MidiMessage::controllerEvent (channel, 0x00, *bankMSB).withTimeStamp (time));
out.add (MidiMessage::controllerEvent (channel, 0x20, *bankLSB).withTimeStamp (time));
}
out.add (MidiMessage::programChange (channel, *value).withTimeStamp (time));
}
// Returns true if this is a bank number change, and false otherwise.
bool trySetBank (int controller, int v)
{
switch (controller)
{
case 0x00: bankMSB = (char) v; return true;
case 0x20: bankLSB = (char) v; return true;
}
return false;
}
void setProgram (int v) { value = (char) v; }
};
class ParameterNumberState
{
enum class Kind { rpn, nrpn };
Optional<char> newestRpnLsb, newestRpnMsb, newestNrpnLsb, newestNrpnMsb, lastSentLsb, lastSentMsb;
Kind lastSentKind = Kind::rpn, newestKind = Kind::rpn;
public:
// If the effective parameter number has changed since the last time this function was called,
// this will emit the current parameter in full (MSB and LSB).
// This should be called before each data message (entry, increment, decrement: 0x06, 0x26, 0x60, 0x61)
// to ensure that the data message operates on the correct parameter number.
void sendIfNecessary (int channel, double time, Array<MidiMessage>& out)
{
const auto newestMsb = newestKind == Kind::rpn ? newestRpnMsb : newestNrpnMsb;
const auto newestLsb = newestKind == Kind::rpn ? newestRpnLsb : newestNrpnLsb;
auto lastSent = std::tie (lastSentKind, lastSentMsb, lastSentLsb);
const auto newest = std::tie (newestKind, newestMsb, newestLsb);
if (lastSent == newest || ! newestMsb.hasValue() || ! newestLsb.hasValue())
return;
out.add (MidiMessage::controllerEvent (channel, newestKind == Kind::rpn ? 0x65 : 0x63, *newestMsb).withTimeStamp (time));
out.add (MidiMessage::controllerEvent (channel, newestKind == Kind::rpn ? 0x64 : 0x62, *newestLsb).withTimeStamp (time));
lastSent = newest;
}
// Returns true if this is a parameter number change, and false otherwise.
bool trySetProgramNumber (int controller, int value)
{
switch (controller)
{
case 0x65: newestRpnMsb = (char) value; newestKind = Kind::rpn; return true;
case 0x64: newestRpnLsb = (char) value; newestKind = Kind::rpn; return true;
case 0x63: newestNrpnMsb = (char) value; newestKind = Kind::nrpn; return true;
case 0x62: newestNrpnLsb = (char) value; newestKind = Kind::nrpn; return true;
}
return false;
}
};
void MidiMessageSequence::createControllerUpdatesForTime (int channel, double time, Array<MidiMessage>& dest)
{
OptionalProgramChange programChange;
OptionalControllerValues controllers;
OptionalPitchWheel pitchWheel;
ParameterNumberState parameterNumberState;
for (const auto& item : list)
{
const auto& mm = item->message;
if (! (mm.isForChannel (channel) && mm.getTimeStamp() <= time))
continue;
if (mm.isController())
{
const auto num = mm.getControllerNumber();
if (parameterNumberState.trySetProgramNumber (num, mm.getControllerValue()))
continue;
if (programChange.trySetBank (num, mm.getControllerValue()))
continue;
constexpr int passthroughs[] { 0x06, 0x26, 0x60, 0x61 };
if (std::find (std::begin (passthroughs), std::end (passthroughs), num) != std::end (passthroughs))
{
parameterNumberState.sendIfNecessary (channel, mm.getTimeStamp(), dest);
dest.add (mm);
}
else
{
controllers.set (num, mm.getControllerValue());
}
}
else if (mm.isProgramChange())
{
programChange.setProgram (mm.getProgramChangeNumber());
}
else if (mm.isPitchWheel())
{
pitchWheel.set (mm.getPitchWheelValue());
}
}
pitchWheel.emit (channel, dest);
controllers.emit (channel, dest);
// Also emits bank change messages if necessary.
programChange.emit (channel, time, dest);
// Set the parameter number to its final state.
parameterNumberState.sendIfNecessary (channel, time, dest);
}
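// Hedged usage sketch (illustrative, not part of the original file): when seeking to a
// new position, createControllerUpdatesForTime() can rebuild the controller, bank/program
// and pitch-wheel state that would be in effect at the target time, e.g.:
//
//     Array<MidiMessage> state;
//     sequence.createControllerUpdatesForTime (1 /*channel*/, seekTime, state);
//     for (const auto& m : state)
//         midiOut.addEvent (m, 0);   // assumes a MidiBuffer named midiOut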
//==============================================================================
//==============================================================================
#if JUCE_UNIT_TESTS
struct MidiMessageSequenceTest : public UnitTest
{
MidiMessageSequenceTest()
: UnitTest ("MidiMessageSequence", UnitTestCategories::midi)
{}
void runTest() override
{
MidiMessageSequence s;
s.addEvent (MidiMessage::noteOn (1, 60, 0.5f).withTimeStamp (0.0));
s.addEvent (MidiMessage::noteOff (1, 60, 0.5f).withTimeStamp (4.0));
s.addEvent (MidiMessage::noteOn (1, 30, 0.5f).withTimeStamp (2.0));
s.addEvent (MidiMessage::noteOff (1, 30, 0.5f).withTimeStamp (8.0));
beginTest ("Start & end time");
expectEquals (s.getStartTime(), 0.0);
expectEquals (s.getEndTime(), 8.0);
expectEquals (s.getEventTime (1), 2.0);
beginTest ("Matching note off & ons");
s.updateMatchedPairs();
expectEquals (s.getTimeOfMatchingKeyUp (0), 4.0);
expectEquals (s.getTimeOfMatchingKeyUp (1), 8.0);
expectEquals (s.getIndexOfMatchingKeyUp (0), 2);
expectEquals (s.getIndexOfMatchingKeyUp (1), 3);
beginTest ("Time & indices");
expectEquals (s.getNextIndexAtTime (0.5), 1);
expectEquals (s.getNextIndexAtTime (2.5), 2);
expectEquals (s.getNextIndexAtTime (9.0), 4);
beginTest ("Deleting events");
s.deleteEvent (0, true);
expectEquals (s.getNumEvents(), 2);
beginTest ("Merging sequences");
MidiMessageSequence s2;
s2.addEvent (MidiMessage::noteOn (2, 25, 0.5f).withTimeStamp (0.0));
s2.addEvent (MidiMessage::noteOn (2, 40, 0.5f).withTimeStamp (1.0));
s2.addEvent (MidiMessage::noteOff (2, 40, 0.5f).withTimeStamp (5.0));
s2.addEvent (MidiMessage::noteOn (2, 80, 0.5f).withTimeStamp (3.0));
s2.addEvent (MidiMessage::noteOff (2, 80, 0.5f).withTimeStamp (7.0));
s2.addEvent (MidiMessage::noteOff (2, 25, 0.5f).withTimeStamp (9.0));
s.addSequence (s2, 0.0, 0.0, 8.0); // Intentionally cut off the last note off
s.updateMatchedPairs();
expectEquals (s.getNumEvents(), 7);
expectEquals (s.getIndexOfMatchingKeyUp (0), -1); // Truncated note, should be no note off
expectEquals (s.getTimeOfMatchingKeyUp (1), 5.0);
struct ControlValue { int control, value; };
struct DataEntry
{
int controllerBase, channel, parameter, value;
double time;
std::array<ControlValue, 4> getControlValues() const
{
return { { { controllerBase + 1, (parameter >> 7) & 0x7f },
{ controllerBase + 0, (parameter >> 0) & 0x7f },
{ 0x06, (value >> 7) & 0x7f },
{ 0x26, (value >> 0) & 0x7f } } };
}
void addToSequence (MidiMessageSequence& s) const
{
for (const auto& pair : getControlValues())
s.addEvent (MidiMessage::controllerEvent (channel, pair.control, pair.value), time);
}
bool matches (const MidiMessage* begin, const MidiMessage* end) const
{
const auto isEqual = [this] (const ControlValue& cv, const MidiMessage& msg)
{
return msg.getTimeStamp() == time
&& msg.isController()
&& msg.getChannel() == channel
&& msg.getControllerNumber() == cv.control
&& msg.getControllerValue() == cv.value;
};
const auto pairs = getControlValues();
return std::equal (pairs.begin(), pairs.end(), begin, end, isEqual);
}
};
const auto addNrpn = [&] (MidiMessageSequence& seq, int channel, int parameter, int value, double time = 0.0)
{
DataEntry { 0x62, channel, parameter, value, time }.addToSequence (seq);
};
const auto addRpn = [&] (MidiMessageSequence& seq, int channel, int parameter, int value, double time = 0.0)
{
DataEntry { 0x64, channel, parameter, value, time }.addToSequence (seq);
};
const auto checkNrpn = [&] (const MidiMessage* begin, const MidiMessage* end, int channel, int parameter, int value, double time = 0.0)
{
expect (DataEntry { 0x62, channel, parameter, value, time }.matches (begin, end));
};
const auto checkRpn = [&] (const MidiMessage* begin, const MidiMessage* end, int channel, int parameter, int value, double time = 0.0)
{
expect (DataEntry { 0x64, channel, parameter, value, time }.matches (begin, end));
};
beginTest ("createControllerUpdatesForTime should emit (N)RPN components in the correct order");
{
const auto channel = 1;
const auto number = 200;
const auto value = 300;
MidiMessageSequence sequence;
addNrpn (sequence, channel, number, value);
Array<MidiMessage> m;
sequence.createControllerUpdatesForTime (channel, 1.0, m);
checkNrpn (m.begin(), m.end(), channel, number, value);
}
beginTest ("createControllerUpdatesForTime ignores (N)RPNs after the final requested time");
{
const auto channel = 2;
const auto number = 123;
const auto value = 456;
MidiMessageSequence sequence;
addRpn (sequence, channel, number, value, 0.5);
addRpn (sequence, channel, 111, 222, 1.5);
addRpn (sequence, channel, 333, 444, 2.5);
Array<MidiMessage> m;
sequence.createControllerUpdatesForTime (channel, 1.0, m);
checkRpn (m.begin(), std::next (m.begin(), 4), channel, number, value, 0.5);
}
beginTest ("createControllerUpdatesForTime should emit separate (N)RPN messages when appropriate");
{
const auto channel = 2;
const auto numberA = 1111;
const auto valueA = 9999;
const auto numberB = 8888;
const auto valueB = 2222;
const auto numberC = 7777;
const auto valueC = 3333;
const auto numberD = 6666;
const auto valueD = 4444;
const auto time = 0.5;
MidiMessageSequence sequence;
addRpn (sequence, channel, numberA, valueA, time);
addRpn (sequence, channel, numberB, valueB, time);
addNrpn (sequence, channel, numberC, valueC, time);
addNrpn (sequence, channel, numberD, valueD, time);
Array<MidiMessage> m;
sequence.createControllerUpdatesForTime (channel, time * 2, m);
checkRpn (std::next (m.begin(), 0), std::next (m.begin(), 4), channel, numberA, valueA, time);
checkRpn (std::next (m.begin(), 4), std::next (m.begin(), 8), channel, numberB, valueB, time);
checkNrpn (std::next (m.begin(), 8), std::next (m.begin(), 12), channel, numberC, valueC, time);
checkNrpn (std::next (m.begin(), 12), std::next (m.begin(), 16), channel, numberD, valueD, time);
}
beginTest ("createControllerUpdatesForTime correctly emits (N)RPN messages on multiple channels");
{
struct Info { int channel, number, value; };
const Info infos[] { { 2, 1111, 9999 },
{ 8, 8888, 2222 },
{ 5, 7777, 3333 },
{ 1, 6666, 4444 } };
const auto time = 0.5;
MidiMessageSequence sequence;
for (const auto& info : infos)
addRpn (sequence, info.channel, info.number, info.value, time);
for (const auto& info : infos)
{
Array<MidiMessage> m;
sequence.createControllerUpdatesForTime (info.channel, time * 2, m);
checkRpn (std::next (m.begin(), 0), std::next (m.begin(), 4), info.channel, info.number, info.value, time);
}
}
const auto messagesAreEqual = [] (const MidiMessage& a, const MidiMessage& b)
{
return std::equal (a.getRawData(), a.getRawData() + a.getRawDataSize(),
b.getRawData(), b.getRawData() + b.getRawDataSize());
};
beginTest ("createControllerUpdatesForTime sends bank select messages when the next program is in a new bank");
{
MidiMessageSequence sequence;
const auto time = 0.0;
const auto channel = 1;
sequence.addEvent (MidiMessage::programChange (channel, 5), time);
sequence.addEvent (MidiMessage::controllerEvent (channel, 0x00, 128), time);
sequence.addEvent (MidiMessage::controllerEvent (channel, 0x20, 64), time);
sequence.addEvent (MidiMessage::programChange (channel, 63), time);
const Array<MidiMessage> finalEvents { MidiMessage::controllerEvent (channel, 0x00, 50),
MidiMessage::controllerEvent (channel, 0x20, 40),
MidiMessage::programChange (channel, 30) };
for (const auto& e : finalEvents)
sequence.addEvent (e);
Array<MidiMessage> m;
sequence.createControllerUpdatesForTime (channel, 1.0, m);
expect (std::equal (m.begin(), m.end(), finalEvents.begin(), finalEvents.end(), messagesAreEqual));
}
beginTest ("createControllerUpdatesForTime preserves all Data Increment and Data Decrement messages");
{
MidiMessageSequence sequence;
const auto time = 0.0;
const auto channel = 1;
const Array<MidiMessage> messages { MidiMessage::controllerEvent (channel, 0x60, 0),
MidiMessage::controllerEvent (channel, 0x06, 100),
MidiMessage::controllerEvent (channel, 0x26, 50),
MidiMessage::controllerEvent (channel, 0x60, 10),
MidiMessage::controllerEvent (channel, 0x61, 10),
MidiMessage::controllerEvent (channel, 0x06, 20),
MidiMessage::controllerEvent (channel, 0x26, 30),
MidiMessage::controllerEvent (channel, 0x61, 10),
MidiMessage::controllerEvent (channel, 0x61, 20) };
for (const auto& m : messages)
sequence.addEvent (m, time);
Array<MidiMessage> m;
sequence.createControllerUpdatesForTime (channel, 1.0, m);
expect (std::equal (m.begin(), m.end(), messages.begin(), messages.end(), messagesAreEqual));
}
beginTest ("createControllerUpdatesForTime does not emit redundant parameter number changes");
{
MidiMessageSequence sequence;
const auto time = 0.0;
const auto channel = 1;
const Array<MidiMessage> messages { MidiMessage::controllerEvent (channel, 0x65, 0),
MidiMessage::controllerEvent (channel, 0x64, 100),
MidiMessage::controllerEvent (channel, 0x63, 50),
MidiMessage::controllerEvent (channel, 0x62, 10),
MidiMessage::controllerEvent (channel, 0x06, 10) };
for (const auto& m : messages)
sequence.addEvent (m, time);
Array<MidiMessage> m;
sequence.createControllerUpdatesForTime (channel, 1.0, m);
const Array<MidiMessage> expected { MidiMessage::controllerEvent (channel, 0x63, 50),
MidiMessage::controllerEvent (channel, 0x62, 10),
MidiMessage::controllerEvent (channel, 0x06, 10) };
expect (std::equal (m.begin(), m.end(), expected.begin(), expected.end(), messagesAreEqual));
}
beginTest ("createControllerUpdatesForTime sets parameter number correctly at end of sequence");
{
MidiMessageSequence sequence;
const auto time = 0.0;
const auto channel = 1;
const Array<MidiMessage> messages { MidiMessage::controllerEvent (channel, 0x65, 0),
MidiMessage::controllerEvent (channel, 0x64, 100),
MidiMessage::controllerEvent (channel, 0x63, 50),
MidiMessage::controllerEvent (channel, 0x62, 10),
MidiMessage::controllerEvent (channel, 0x06, 10),
MidiMessage::controllerEvent (channel, 0x64, 5) };
for (const auto& m : messages)
sequence.addEvent (m, time);
const auto finalTime = 1.0;
Array<MidiMessage> m;
sequence.createControllerUpdatesForTime (channel, finalTime, m);
const Array<MidiMessage> expected { MidiMessage::controllerEvent (channel, 0x63, 50),
MidiMessage::controllerEvent (channel, 0x62, 10),
MidiMessage::controllerEvent (channel, 0x06, 10),
// Note: we should send both the MSB and LSB!
MidiMessage::controllerEvent (channel, 0x65, 0).withTimeStamp (finalTime),
MidiMessage::controllerEvent (channel, 0x64, 5).withTimeStamp (finalTime) };
expect (std::equal (m.begin(), m.end(), expected.begin(), expected.end(), messagesAreEqual));
}
beginTest ("createControllerUpdatesForTime does not emit duplicate parameter number change messages");
{
MidiMessageSequence sequence;
const auto time = 0.0;
const auto channel = 1;
const Array<MidiMessage> messages { MidiMessage::controllerEvent (channel, 0x65, 1),
MidiMessage::controllerEvent (channel, 0x64, 2),
MidiMessage::controllerEvent (channel, 0x63, 3),
MidiMessage::controllerEvent (channel, 0x62, 4),
MidiMessage::controllerEvent (channel, 0x06, 10),
MidiMessage::controllerEvent (channel, 0x63, 30),
MidiMessage::controllerEvent (channel, 0x62, 40),
MidiMessage::controllerEvent (channel, 0x63, 3),
MidiMessage::controllerEvent (channel, 0x62, 4),
MidiMessage::controllerEvent (channel, 0x60, 5),
MidiMessage::controllerEvent (channel, 0x65, 10) };
for (const auto& m : messages)
sequence.addEvent (m, time);
const auto finalTime = 1.0;
Array<MidiMessage> m;
sequence.createControllerUpdatesForTime (channel, finalTime, m);
const Array<MidiMessage> expected { MidiMessage::controllerEvent (channel, 0x63, 3),
MidiMessage::controllerEvent (channel, 0x62, 4),
MidiMessage::controllerEvent (channel, 0x06, 10),
// Parameter number is set to (30, 40) then back to (3, 4),
// so there is no need to resend it
MidiMessage::controllerEvent (channel, 0x60, 5),
// Set parameter number to final value
MidiMessage::controllerEvent (channel, 0x65, 10).withTimeStamp (finalTime),
MidiMessage::controllerEvent (channel, 0x64, 2) .withTimeStamp (finalTime) };
expect (std::equal (m.begin(), m.end(), expected.begin(), expected.end(), messagesAreEqual));
}
beginTest ("createControllerUpdatesForTime emits bank change messages immediately before program change");
{
MidiMessageSequence sequence;
const auto time = 0.0;
const auto channel = 1;
const Array<MidiMessage> messages { MidiMessage::controllerEvent (channel, 0x00, 1),
MidiMessage::controllerEvent (channel, 0x20, 2),
MidiMessage::controllerEvent (channel, 0x65, 0),
MidiMessage::controllerEvent (channel, 0x64, 0),
MidiMessage::programChange (channel, 5) };
for (const auto& m : messages)
sequence.addEvent (m, time);
const auto finalTime = 1.0;
Array<MidiMessage> m;
sequence.createControllerUpdatesForTime (channel, finalTime, m);
const Array<MidiMessage> expected { MidiMessage::controllerEvent (channel, 0x00, 1),
MidiMessage::controllerEvent (channel, 0x20, 2),
MidiMessage::programChange (channel, 5),
MidiMessage::controllerEvent (channel, 0x65, 0).withTimeStamp (finalTime),
MidiMessage::controllerEvent (channel, 0x64, 0).withTimeStamp (finalTime) };
expect (std::equal (m.begin(), m.end(), expected.begin(), expected.end(), messagesAreEqual));
}
}
};
static MidiMessageSequenceTest midiMessageSequenceTests;
#endif
} // namespace juce
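The tests above step through the result array in blocks of four because a single (N)RPN update expands to four controller messages: parameter-number MSB and LSB, then data-entry MSB and LSB. The following sketch (illustrative only, not part of the removed file; the function name is made up and the juce_audio_basics headers are assumed to be included) shows the same round trip outside the test harness:

static void nrpnStateExample()
{
    using namespace juce;

    // Build a sequence containing one NRPN update on channel 1:
    // 0x63/0x62 select the 14-bit parameter number, 0x06/0x26 carry the 14-bit value.
    MidiMessageSequence sequence;
    const int channel = 1, parameter = 200, value = 300;
    sequence.addEvent (MidiMessage::controllerEvent (channel, 0x63, (parameter >> 7) & 0x7f), 0.0);
    sequence.addEvent (MidiMessage::controllerEvent (channel, 0x62, parameter & 0x7f),        0.0);
    sequence.addEvent (MidiMessage::controllerEvent (channel, 0x06, (value >> 7) & 0x7f),     0.0);
    sequence.addEvent (MidiMessage::controllerEvent (channel, 0x26, value & 0x7f),            0.0);

    // Reconstruct the controller state at t = 1.0; the result is the same
    // four-message block that checkNrpn() verifies in the tests above.
    Array<MidiMessage> state;
    sequence.createControllerUpdatesForTime (channel, 1.0, state);
}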

+ 0
- 315
libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiMessageSequence.h

@@ -1,315 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
A sequence of timestamped midi messages.
This allows the sequence to be manipulated, and also to be read from and
written to a standard midi file.
@see MidiMessage, MidiFile
@tags{Audio}
*/
class JUCE_API MidiMessageSequence
{
public:
//==============================================================================
/** Creates an empty midi sequence object. */
MidiMessageSequence();
/** Creates a copy of another sequence. */
MidiMessageSequence (const MidiMessageSequence&);
/** Replaces this sequence with another one. */
MidiMessageSequence& operator= (const MidiMessageSequence&);
/** Move constructor */
MidiMessageSequence (MidiMessageSequence&&) noexcept;
/** Move assignment operator */
MidiMessageSequence& operator= (MidiMessageSequence&&) noexcept;
//==============================================================================
/** Structure used to hold midi events in the sequence.
These structures act as 'handles' on the events as they are moved about in
the list, and make it quick to find the matching note-offs for note-on events.
@see MidiMessageSequence::getEventPointer
*/
class MidiEventHolder
{
public:
//==============================================================================
/** The message itself, whose timestamp is used to specify the event's time. */
MidiMessage message;
/** The matching note-off event (if this is a note-on event).
If this isn't a note-on, this pointer will be nullptr.
Use the MidiMessageSequence::updateMatchedPairs() method to keep these
note-offs up-to-date after events have been moved around in the sequence
or deleted.
*/
MidiEventHolder* noteOffObject = nullptr;
private:
//==============================================================================
friend class MidiMessageSequence;
MidiEventHolder (const MidiMessage&);
MidiEventHolder (MidiMessage&&);
JUCE_LEAK_DETECTOR (MidiEventHolder)
};
//==============================================================================
/** Clears the sequence. */
void clear();
/** Returns the number of events in the sequence. */
int getNumEvents() const noexcept;
/** Returns a pointer to one of the events. */
MidiEventHolder* getEventPointer (int index) const noexcept;
/** Iterator for the list of MidiEventHolders */
MidiEventHolder** begin() noexcept;
/** Iterator for the list of MidiEventHolders */
MidiEventHolder* const* begin() const noexcept;
/** Iterator for the list of MidiEventHolders */
MidiEventHolder** end() noexcept;
/** Iterator for the list of MidiEventHolders */
MidiEventHolder* const* end() const noexcept;
/** Returns the time of the note-up that matches the note-on at this index.
If the event at this index isn't a note-on, it'll just return 0.
@see MidiMessageSequence::MidiEventHolder::noteOffObject
*/
double getTimeOfMatchingKeyUp (int index) const noexcept;
/** Returns the index of the note-up that matches the note-on at this index.
If the event at this index isn't a note-on, it'll just return -1.
@see MidiMessageSequence::MidiEventHolder::noteOffObject
*/
int getIndexOfMatchingKeyUp (int index) const noexcept;
/** Returns the index of an event. */
int getIndexOf (const MidiEventHolder* event) const noexcept;
/** Returns the index of the first event on or after the given timestamp.
If the time is beyond the end of the sequence, this will return the
number of events.
*/
int getNextIndexAtTime (double timeStamp) const noexcept;
//==============================================================================
/** Returns the timestamp of the first event in the sequence.
@see getEndTime
*/
double getStartTime() const noexcept;
/** Returns the timestamp of the last event in the sequence.
@see getStartTime
*/
double getEndTime() const noexcept;
/** Returns the timestamp of the event at a given index.
If the index is out-of-range, this will return 0.0
*/
double getEventTime (int index) const noexcept;
//==============================================================================
/** Inserts a midi message into the sequence.
The index at which the new message gets inserted will depend on its timestamp,
because the sequence is kept sorted.
Remember to call updateMatchedPairs() after adding note-on events.
@param newMessage the new message to add (an internal copy will be made)
@param timeAdjustment an optional value to add to the timestamp of the message
that will be inserted
@see updateMatchedPairs
*/
MidiEventHolder* addEvent (const MidiMessage& newMessage, double timeAdjustment = 0);
/** Inserts a midi message into the sequence.
The index at which the new message gets inserted will depend on its timestamp,
because the sequence is kept sorted.
Remember to call updateMatchedPairs() after adding note-on events.
@param newMessage the new message to add (an internal copy will be made)
@param timeAdjustment an optional value to add to the timestamp of the message
that will be inserted
@see updateMatchedPairs
*/
MidiEventHolder* addEvent (MidiMessage&& newMessage, double timeAdjustment = 0);
/** Deletes one of the events in the sequence.
Remember to call updateMatchedPairs() after removing events.
@param index the index of the event to delete
@param deleteMatchingNoteUp whether to also remove the matching note-off
if the event you're removing is a note-on
*/
void deleteEvent (int index, bool deleteMatchingNoteUp);
/** Merges another sequence into this one.
Remember to call updateMatchedPairs() after using this method.
@param other the sequence to add from
@param timeAdjustmentDelta an amount to add to the timestamps of the midi events
as they are read from the other sequence
@param firstAllowableDestTime events will not be added if their time is earlier
than this time. (This is after their time has been adjusted
by the timeAdjustmentDelta)
@param endOfAllowableDestTimes events will not be added if their time is equal to
or greater than this time. (This is after their time has
been adjusted by the timeAdjustmentDelta)
*/
void addSequence (const MidiMessageSequence& other,
double timeAdjustmentDelta,
double firstAllowableDestTime,
double endOfAllowableDestTimes);
/** Merges another sequence into this one.
Remember to call updateMatchedPairs() after using this method.
@param other the sequence to add from
@param timeAdjustmentDelta an amount to add to the timestamps of the midi events
as they are read from the other sequence
*/
void addSequence (const MidiMessageSequence& other,
double timeAdjustmentDelta);
//==============================================================================
/** Makes sure all the note-on and note-off pairs are up-to-date.
Call this after re-ordering messages or deleting/adding messages, and it
will scan the list and make sure all the note-offs in the MidiEventHolder
structures are pointing at the correct ones.
*/
void updateMatchedPairs() noexcept;
/** Forces a sort of the sequence.
You may need to call this if you've manually modified the timestamps of some
events such that the overall order now needs updating.
*/
void sort() noexcept;
//==============================================================================
/** Copies all the messages for a particular midi channel to another sequence.
@param channelNumberToExtract the midi channel to look for, in the range 1 to 16
@param destSequence the sequence that the chosen events should be copied to
@param alsoIncludeMetaEvents if true, any meta-events (which don't apply to a specific
channel) will also be copied across.
@see extractSysExMessages
*/
void extractMidiChannelMessages (int channelNumberToExtract,
MidiMessageSequence& destSequence,
bool alsoIncludeMetaEvents) const;
/** Copies all midi sys-ex messages to another sequence.
@param destSequence this is the sequence to which any sys-exes in this sequence
will be added
@see extractMidiChannelMessages
*/
void extractSysExMessages (MidiMessageSequence& destSequence) const;
/** Removes any messages in this sequence that have a specific midi channel.
@param channelNumberToRemove the midi channel to look for, in the range 1 to 16
*/
void deleteMidiChannelMessages (int channelNumberToRemove);
/** Removes any sys-ex messages from this sequence. */
void deleteSysExMessages();
/** Adds an offset to the timestamps of all events in the sequence.
@param deltaTime the amount to add to each timestamp.
*/
void addTimeToMessages (double deltaTime) noexcept;
//==============================================================================
/** Scans through the sequence to determine the state of any midi controllers at
a given time.
This will create a sequence of midi controller changes that can be
used to set all midi controllers to the state they would be in at the
specified time within this sequence.
As well as controllers, it will also recreate the midi program number
and pitch bend position.
This function has special handling for the "bank select" and "data entry"
controllers (0x00, 0x20, 0x06, 0x26, 0x60, 0x61, 0x62, 0x63, 0x64, 0x65).
If the sequence contains multiple bank select and program change messages,
only the bank select messages immediately preceding the final program change
message will be kept.
All "data increment" and "data decrement" messages will be retained. Some hardware will
ignore the requested increment/decrement values, so retaining all messages is the only
way to ensure compatibility with all hardware.
"Parameter number" changes will be slightly condensed. Only the parameter number
events immediately preceding each data entry event will be kept. The parameter number
will also be set to its final value at the end of the sequence, if necessary.
@param channelNumber the midi channel to look for, in the range 1 to 16. Controllers
for other channels will be ignored.
@param time the time at which you want to find out the state - there are
no explicit units for this time measurement, it's the same units
as used for the timestamps of the messages
@param resultMessages an array to which midi controller-change messages will be added. This
will be the minimum number of controller changes to recreate the
state at the required time.
*/
void createControllerUpdatesForTime (int channelNumber, double time,
Array<MidiMessage>& resultMessages);
//==============================================================================
/** Swaps this sequence with another one. */
void swapWith (MidiMessageSequence&) noexcept;
private:
//==============================================================================
friend class MidiFile;
OwnedArray<MidiEventHolder> list;
MidiEventHolder* addEvent (MidiEventHolder*, double);
JUCE_LEAK_DETECTOR (MidiMessageSequence)
};
} // namespace juce
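A short usage sketch for the class declared above (illustrative only, not part of the removed header; the function name is made up and the juce_audio_basics headers are assumed to be included). It follows the documented requirement to call updateMatchedPairs() after adding note-on events:

static void sequenceBasicsExample()
{
    using namespace juce;

    MidiMessageSequence seq;
    seq.addEvent (MidiMessage::noteOn  (1, 60, (uint8) 100), 0.0);
    seq.addEvent (MidiMessage::noteOff (1, 60),              1.0);
    seq.updateMatchedPairs();  // keeps note-on/note-off pairs linked, as documented above

    const double start   = seq.getStartTime();              // 0.0
    const double end     = seq.getEndTime();                // 1.0
    const double noteOff = seq.getTimeOfMatchingKeyUp (0);  // 1.0, via the matched pair
    ignoreUnused (start, end, noteOff);
}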

+ 0
- 375
libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiRPN.cpp

@@ -1,375 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
MidiRPNDetector::MidiRPNDetector() noexcept
{
}
MidiRPNDetector::~MidiRPNDetector() noexcept
{
}
bool MidiRPNDetector::parseControllerMessage (int midiChannel,
int controllerNumber,
int controllerValue,
MidiRPNMessage& result) noexcept
{
jassert (midiChannel > 0 && midiChannel <= 16);
jassert (controllerNumber >= 0 && controllerNumber < 128);
jassert (controllerValue >= 0 && controllerValue < 128);
return states[midiChannel - 1].handleController (midiChannel, controllerNumber, controllerValue, result);
}
void MidiRPNDetector::reset() noexcept
{
for (int i = 0; i < 16; ++i)
{
states[i].parameterMSB = 0xff;
states[i].parameterLSB = 0xff;
states[i].resetValue();
states[i].isNRPN = false;
}
}
//==============================================================================
bool MidiRPNDetector::ChannelState::handleController (int channel,
int controllerNumber,
int value,
MidiRPNMessage& result) noexcept
{
switch (controllerNumber)
{
case 0x62: parameterLSB = uint8 (value); resetValue(); isNRPN = true; break;
case 0x63: parameterMSB = uint8 (value); resetValue(); isNRPN = true; break;
case 0x64: parameterLSB = uint8 (value); resetValue(); isNRPN = false; break;
case 0x65: parameterMSB = uint8 (value); resetValue(); isNRPN = false; break;
case 0x06: valueMSB = uint8 (value); return sendIfReady (channel, result);
case 0x26: valueLSB = uint8 (value); break;
default: break;
}
return false;
}
void MidiRPNDetector::ChannelState::resetValue() noexcept
{
valueMSB = 0xff;
valueLSB = 0xff;
}
//==============================================================================
bool MidiRPNDetector::ChannelState::sendIfReady (int channel, MidiRPNMessage& result) noexcept
{
if (parameterMSB < 0x80 && parameterLSB < 0x80)
{
if (valueMSB < 0x80)
{
result.channel = channel;
result.parameterNumber = (parameterMSB << 7) + parameterLSB;
result.isNRPN = isNRPN;
if (valueLSB < 0x80)
{
result.value = (valueMSB << 7) + valueLSB;
result.is14BitValue = true;
}
else
{
result.value = valueMSB;
result.is14BitValue = false;
}
return true;
}
}
return false;
}
//==============================================================================
MidiBuffer MidiRPNGenerator::generate (MidiRPNMessage message)
{
return generate (message.channel,
message.parameterNumber,
message.value,
message.isNRPN,
message.is14BitValue);
}
MidiBuffer MidiRPNGenerator::generate (int midiChannel,
int parameterNumber,
int value,
bool isNRPN,
bool use14BitValue)
{
jassert (midiChannel > 0 && midiChannel <= 16);
jassert (parameterNumber >= 0 && parameterNumber < 16384);
jassert (value >= 0 && value < (use14BitValue ? 16384 : 128));
uint8 parameterLSB = uint8 (parameterNumber & 0x0000007f);
uint8 parameterMSB = uint8 (parameterNumber >> 7);
uint8 valueLSB = use14BitValue ? uint8 (value & 0x0000007f) : 0x00;
uint8 valueMSB = use14BitValue ? uint8 (value >> 7) : uint8 (value);
uint8 channelByte = uint8 (0xb0 + midiChannel - 1);
MidiBuffer buffer;
buffer.addEvent (MidiMessage (channelByte, isNRPN ? 0x62 : 0x64, parameterLSB), 0);
buffer.addEvent (MidiMessage (channelByte, isNRPN ? 0x63 : 0x65, parameterMSB), 0);
// sending the value LSB is optional, but must come before sending the value MSB:
if (use14BitValue)
buffer.addEvent (MidiMessage (channelByte, 0x26, valueLSB), 0);
buffer.addEvent (MidiMessage (channelByte, 0x06, valueMSB), 0);
return buffer;
}
//==============================================================================
//==============================================================================
#if JUCE_UNIT_TESTS
class MidiRPNDetectorTests : public UnitTest
{
public:
MidiRPNDetectorTests()
: UnitTest ("MidiRPNDetector class", UnitTestCategories::midi)
{}
void runTest() override
{
beginTest ("7-bit RPN");
{
MidiRPNDetector detector;
MidiRPNMessage rpn;
expect (! detector.parseControllerMessage (2, 101, 0, rpn));
expect (! detector.parseControllerMessage (2, 100, 7, rpn));
expect (detector.parseControllerMessage (2, 6, 42, rpn));
expectEquals (rpn.channel, 2);
expectEquals (rpn.parameterNumber, 7);
expectEquals (rpn.value, 42);
expect (! rpn.isNRPN);
expect (! rpn.is14BitValue);
}
beginTest ("14-bit RPN");
{
MidiRPNDetector detector;
MidiRPNMessage rpn;
expect (! detector.parseControllerMessage (1, 100, 44, rpn));
expect (! detector.parseControllerMessage (1, 101, 2, rpn));
expect (! detector.parseControllerMessage (1, 38, 94, rpn));
expect (detector.parseControllerMessage (1, 6, 1, rpn));
expectEquals (rpn.channel, 1);
expectEquals (rpn.parameterNumber, 300);
expectEquals (rpn.value, 222);
expect (! rpn.isNRPN);
expect (rpn.is14BitValue);
}
beginTest ("RPNs on multiple channels simultaneously");
{
MidiRPNDetector detector;
MidiRPNMessage rpn;
expect (! detector.parseControllerMessage (1, 100, 44, rpn));
expect (! detector.parseControllerMessage (2, 101, 0, rpn));
expect (! detector.parseControllerMessage (1, 101, 2, rpn));
expect (! detector.parseControllerMessage (2, 100, 7, rpn));
expect (! detector.parseControllerMessage (1, 38, 94, rpn));
expect (detector.parseControllerMessage (2, 6, 42, rpn));
expectEquals (rpn.channel, 2);
expectEquals (rpn.parameterNumber, 7);
expectEquals (rpn.value, 42);
expect (! rpn.isNRPN);
expect (! rpn.is14BitValue);
expect (detector.parseControllerMessage (1, 6, 1, rpn));
expectEquals (rpn.channel, 1);
expectEquals (rpn.parameterNumber, 300);
expectEquals (rpn.value, 222);
expect (! rpn.isNRPN);
expect (rpn.is14BitValue);
}
beginTest ("14-bit RPN with value within 7-bit range");
{
MidiRPNDetector detector;
MidiRPNMessage rpn;
expect (! detector.parseControllerMessage (16, 100, 0 , rpn));
expect (! detector.parseControllerMessage (16, 101, 0, rpn));
expect (! detector.parseControllerMessage (16, 38, 3, rpn));
expect (detector.parseControllerMessage (16, 6, 0, rpn));
expectEquals (rpn.channel, 16);
expectEquals (rpn.parameterNumber, 0);
expectEquals (rpn.value, 3);
expect (! rpn.isNRPN);
expect (rpn.is14BitValue);
}
beginTest ("invalid RPN (wrong order)");
{
MidiRPNDetector detector;
MidiRPNMessage rpn;
expect (! detector.parseControllerMessage (2, 6, 42, rpn));
expect (! detector.parseControllerMessage (2, 101, 0, rpn));
expect (! detector.parseControllerMessage (2, 100, 7, rpn));
}
beginTest ("14-bit RPN interspersed with unrelated CC messages");
{
MidiRPNDetector detector;
MidiRPNMessage rpn;
expect (! detector.parseControllerMessage (16, 3, 80, rpn));
expect (! detector.parseControllerMessage (16, 100, 0 , rpn));
expect (! detector.parseControllerMessage (16, 4, 81, rpn));
expect (! detector.parseControllerMessage (16, 101, 0, rpn));
expect (! detector.parseControllerMessage (16, 5, 82, rpn));
expect (! detector.parseControllerMessage (16, 5, 83, rpn));
expect (! detector.parseControllerMessage (16, 38, 3, rpn));
expect (! detector.parseControllerMessage (16, 4, 84, rpn));
expect (! detector.parseControllerMessage (16, 3, 85, rpn));
expect (detector.parseControllerMessage (16, 6, 0, rpn));
expectEquals (rpn.channel, 16);
expectEquals (rpn.parameterNumber, 0);
expectEquals (rpn.value, 3);
expect (! rpn.isNRPN);
expect (rpn.is14BitValue);
}
beginTest ("14-bit NRPN");
{
MidiRPNDetector detector;
MidiRPNMessage rpn;
expect (! detector.parseControllerMessage (1, 98, 44, rpn));
expect (! detector.parseControllerMessage (1, 99 , 2, rpn));
expect (! detector.parseControllerMessage (1, 38, 94, rpn));
expect (detector.parseControllerMessage (1, 6, 1, rpn));
expectEquals (rpn.channel, 1);
expectEquals (rpn.parameterNumber, 300);
expectEquals (rpn.value, 222);
expect (rpn.isNRPN);
expect (rpn.is14BitValue);
}
beginTest ("reset");
{
MidiRPNDetector detector;
MidiRPNMessage rpn;
expect (! detector.parseControllerMessage (2, 101, 0, rpn));
detector.reset();
expect (! detector.parseControllerMessage (2, 100, 7, rpn));
expect (! detector.parseControllerMessage (2, 6, 42, rpn));
}
}
};
static MidiRPNDetectorTests MidiRPNDetectorUnitTests;
//==============================================================================
class MidiRPNGeneratorTests : public UnitTest
{
public:
MidiRPNGeneratorTests()
: UnitTest ("MidiRPNGenerator class", UnitTestCategories::midi)
{}
void runTest() override
{
beginTest ("generating RPN/NRPN");
{
{
MidiBuffer buffer = MidiRPNGenerator::generate (1, 23, 1337, true, true);
expectContainsRPN (buffer, 1, 23, 1337, true, true);
}
{
MidiBuffer buffer = MidiRPNGenerator::generate (16, 101, 34, false, false);
expectContainsRPN (buffer, 16, 101, 34, false, false);
}
{
MidiRPNMessage message = { 16, 101, 34, false, false };
MidiBuffer buffer = MidiRPNGenerator::generate (message);
expectContainsRPN (buffer, message);
}
}
}
private:
//==============================================================================
void expectContainsRPN (const MidiBuffer& midiBuffer,
int channel,
int parameterNumber,
int value,
bool isNRPN,
bool is14BitValue)
{
MidiRPNMessage expected = { channel, parameterNumber, value, isNRPN, is14BitValue };
expectContainsRPN (midiBuffer, expected);
}
//==============================================================================
void expectContainsRPN (const MidiBuffer& midiBuffer, MidiRPNMessage expected)
{
MidiRPNMessage result = MidiRPNMessage();
MidiRPNDetector detector;
for (const auto metadata : midiBuffer)
{
const auto midiMessage = metadata.getMessage();
if (detector.parseControllerMessage (midiMessage.getChannel(),
midiMessage.getControllerNumber(),
midiMessage.getControllerValue(),
result))
break;
}
expectEquals (result.channel, expected.channel);
expectEquals (result.parameterNumber, expected.parameterNumber);
expectEquals (result.value, expected.value);
expect (result.isNRPN == expected.isNRPN);
expect (result.is14BitValue == expected.is14BitValue);
}
};
static MidiRPNGeneratorTests MidiRPNGeneratorUnitTests;
#endif
} // namespace juce
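The generator above emits the parameter LSB, the parameter MSB, optionally the value LSB, and finally the value MSB, and the detector reassembles exactly that stream. The following round-trip sketch (illustrative only, mirroring expectContainsRPN() above; the function name is made up) shows the two classes used together:

static void rpnRoundTripExample()
{
    using namespace juce;

    // Channel 1, RPN number 23, 14-bit value 1337.
    const MidiBuffer buffer = MidiRPNGenerator::generate (1, 23, 1337, false, true);

    MidiRPNDetector detector;
    MidiRPNMessage parsed{};

    for (const auto metadata : buffer)
    {
        const auto msg = metadata.getMessage();

        if (detector.parseControllerMessage (msg.getChannel(),
                                             msg.getControllerNumber(),
                                             msg.getControllerValue(),
                                             parsed))
            break;  // parsed now holds channel 1, parameterNumber 23, value 1337

    }

    ignoreUnused (parsed);
}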

+ 0
- 153
libs/juce7/source/modules/juce_audio_basics/midi/juce_MidiRPN.h

@@ -1,153 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/** Represents a MIDI RPN (registered parameter number) or NRPN (non-registered
parameter number) message.
@tags{Audio}
*/
struct MidiRPNMessage
{
/** Midi channel of the message, in the range 1 to 16. */
int channel;
/** The 14-bit parameter index, in the range 0 to 16383 (0x3fff). */
int parameterNumber;
/** The parameter value, in the range 0 to 16383 (0x3fff).
If the message contains no value LSB, the value will be in the range
0 to 127 (0x7f).
*/
int value;
/** True if this message is an NRPN; false if it is an RPN. */
bool isNRPN;
/** True if the value uses 14-bit resolution (LSB + MSB); false if
the value is 7-bit (MSB only).
*/
bool is14BitValue;
};
//==============================================================================
/**
Parses a stream of MIDI data to assemble RPN and NRPN messages from their
constituent MIDI CC messages.
The detector uses the following parsing rules: the parameter number
LSB/MSB can be sent/received in either order and must both come before the
parameter value; for the parameter value, LSB always has to be sent/received
before the value MSB, otherwise it will be treated as 7-bit (MSB only).
@tags{Audio}
*/
class JUCE_API MidiRPNDetector
{
public:
/** Constructor. */
MidiRPNDetector() noexcept;
/** Destructor. */
~MidiRPNDetector() noexcept;
/** Resets the RPN detector's internal state, so that it forgets about
previously received MIDI CC messages.
*/
void reset() noexcept;
//==============================================================================
/** Takes the next in a stream of incoming MIDI CC messages and returns true
if it forms the last of a sequence that makes an RPN or NRPN.
If this returns true, then the RPNMessage object supplied will be
filled-out with the message's details.
(If it returns false then the RPNMessage object will be unchanged).
*/
bool parseControllerMessage (int midiChannel,
int controllerNumber,
int controllerValue,
MidiRPNMessage& result) noexcept;
private:
//==============================================================================
struct ChannelState
{
bool handleController (int channel, int controllerNumber,
int value, MidiRPNMessage&) noexcept;
void resetValue() noexcept;
bool sendIfReady (int channel, MidiRPNMessage&) noexcept;
uint8 parameterMSB = 0xff, parameterLSB = 0xff, valueMSB = 0xff, valueLSB = 0xff;
bool isNRPN = false;
};
//==============================================================================
ChannelState states[16];
JUCE_LEAK_DETECTOR (MidiRPNDetector)
};
//==============================================================================
/**
Generates an appropriate sequence of MIDI CC messages to represent an RPN
or NRPN message.
This sequence (as a MidiBuffer) can then be directly sent to a MidiOutput.
@tags{Audio}
*/
class JUCE_API MidiRPNGenerator
{
public:
//==============================================================================
/** Generates a MIDI sequence representing the given RPN or NRPN message. */
static MidiBuffer generate (MidiRPNMessage message);
//==============================================================================
/** Generates a MIDI sequence representing an RPN or NRPN message with the
given parameters.
@param channel The MIDI channel of the RPN/NRPN message.
@param parameterNumber The parameter number, in the range 0 to 16383.
@param value The parameter value, in the range 0 to 16383, or
in the range 0 to 127 if sendAs14BitValue is false.
@param isNRPN Whether you need a MIDI RPN or NRPN sequence (RPN is default).
@param use14BitValue If true (default), the value will have 14-bit precision
(two MIDI bytes). If false, instead the value will have
7-bit precision (a single MIDI byte).
*/
static MidiBuffer generate (int channel,
int parameterNumber,
int value,
bool isNRPN = false,
bool use14BitValue = true);
};
} // namespace juce

+ 0
- 47
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMP.h

@@ -1,47 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#include "../juce_MidiDataConcatenator.h"
#include "juce_UMPProtocols.h"
#include "juce_UMPUtils.h"
#include "juce_UMPacket.h"
#include "juce_UMPSysEx7.h"
#include "juce_UMPView.h"
#include "juce_UMPIterator.h"
#include "juce_UMPackets.h"
#include "juce_UMPFactory.h"
#include "juce_UMPConversion.h"
#include "juce_UMPMidi1ToBytestreamTranslator.h"
#include "juce_UMPMidi1ToMidi2DefaultTranslator.h"
#include "juce_UMPConverters.h"
#include "juce_UMPDispatcher.h"
#include "juce_UMPReceiver.h"
#ifndef DOXYGEN
namespace juce
{
namespace ump = universal_midi_packets;
}
#endif

+ 0
- 330
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPConversion.h

@@ -1,330 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#ifndef DOXYGEN
namespace juce
{
namespace universal_midi_packets
{
/**
Functions to assist conversion of UMP messages to/from other formats,
especially older 'bytestream' formatted MidiMessages.
@tags{Audio}
*/
struct Conversion
{
/** Converts from a MIDI 1 bytestream to MIDI 1 on Universal MIDI Packets.
`callback` is a function which accepts a single View argument.
*/
template <typename PacketCallbackFunction>
static void toMidi1 (const MidiMessage& m, PacketCallbackFunction&& callback)
{
const auto* data = m.getRawData();
const auto firstByte = data[0];
const auto size = m.getRawDataSize();
if (firstByte != 0xf0)
{
const auto mask = [size]() -> uint32_t
{
switch (size)
{
case 0: return 0xff000000;
case 1: return 0xffff0000;
case 2: return 0xffffff00;
case 3: return 0xffffffff;
}
return 0x00000000;
}();
const auto extraByte = (uint8_t) ((((firstByte & 0xf0) == 0xf0) ? 0x1 : 0x2) << 0x4);
const PacketX1 packet { mask & Utils::bytesToWord (extraByte, data[0], data[1], data[2]) };
callback (View (packet.data()));
return;
}
const auto numSysExBytes = m.getSysExDataSize();
const auto numMessages = SysEx7::getNumPacketsRequiredForDataSize ((uint32_t) numSysExBytes);
auto* dataOffset = m.getSysExData();
if (numMessages <= 1)
{
const auto packet = Factory::makeSysExIn1Packet (0, (uint8_t) numSysExBytes, dataOffset);
callback (View (packet.data()));
return;
}
constexpr auto byteIncrement = 6;
for (auto i = numSysExBytes; i > 0; i -= byteIncrement, dataOffset += byteIncrement)
{
const auto func = [&]
{
if (i == numSysExBytes)
return Factory::makeSysExStart;
if (i <= byteIncrement)
return Factory::makeSysExEnd;
return Factory::makeSysExContinue;
}();
const auto bytesNow = std::min (byteIncrement, i);
const auto packet = func (0, (uint8_t) bytesNow, dataOffset);
callback (View (packet.data()));
}
}
/** Converts a MidiMessage to one or more messages in UMP format, using
the MIDI 1.0 Protocol.
`packets` is an out-param to allow the caller to control
allocation/deallocation. Returning a new Packets object would
require every call to toMidi1 to allocate. With this version, no
allocations will occur, provided that `packets` has adequate reserved
space.
*/
static void toMidi1 (const MidiMessage& m, Packets& packets)
{
toMidi1 (m, [&] (const View& view) { packets.add (view); });
}
/** Widens a 7-bit MIDI 1.0 value to a 8-bit MIDI 2.0 value. */
static uint8_t scaleTo8 (uint8_t word7Bit)
{
const auto shifted = (uint8_t) (word7Bit << 0x1);
const auto repeat = (uint8_t) (word7Bit & 0x3f);
const auto mask = (uint8_t) (word7Bit <= 0x40 ? 0x0 : 0xff);
return (uint8_t) (shifted | ((repeat >> 5) & mask));
}
/** Widens a 7-bit MIDI 1.0 value to a 16-bit MIDI 2.0 value. */
static uint16_t scaleTo16 (uint8_t word7Bit)
{
const auto shifted = (uint16_t) (word7Bit << 0x9);
const auto repeat = (uint16_t) (word7Bit & 0x3f);
const auto mask = (uint16_t) (word7Bit <= 0x40 ? 0x0 : 0xffff);
return (uint16_t) (shifted | (((repeat << 3) | (repeat >> 3)) & mask));
}
/** Widens a 14-bit MIDI 1.0 value to a 16-bit MIDI 2.0 value. */
static uint16_t scaleTo16 (uint16_t word14Bit)
{
const auto shifted = (uint16_t) (word14Bit << 0x2);
const auto repeat = (uint16_t) (word14Bit & 0x1fff);
const auto mask = (uint16_t) (word14Bit <= 0x2000 ? 0x0 : 0xffff);
return (uint16_t) (shifted | ((repeat >> 11) & mask));
}
/** Widens a 7-bit MIDI 1.0 value to a 32-bit MIDI 2.0 value. */
static uint32_t scaleTo32 (uint8_t word7Bit)
{
const auto shifted = (uint32_t) (word7Bit << 0x19);
const auto repeat = (uint32_t) (word7Bit & 0x3f);
const auto mask = (uint32_t) (word7Bit <= 0x40 ? 0x0 : 0xffffffff);
return (uint32_t) (shifted | (((repeat << 19)
| (repeat << 13)
| (repeat << 7)
| (repeat << 1)
| (repeat >> 5)) & mask));
}
/** Widens a 14-bit MIDI 1.0 value to a 32-bit MIDI 2.0 value. */
static uint32_t scaleTo32 (uint16_t word14Bit)
{
const auto shifted = (uint32_t) (word14Bit << 0x12);
const auto repeat = (uint32_t) (word14Bit & 0x1fff);
const auto mask = (uint32_t) (word14Bit <= 0x2000 ? 0x0 : 0xffffffff);
return (uint32_t) (shifted | (((repeat << 5) | (repeat >> 8)) & mask));
}
/** Narrows an 8-bit MIDI 2.0 value to a 7-bit MIDI 1.0 value. */
static uint8_t scaleTo7 (uint8_t word8Bit) { return (uint8_t) (word8Bit >> 1); }
/** Narrows a 16-bit MIDI 2.0 value to a 7-bit MIDI 1.0 value. */
static uint8_t scaleTo7 (uint16_t word16Bit) { return (uint8_t) (word16Bit >> 9); }
/** Narrows a 32-bit MIDI 2.0 value to a 7-bit MIDI 1.0 value. */
static uint8_t scaleTo7 (uint32_t word32Bit) { return (uint8_t) (word32Bit >> 25); }
/** Narrows a 16-bit MIDI 2.0 value to a 14-bit MIDI 1.0 value. */
static uint16_t scaleTo14 (uint16_t word16Bit) { return (uint16_t) (word16Bit >> 2); }
/** Narrows a 32-bit MIDI 2.0 value to a 14-bit MIDI 1.0 value. */
static uint16_t scaleTo14 (uint32_t word32Bit) { return (uint16_t) (word32Bit >> 18); }
/** Converts UMP messages which may include MIDI 2.0 channel voice messages into
equivalent MIDI 1.0 messages (still in UMP format).
`callback` is a function that accepts a single View argument and will be
called with each converted packet.
Note that not all MIDI 2.0 messages have MIDI 1.0 equivalents, so such
messages will be ignored.
*/
template <typename Callback>
static void midi2ToMidi1DefaultTranslation (const View& v, Callback&& callback)
{
const auto firstWord = v[0];
if (Utils::getMessageType (firstWord) != 0x4)
{
callback (v);
return;
}
const auto status = Utils::getStatus (firstWord);
const auto typeAndGroup = (uint8_t) ((0x2 << 0x4) | Utils::getGroup (firstWord));
switch (status)
{
case 0x8: // note off
case 0x9: // note on
case 0xa: // poly pressure
case 0xb: // control change
{
const auto statusAndChannel = (uint8_t) ((firstWord >> 0x10) & 0xff);
const auto byte2 = (uint8_t) ((firstWord >> 0x08) & 0xff);
const auto byte3 = scaleTo7 (v[1]);
// If this is a note-on, and the scaled byte is 0,
// the scaled velocity should be 1 instead of 0
const auto needsCorrection = status == 0x9 && byte3 == 0;
const auto correctedByte = (uint8_t) (needsCorrection ? 1 : byte3);
const auto shouldIgnore = status == 0xb && [&]
{
switch (byte2)
{
case 0:
case 6:
case 32:
case 38:
case 98:
case 99:
case 100:
case 101:
return true;
}
return false;
}();
if (shouldIgnore)
return;
const PacketX1 packet { Utils::bytesToWord (typeAndGroup,
statusAndChannel,
byte2,
correctedByte) };
callback (View (packet.data()));
return;
}
case 0xd: // channel pressure
{
const auto statusAndChannel = (uint8_t) ((firstWord >> 0x10) & 0xff);
const auto byte2 = scaleTo7 (v[1]);
const PacketX1 packet { Utils::bytesToWord (typeAndGroup,
statusAndChannel,
byte2,
0) };
callback (View (packet.data()));
return;
}
case 0x2: // rpn
case 0x3: // nrpn
{
const auto ccX = (uint8_t) (status == 0x2 ? 101 : 99);
const auto ccY = (uint8_t) (status == 0x2 ? 100 : 98);
const auto statusAndChannel = (uint8_t) ((0xb << 0x4) | Utils::getChannel (firstWord));
const auto data = scaleTo14 (v[1]);
const PacketX1 packets[]
{
PacketX1 { Utils::bytesToWord (typeAndGroup, statusAndChannel, ccX, (uint8_t) ((firstWord >> 0x8) & 0x7f)) },
PacketX1 { Utils::bytesToWord (typeAndGroup, statusAndChannel, ccY, (uint8_t) ((firstWord >> 0x0) & 0x7f)) },
PacketX1 { Utils::bytesToWord (typeAndGroup, statusAndChannel, 6, (uint8_t) ((data >> 0x7) & 0x7f)) },
PacketX1 { Utils::bytesToWord (typeAndGroup, statusAndChannel, 38, (uint8_t) ((data >> 0x0) & 0x7f)) },
};
for (const auto& packet : packets)
callback (View (packet.data()));
return;
}
case 0xc: // program change / bank select
{
if (firstWord & 1)
{
const auto statusAndChannel = (uint8_t) ((0xb << 0x4) | Utils::getChannel (firstWord));
const auto secondWord = v[1];
const PacketX1 packets[]
{
PacketX1 { Utils::bytesToWord (typeAndGroup, statusAndChannel, 0, (uint8_t) ((secondWord >> 0x8) & 0x7f)) },
PacketX1 { Utils::bytesToWord (typeAndGroup, statusAndChannel, 32, (uint8_t) ((secondWord >> 0x0) & 0x7f)) },
};
for (const auto& packet : packets)
callback (View (packet.data()));
}
const auto statusAndChannel = (uint8_t) ((0xc << 0x4) | Utils::getChannel (firstWord));
const PacketX1 packet { Utils::bytesToWord (typeAndGroup,
statusAndChannel,
(uint8_t) ((v[1] >> 0x18) & 0x7f),
0) };
callback (View (packet.data()));
return;
}
case 0xe: // pitch bend
{
const auto data = scaleTo14 (v[1]);
const auto statusAndChannel = (uint8_t) ((firstWord >> 0x10) & 0xff);
const PacketX1 packet { Utils::bytesToWord (typeAndGroup,
statusAndChannel,
(uint8_t) (data & 0x7f),
(uint8_t) ((data >> 7) & 0x7f)) };
callback (View (packet.data()));
return;
}
default: // other message types do not translate
return;
}
}
};
}
}
#endif
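A sketch of the conversion helpers above (illustrative only, not part of the removed header; the function name is made up and the juce_audio_basics headers are assumed to be included): toMidi1() hands each resulting packet to a callback, and the scaleTo*() helpers widen MIDI 1.0 values to MIDI 2.0 ranges:

static void umpConversionExample()
{
    using namespace juce;
    using namespace juce::universal_midi_packets;

    // A channel-voice bytestream message converts to a single 32-bit MIDI 1.0 UMP.
    Conversion::toMidi1 (MidiMessage::noteOn (1, 60, (uint8) 100), [] (const View& packet)
    {
        const uint32_t word = packet[0];
        ignoreUnused (word);
    });

    // Widen a 7-bit controller value to the 16-bit MIDI 2.0 range.
    const uint16_t wide = Conversion::scaleTo16 ((uint8_t) 100);
    ignoreUnused (wide);
}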

+ 0
- 169
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPConverters.h

@@ -1,169 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#ifndef DOXYGEN
namespace juce
{
namespace universal_midi_packets
{
/**
Allows conversion from bytestream- or Universal MIDI Packet-formatted
messages to MIDI 1.0 messages in UMP format.
@tags{Audio}
*/
struct ToUMP1Converter
{
template <typename Fn>
void convert (const MidiMessage& m, Fn&& fn)
{
Conversion::toMidi1 (m, std::forward<Fn> (fn));
}
template <typename Fn>
void convert (const View& v, Fn&& fn)
{
Conversion::midi2ToMidi1DefaultTranslation (v, std::forward<Fn> (fn));
}
};
/**
Allows conversion from bytestream- or Universal MIDI Packet-formatted
messages to MIDI 2.0 messages in UMP format.
@tags{Audio}
*/
struct ToUMP2Converter
{
template <typename Fn>
void convert (const MidiMessage& m, Fn&& fn)
{
Conversion::toMidi1 (m, [&] (const View& v)
{
translator.dispatch (v, fn);
});
}
template <typename Fn>
void convert (const View& v, Fn&& fn)
{
translator.dispatch (v, std::forward<Fn> (fn));
}
void reset()
{
translator.reset();
}
Midi1ToMidi2DefaultTranslator translator;
};
/**
Allows conversion from bytestream- or Universal MIDI Packet-formatted
messages to UMP format.
The packet protocol can be selected using the constructor parameter.
@tags{Audio}
*/
class GenericUMPConverter
{
public:
explicit GenericUMPConverter (PacketProtocol m)
: mode (m) {}
void reset()
{
std::get<1> (converters).reset();
}
template <typename Fn>
void convert (const MidiMessage& m, Fn&& fn)
{
switch (mode)
{
case PacketProtocol::MIDI_1_0: return std::get<0> (converters).convert (m, std::forward<Fn> (fn));
case PacketProtocol::MIDI_2_0: return std::get<1> (converters).convert (m, std::forward<Fn> (fn));
}
}
template <typename Fn>
void convert (const View& v, Fn&& fn)
{
switch (mode)
{
case PacketProtocol::MIDI_1_0: return std::get<0> (converters).convert (v, std::forward<Fn> (fn));
case PacketProtocol::MIDI_2_0: return std::get<1> (converters).convert (v, std::forward<Fn> (fn));
}
}
template <typename Fn>
void convert (Iterator begin, Iterator end, Fn&& fn)
{
std::for_each (begin, end, [&] (const View& v)
{
convert (v, fn);
});
}
PacketProtocol getProtocol() const noexcept { return mode; }
private:
std::tuple<ToUMP1Converter, ToUMP2Converter> converters;
const PacketProtocol mode{};
};
/**
Allows conversion from bytestream- or Universal MIDI Packet-formatted
messages to bytestream format.
@tags{Audio}
*/
struct ToBytestreamConverter
{
explicit ToBytestreamConverter (int storageSize)
: translator (storageSize) {}
template <typename Fn>
void convert (const MidiMessage& m, Fn&& fn)
{
fn (m);
}
template <typename Fn>
void convert (const View& v, double time, Fn&& fn)
{
Conversion::midi2ToMidi1DefaultTranslation (v, [&] (const View& midi1)
{
translator.dispatch (midi1, time, fn);
});
}
void reset() { translator.reset(); }
Midi1ToBytestreamTranslator translator;
};
}
}
#endif
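A sketch of GenericUMPConverter (illustrative only, not part of the removed header; the function name is made up): the target protocol is chosen at construction, and each converted packet is delivered to the callback as a View, as described above:

static void genericConverterExample()
{
    using namespace juce;
    using namespace juce::universal_midi_packets;

    GenericUMPConverter converter (PacketProtocol::MIDI_2_0);

    converter.convert (MidiMessage::controllerEvent (1, 7, 100), [] (const View& packet)
    {
        // For MIDI 2.0, an ordinary control change arrives as a two-word (64-bit) packet.
        const uint32_t firstWord = packet[0];
        ignoreUnused (firstWord);
    });
}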

+ 0
- 202
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPDispatcher.h

@@ -1,202 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#ifndef DOXYGEN
namespace juce
{
namespace universal_midi_packets
{
/**
Parses a raw stream of uint32_t, and calls a user-provided callback every time
a full Universal MIDI Packet is encountered.
@tags{Audio}
*/
class Dispatcher
{
public:
/** Clears the dispatcher. */
void reset() { currentPacketLen = 0; }
/** Calls `callback` with a View of each packet encountered in the range delimited
by `begin` and `end`.
If the range ends part-way through a packet, the next call to `dispatch` will
continue from that point in the packet (unless `reset` is called first).
*/
template <typename PacketCallbackFunction>
void dispatch (const uint32_t* begin,
const uint32_t* end,
double timeStamp,
PacketCallbackFunction&& callback)
{
std::for_each (begin, end, [&] (uint32_t word)
{
nextPacket[currentPacketLen++] = word;
if (currentPacketLen == Utils::getNumWordsForMessageType (nextPacket.front()))
{
callback (View (nextPacket.data()), timeStamp);
currentPacketLen = 0;
}
});
}
private:
std::array<uint32_t, 4> nextPacket;
size_t currentPacketLen = 0;
};
//==============================================================================
/**
Parses a stream of bytes representing a sequence of bytestream-encoded MIDI 1.0 messages,
converting the messages to UMP format and passing the packets to a user-provided callback
as they become ready.
@tags{Audio}
*/
class BytestreamToUMPDispatcher
{
public:
/** Initialises the dispatcher.
Channel messages will be converted to the requested protocol format `pp`.
`storageSize` bytes will be allocated to store incomplete messages.
*/
explicit BytestreamToUMPDispatcher (PacketProtocol pp, int storageSize)
: concatenator (storageSize),
converter (pp)
{}
void reset()
{
concatenator.reset();
converter.reset();
}
/** Calls `callback` with a View of each converted packet as it becomes ready.
@param begin the first byte in a range of bytes representing bytestream-encoded MIDI messages.
@param end one-past the last byte in a range of bytes representing bytestream-encoded MIDI messages.
@param timestamp a timestamp to apply to the created packets.
@param callback a callback which will be passed a View pointing to each new packet as it becomes ready.
*/
template <typename PacketCallbackFunction>
void dispatch (const uint8_t* begin,
const uint8_t* end,
double timestamp,
PacketCallbackFunction&& callback)
{
using CallbackPtr = decltype (std::addressof (callback));
#if JUCE_MINGW
#define JUCE_MINGW_HIDDEN_VISIBILITY __attribute__ ((visibility ("hidden")))
#else
#define JUCE_MINGW_HIDDEN_VISIBILITY
#endif
struct JUCE_MINGW_HIDDEN_VISIBILITY Callback
{
Callback (BytestreamToUMPDispatcher& d, CallbackPtr c)
: dispatch (d), callbackPtr (c) {}
void handleIncomingMidiMessage (void*, const MidiMessage& msg) const
{
Conversion::toMidi1 (msg, [&] (const View& view)
{
dispatch.converter.convert (view, *callbackPtr);
});
}
void handlePartialSysexMessage (void*, const uint8_t*, int, double) const {}
BytestreamToUMPDispatcher& dispatch;
CallbackPtr callbackPtr = nullptr;
};
#undef JUCE_MINGW_HIDDEN_VISIBILITY
Callback inputCallback { *this, &callback };
concatenator.pushMidiData (begin, int (end - begin), timestamp, (void*) nullptr, inputCallback);
}
private:
MidiDataConcatenator concatenator;
GenericUMPConverter converter;
};
//==============================================================================
/**
Parses a stream of 32-bit words representing a sequence of UMP-encoded MIDI messages,
converting the messages to MIDI 1.0 bytestream format and passing them to a user-provided
callback as they become ready.
@tags{Audio}
*/
class ToBytestreamDispatcher
{
public:
/** Initialises the dispatcher.
`storageSize` bytes will be allocated to store incomplete messages.
*/
explicit ToBytestreamDispatcher (int storageSize)
: converter (storageSize) {}
/** Clears the dispatcher. */
void reset()
{
dispatcher.reset();
converter.reset();
}
/** Calls `callback` with converted bytestream-formatted MidiMessage whenever
a new message becomes available.
@param begin the first word in a stream of words representing UMP-encoded MIDI packets.
@param end one-past the last word in a stream of words representing UMP-encoded MIDI packets.
@param timestamp a timestamp to apply to converted messages.
@param callback a callback which will be passed a MidiMessage each time a new message becomes ready.
*/
template <typename BytestreamMessageCallback>
void dispatch (const uint32_t* begin,
const uint32_t* end,
double timestamp,
BytestreamMessageCallback&& callback)
{
dispatcher.dispatch (begin, end, timestamp, [&] (const View& view, double time)
{
converter.convert (view, time, callback);
});
}
private:
Dispatcher dispatcher;
ToBytestreamConverter converter;
};
}
}
#endif
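A sketch of BytestreamToUMPDispatcher (illustrative only, not part of the removed header; the function name is made up): raw bytestream MIDI bytes go in, and complete UMP packets come out through the callback, as documented for dispatch() above:

static void bytestreamDispatchExample()
{
    using namespace juce;
    using namespace juce::universal_midi_packets;

    BytestreamToUMPDispatcher dispatcher (PacketProtocol::MIDI_1_0, 2048);

    const uint8_t bytes[] = { 0x90, 0x3c, 0x64,    // note on,  channel 1, middle C, velocity 100
                              0x80, 0x3c, 0x00 };  // note off, channel 1, middle C

    dispatcher.dispatch (bytes, bytes + numElementsInArray (bytes), 0.0, [] (const View& packet)
    {
        const uint32_t firstWord = packet[0];  // one 32-bit word per MIDI 1.0 channel-voice packet
        ignoreUnused (firstWord);
    });
}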

+ 0
- 538
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPFactory.h

@@ -1,538 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#ifndef DOXYGEN
namespace juce
{
namespace universal_midi_packets
{
/**
This struct holds functions that can be used to create different kinds
of Universal MIDI Packet.
@tags{Audio}
*/
struct Factory
{
/** @internal */
struct Detail
{
static PacketX1 makeSystem() { return PacketX1{}.withMessageType (1); }
static PacketX1 makeV1() { return PacketX1{}.withMessageType (2); }
static PacketX2 makeV2() { return PacketX2{}.withMessageType (4); }
static PacketX2 makeSysEx (uint8_t group,
uint8_t status,
uint8_t numBytes,
const uint8_t* data)
{
jassert (numBytes <= 6);
std::array<uint8_t, 8> bytes{{}};
bytes[0] = (0x3 << 0x4) | group;
bytes[1] = (uint8_t) (status << 0x4) | numBytes;
std::memcpy (bytes.data() + 2, data, numBytes);
std::array<uint32_t, 2> words;
size_t index = 0;
for (auto& word : words)
word = ByteOrder::bigEndianInt (bytes.data() + 4 * index++);
return PacketX2 { words };
}
static PacketX4 makeSysEx8 (uint8_t group,
uint8_t status,
uint8_t numBytes,
uint8_t dataStart,
const uint8_t* data)
{
jassert (numBytes <= 16 - dataStart);
std::array<uint8_t, 16> bytes{{}};
bytes[0] = (0x5 << 0x4) | group;
bytes[1] = (uint8_t) (status << 0x4) | numBytes;
std::memcpy (bytes.data() + dataStart, data, numBytes);
std::array<uint32_t, 4> words;
size_t index = 0;
for (auto& word : words)
word = ByteOrder::bigEndianInt (bytes.data() + 4 * index++);
return PacketX4 { words };
}
};
static PacketX1 makeNoop (uint8_t group)
{
return PacketX1{}.withGroup (group);
}
static PacketX1 makeJRClock (uint8_t group, uint16_t time)
{
return PacketX1 { time }.withStatus (1).withGroup (group);
}
static PacketX1 makeJRTimestamp (uint8_t group, uint16_t time)
{
return PacketX1 { time }.withStatus (2).withGroup (group);
}
static PacketX1 makeTimeCode (uint8_t group, uint8_t code)
{
return Detail::makeSystem().withGroup (group)
.withU8<1> (0xf1)
.withU8<2> (code & 0x7f);
}
static PacketX1 makeSongPositionPointer (uint8_t group, uint16_t pos)
{
return Detail::makeSystem().withGroup (group)
.withU8<1> (0xf2)
.withU8<2> (pos & 0x7f)
.withU8<3> ((pos >> 7) & 0x7f);
}
static PacketX1 makeSongSelect (uint8_t group, uint8_t song)
{
return Detail::makeSystem().withGroup (group)
.withU8<1> (0xf3)
.withU8<2> (song & 0x7f);
}
static PacketX1 makeTuneRequest (uint8_t group)
{
return Detail::makeSystem().withGroup (group)
.withU8<1> (0xf6);
}
static PacketX1 makeTimingClock (uint8_t group)
{
return Detail::makeSystem().withGroup (group)
.withU8<1> (0xf8);
}
static PacketX1 makeStart (uint8_t group)
{
return Detail::makeSystem().withGroup (group)
.withU8<1> (0xfa);
}
static PacketX1 makeContinue (uint8_t group)
{
return Detail::makeSystem().withGroup (group)
.withU8<1> (0xfb);
}
static PacketX1 makeStop (uint8_t group)
{
return Detail::makeSystem().withGroup (group)
.withU8<1> (0xfc);
}
static PacketX1 makeActiveSensing (uint8_t group)
{
return Detail::makeSystem().withGroup (group)
.withU8<1> (0xfe);
}
static PacketX1 makeReset (uint8_t group)
{
return Detail::makeSystem().withGroup (group)
.withU8<1> (0xff);
}
static PacketX1 makeNoteOffV1 (uint8_t group,
uint8_t channel,
uint8_t note,
uint8_t velocity)
{
return Detail::makeV1().withGroup (group)
.withStatus (0x8)
.withChannel (channel)
.withU8<2> (note & 0x7f)
.withU8<3> (velocity & 0x7f);
}
static PacketX1 makeNoteOnV1 (uint8_t group,
uint8_t channel,
uint8_t note,
uint8_t velocity)
{
return Detail::makeV1().withGroup (group)
.withStatus (0x9)
.withChannel (channel)
.withU8<2> (note & 0x7f)
.withU8<3> (velocity & 0x7f);
}
static PacketX1 makePolyPressureV1 (uint8_t group,
uint8_t channel,
uint8_t note,
uint8_t pressure)
{
return Detail::makeV1().withGroup (group)
.withStatus (0xa)
.withChannel (channel)
.withU8<2> (note & 0x7f)
.withU8<3> (pressure & 0x7f);
}
static PacketX1 makeControlChangeV1 (uint8_t group,
uint8_t channel,
uint8_t controller,
uint8_t value)
{
return Detail::makeV1().withGroup (group)
.withStatus (0xb)
.withChannel (channel)
.withU8<2> (controller & 0x7f)
.withU8<3> (value & 0x7f);
}
static PacketX1 makeProgramChangeV1 (uint8_t group,
uint8_t channel,
uint8_t program)
{
return Detail::makeV1().withGroup (group)
.withStatus (0xc)
.withChannel (channel)
.withU8<2> (program & 0x7f);
}
static PacketX1 makeChannelPressureV1 (uint8_t group,
uint8_t channel,
uint8_t pressure)
{
return Detail::makeV1().withGroup (group)
.withStatus (0xd)
.withChannel (channel)
.withU8<2> (pressure & 0x7f);
}
static PacketX1 makePitchBend (uint8_t group,
uint8_t channel,
uint16_t pitchbend)
{
return Detail::makeV1().withGroup (group)
.withStatus (0xe)
.withChannel (channel)
.withU8<2> (pitchbend & 0x7f)
.withU8<3> ((pitchbend >> 7) & 0x7f);
}
static PacketX2 makeSysExIn1Packet (uint8_t group,
uint8_t numBytes,
const uint8_t* data)
{
return Detail::makeSysEx (group, 0x0, numBytes, data);
}
static PacketX2 makeSysExStart (uint8_t group,
uint8_t numBytes,
const uint8_t* data)
{
return Detail::makeSysEx (group, 0x1, numBytes, data);
}
static PacketX2 makeSysExContinue (uint8_t group,
uint8_t numBytes,
const uint8_t* data)
{
return Detail::makeSysEx (group, 0x2, numBytes, data);
}
static PacketX2 makeSysExEnd (uint8_t group,
uint8_t numBytes,
const uint8_t* data)
{
return Detail::makeSysEx (group, 0x3, numBytes, data);
}
static PacketX2 makeRegisteredPerNoteControllerV2 (uint8_t group,
uint8_t channel,
uint8_t note,
uint8_t controller,
uint32_t data)
{
return Detail::makeV2().withGroup (group)
.withStatus (0x0)
.withChannel (channel)
.withU8<2> (note & 0x7f)
.withU8<3> (controller & 0x7f)
.withU32<1> (data);
}
static PacketX2 makeAssignablePerNoteControllerV2 (uint8_t group,
uint8_t channel,
uint8_t note,
uint8_t controller,
uint32_t data)
{
return Detail::makeV2().withGroup (group)
.withStatus (0x1)
.withChannel (channel)
.withU8<2> (note & 0x7f)
.withU8<3> (controller & 0x7f)
.withU32<1> (data);
}
static PacketX2 makeRegisteredControllerV2 (uint8_t group,
uint8_t channel,
uint8_t bank,
uint8_t index,
uint32_t data)
{
return Detail::makeV2().withGroup (group)
.withStatus (0x2)
.withChannel (channel)
.withU8<2> (bank & 0x7f)
.withU8<3> (index & 0x7f)
.withU32<1> (data);
}
static PacketX2 makeAssignableControllerV2 (uint8_t group,
uint8_t channel,
uint8_t bank,
uint8_t index,
uint32_t data)
{
return Detail::makeV2().withGroup (group)
.withStatus (0x3)
.withChannel (channel)
.withU8<2> (bank & 0x7f)
.withU8<3> (index & 0x7f)
.withU32<1> (data);
}
static PacketX2 makeRelativeRegisteredControllerV2 (uint8_t group,
uint8_t channel,
uint8_t bank,
uint8_t index,
uint32_t data)
{
return Detail::makeV2().withGroup (group)
.withStatus (0x4)
.withChannel (channel)
.withU8<2> (bank & 0x7f)
.withU8<3> (index & 0x7f)
.withU32<1> (data);
}
static PacketX2 makeRelativeAssignableControllerV2 (uint8_t group,
uint8_t channel,
uint8_t bank,
uint8_t index,
uint32_t data)
{
return Detail::makeV2().withGroup (group)
.withStatus (0x5)
.withChannel (channel)
.withU8<2> (bank & 0x7f)
.withU8<3> (index & 0x7f)
.withU32<1> (data);
}
static PacketX2 makePerNotePitchBendV2 (uint8_t group,
uint8_t channel,
uint8_t note,
uint32_t data)
{
return Detail::makeV2().withGroup (group)
.withStatus (0x6)
.withChannel (channel)
.withU8<2> (note & 0x7f)
.withU32<1> (data);
}
enum class NoteAttributeKind : uint8_t
{
none = 0x00,
manufacturer = 0x01,
profile = 0x02,
pitch7_9 = 0x03
};
static PacketX2 makeNoteOffV2 (uint8_t group,
uint8_t channel,
uint8_t note,
NoteAttributeKind attribute,
uint16_t velocity,
uint16_t attributeValue)
{
return Detail::makeV2().withGroup (group)
.withStatus (0x8)
.withChannel (channel)
.withU8<2> (note & 0x7f)
.withU8<3> ((uint8_t) attribute)
.withU16<2> (velocity)
.withU16<3> (attributeValue);
}
static PacketX2 makeNoteOnV2 (uint8_t group,
uint8_t channel,
uint8_t note,
NoteAttributeKind attribute,
uint16_t velocity,
uint16_t attributeValue)
{
return Detail::makeV2().withGroup (group)
.withStatus (0x9)
.withChannel (channel)
.withU8<2> (note & 0x7f)
.withU8<3> ((uint8_t) attribute)
.withU16<2> (velocity)
.withU16<3> (attributeValue);
}
static PacketX2 makePolyPressureV2 (uint8_t group,
uint8_t channel,
uint8_t note,
uint32_t data)
{
return Detail::makeV2().withGroup (group)
.withStatus (0xa)
.withChannel (channel)
.withU8<2> (note & 0x7f)
.withU32<1> (data);
}
static PacketX2 makeControlChangeV2 (uint8_t group,
uint8_t channel,
uint8_t controller,
uint32_t data)
{
return Detail::makeV2().withGroup (group)
.withStatus (0xb)
.withChannel (channel)
.withU8<2> (controller & 0x7f)
.withU32<1> (data);
}
static PacketX2 makeProgramChangeV2 (uint8_t group,
uint8_t channel,
uint8_t optionFlags,
uint8_t program,
uint8_t bankMsb,
uint8_t bankLsb)
{
return Detail::makeV2().withGroup (group)
.withStatus (0xc)
.withChannel (channel)
.withU8<3> (optionFlags)
.withU8<4> (program)
.withU8<6> (bankMsb)
.withU8<7> (bankLsb);
}
static PacketX2 makeChannelPressureV2 (uint8_t group,
uint8_t channel,
uint32_t data)
{
return Detail::makeV2().withGroup (group)
.withStatus (0xd)
.withChannel (channel)
.withU32<1> (data);
}
static PacketX2 makePitchBendV2 (uint8_t group,
uint8_t channel,
uint32_t data)
{
return Detail::makeV2().withGroup (group)
.withStatus (0xe)
.withChannel (channel)
.withU32<1> (data);
}
static PacketX2 makePerNoteManagementV2 (uint8_t group,
uint8_t channel,
uint8_t note,
uint8_t optionFlags)
{
return Detail::makeV2().withGroup (group)
.withStatus (0xf)
.withChannel (channel)
.withU8<2> (note)
.withU8<3> (optionFlags);
}
static PacketX4 makeSysEx8in1Packet (uint8_t group,
uint8_t numBytes,
uint8_t streamId,
const uint8_t* data)
{
return Detail::makeSysEx8 (group, 0x0, numBytes, 3, data).withU8<2> (streamId);
}
static PacketX4 makeSysEx8Start (uint8_t group,
uint8_t numBytes,
uint8_t streamId,
const uint8_t* data)
{
return Detail::makeSysEx8 (group, 0x1, numBytes, 3, data).withU8<2> (streamId);
}
static PacketX4 makeSysEx8Continue (uint8_t group,
uint8_t numBytes,
uint8_t streamId,
const uint8_t* data)
{
return Detail::makeSysEx8 (group, 0x2, numBytes, 3, data).withU8<2> (streamId);
}
static PacketX4 makeSysEx8End (uint8_t group,
uint8_t numBytes,
uint8_t streamId,
const uint8_t* data)
{
return Detail::makeSysEx8 (group, 0x3, numBytes, 3, data).withU8<2> (streamId);
}
static PacketX4 makeMixedDataSetHeader (uint8_t group,
uint8_t dataSetId,
const uint8_t* data)
{
return Detail::makeSysEx8 (group, 0x8, 14, 2, data).withChannel (dataSetId);
}
static PacketX4 makeDataSetPayload (uint8_t group,
uint8_t dataSetId,
const uint8_t* data)
{
return Detail::makeSysEx8 (group, 0x9, 14, 2, data).withChannel (dataSetId);
}
};
}
}
#endif
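
The Factory functions above are plain value builders, so a short sketch is enough to show the intent (same assumptions as the earlier sketch; the function name is a placeholder).

#include <juce_audio_basics/juce_audio_basics.h>

using namespace juce::universal_midi_packets;

void buildSomePackets()
{
    // MIDI 1.0 Protocol note-on: group 0, channel 0, note 60, velocity 100 -> one 32-bit word.
    const PacketX1 noteOn = Factory::makeNoteOnV1 (0, 0, 60, 100);

    // MIDI 2.0 Protocol control change with full 32-bit resolution -> two 32-bit words.
    const PacketX2 cc = Factory::makeControlChangeV2 (0, 0, 7, 0xffffffff);

    jassert (Utils::getNumWordsForMessageType (noteOn[0]) == 1);
    jassert (Utils::getNumWordsForMessageType (cc[0]) == 2);
}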

+ 0
- 130
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPIterator.h

@@ -1,130 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#ifndef DOXYGEN
namespace juce
{
namespace universal_midi_packets
{
/**
Enables iteration over a collection of Universal MIDI Packets stored as
a contiguous range of 32-bit words.
This iterator is used by Packets to allow access to the messages
that it contains.
@tags{Audio}
*/
class Iterator
{
public:
/** Creates an invalid (singular) iterator. */
Iterator() noexcept = default;
/** Creates an iterator pointing at `ptr`. */
explicit Iterator (const uint32_t* ptr, size_t bytes) noexcept
: view (ptr)
#if JUCE_DEBUG
, bytesRemaining (bytes)
#endif
{
ignoreUnused (bytes);
}
using difference_type = std::iterator_traits<const uint32_t*>::difference_type;
using value_type = View;
using reference = const View&;
using pointer = const View*;
using iterator_category = std::forward_iterator_tag;
/** Moves this iterator to the next packet in the range. */
Iterator& operator++() noexcept
{
const auto increment = view.size();
#if JUCE_DEBUG
// If you hit this, the memory region contained a truncated or otherwise
// malformed Universal MIDI Packet.
// The Iterator can only be used on regions containing complete packets!
jassert (increment <= bytesRemaining);
bytesRemaining -= increment;
#endif
view = View (view.data() + increment);
return *this;
}
/** Moves this iterator to the next packet in the range,
returning the value of the iterator before it was
incremented.
*/
Iterator operator++ (int) noexcept
{
auto copy = *this;
++(*this);
return copy;
}
/** Returns true if this iterator points to the same address
as another iterator.
*/
bool operator== (const Iterator& other) const noexcept
{
return view == other.view;
}
/** Returns false if this iterator points to the same address
as another iterator.
*/
bool operator!= (const Iterator& other) const noexcept
{
return ! operator== (other);
}
/** Returns a reference to a View of the packet currently
pointed-to by this iterator.
The View can be queried for its size and content.
*/
reference operator*() noexcept { return view; }
/** Returns a pointer to a View of the packet currently
pointed-to by this iterator.
The View can be queried for its size and content.
*/
pointer operator->() noexcept { return &view; }
private:
View view;
#if JUCE_DEBUG
size_t bytesRemaining = 0;
#endif
};
}
}
#endif
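
A small sketch of walking a raw word range with the Iterator above. The second constructor argument feeds the debug bookkeeping in operator++, which counts whole 32-bit words, matching how Packets (later in this diff) constructs its begin/end iterators. Assumptions as before; names and words are illustrative.

#include <juce_audio_basics/juce_audio_basics.h>

using namespace juce::universal_midi_packets;

void walkPackets()
{
    // Two complete packets stored back-to-back: a 1-word utility NOOP
    // followed by a 2-word MIDI 2.0 channel-pressure message.
    const uint32_t words[] = { 0x00000000,
                               0x40d00000, 0x12345678 };

    constexpr size_t numWords = sizeof (words) / sizeof (words[0]);

    for (Iterator it (words, numWords), end (words + numWords, 0); it != end; ++it)
        DBG ("packet with " << (int) it->size() << " word(s)");
}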

+ 0
- 217
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPMidi1ToBytestreamTranslator.h

@@ -1,217 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#ifndef DOXYGEN
namespace juce
{
namespace universal_midi_packets
{
/**
Parses a raw stream of uint32_t holding a series of Universal MIDI Packets using
the MIDI 1.0 Protocol, converting to plain (non-UMP) MidiMessages.
@tags{Audio}
*/
class Midi1ToBytestreamTranslator
{
public:
/** Ensures that there is room in the internal buffer for a sysex message of at least
`initialBufferSize` bytes.
*/
explicit Midi1ToBytestreamTranslator (int initialBufferSize)
{
pendingSysExData.reserve (size_t (initialBufferSize));
}
/** Clears the concatenator. */
void reset()
{
pendingSysExData.clear();
pendingSysExTime = 0.0;
}
/** Converts a Universal MIDI Packet using the MIDI 1.0 Protocol to
an equivalent MidiMessage. Accumulates SysEx packets into a single
MidiMessage, as appropriate.
@param packet a packet which is using the MIDI 1.0 Protocol.
@param time the timestamp to be applied to these messages.
@param callback a callback which will be called with each converted MidiMessage.
*/
template <typename MessageCallback>
void dispatch (const View& packet, double time, MessageCallback&& callback)
{
const auto firstWord = *packet.data();
if (! pendingSysExData.empty() && shouldPacketTerminateSysExEarly (firstWord))
pendingSysExData.clear();
switch (packet.size())
{
case 1:
{
// Utility messages don't translate to bytestream format
if (Utils::getMessageType (firstWord) != 0x00)
callback (fromUmp (PacketX1 { firstWord }, time));
break;
}
case 2:
{
if (Utils::getMessageType (firstWord) == 0x3)
processSysEx (PacketX2 { packet[0], packet[1] }, time, callback);
break;
}
case 3: // no 3-word packets in the current spec
case 4: // no 4-word packets translate to bytestream format
default:
break;
}
}
/** Converts from a Universal MIDI Packet to MIDI 1 bytestream format.
This is only capable of converting a single Universal MIDI Packet to
an equivalent bytestream MIDI message. This function cannot understand
multi-packet messages, like SysEx7 messages.
To convert multi-packet messages, use `Midi1ToBytestreamTranslator`
to convert from a UMP MIDI 1.0 stream, or `ToBytestreamDispatcher`
to convert from both MIDI 2.0 and MIDI 1.0.
*/
static MidiMessage fromUmp (const PacketX1& m, double time = 0)
{
const auto word = m.front();
jassert (Utils::getNumWordsForMessageType (word) == 1);
const std::array<uint8_t, 3> bytes { { uint8_t ((word >> 0x10) & 0xff),
uint8_t ((word >> 0x08) & 0xff),
uint8_t ((word >> 0x00) & 0xff) } };
const auto numBytes = MidiMessage::getMessageLengthFromFirstByte (bytes.front());
return MidiMessage (bytes.data(), numBytes, time);
}
private:
template <typename MessageCallback>
void processSysEx (const PacketX2& packet,
double time,
MessageCallback&& callback)
{
switch (getSysEx7Kind (packet[0]))
{
case SysEx7::Kind::complete:
startSysExMessage (time);
pushBytes (packet);
terminateSysExMessage (callback);
break;
case SysEx7::Kind::begin:
startSysExMessage (time);
pushBytes (packet);
break;
case SysEx7::Kind::continuation:
if (pendingSysExData.empty())
break;
pushBytes (packet);
break;
case SysEx7::Kind::end:
if (pendingSysExData.empty())
break;
pushBytes (packet);
terminateSysExMessage (callback);
break;
}
}
void pushBytes (const PacketX2& packet)
{
const auto bytes = SysEx7::getDataBytes (packet);
pendingSysExData.insert (pendingSysExData.end(),
bytes.data.begin(),
bytes.data.begin() + bytes.size);
}
void startSysExMessage (double time)
{
pendingSysExTime = time;
pendingSysExData.push_back (0xf0);
}
template <typename MessageCallback>
void terminateSysExMessage (MessageCallback&& callback)
{
pendingSysExData.push_back (0xf7);
callback (MidiMessage (pendingSysExData.data(),
int (pendingSysExData.size()),
pendingSysExTime));
pendingSysExData.clear();
}
static bool shouldPacketTerminateSysExEarly (uint32_t firstWord)
{
return ! (isSysExContinuation (firstWord)
|| isSystemRealTime (firstWord)
|| isJROrNOP (firstWord));
}
static SysEx7::Kind getSysEx7Kind (uint32_t word)
{
return SysEx7::Kind ((word >> 0x14) & 0xf);
}
static bool isJROrNOP (uint32_t word)
{
return Utils::getMessageType (word) == 0x0;
}
static bool isSysExContinuation (uint32_t word)
{
if (Utils::getMessageType (word) != 0x3)
return false;
const auto kind = getSysEx7Kind (word);
return kind == SysEx7::Kind::continuation || kind == SysEx7::Kind::end;
}
static bool isSystemRealTime (uint32_t word)
{
return Utils::getMessageType (word) == 0x1 && ((word >> 0x10) & 0xff) >= 0xf8;
}
std::vector<uint8_t> pendingSysExData;
double pendingSysExTime = 0.0;
};
}
}
#endif
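
The static fromUmp helper above converts one single-word packet at a time; a sketch under the same assumptions, with an illustrative function name:

#include <juce_audio_basics/juce_audio_basics.h>

using namespace juce::universal_midi_packets;

void convertSingleWord()
{
    // A MIDI 1.0 Protocol note-on packet, converted to a plain MidiMessage.
    const auto packet = Factory::makeNoteOnV1 (0, 0, 60, 100);
    const juce::MidiMessage message = Midi1ToBytestreamTranslator::fromUmp (packet, 0.5);

    jassert (message.isNoteOn());
    jassert (message.getNoteNumber() == 60);
}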

+ 0
- 195
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPMidi1ToMidi2DefaultTranslator.cpp

@@ -1,195 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
namespace universal_midi_packets
{
PacketX2 Midi1ToMidi2DefaultTranslator::processNoteOnOrOff (const HelperValues helpers)
{
const auto velocity = helpers.byte2;
const auto needsConversion = (helpers.byte0 >> 0x4) == 0x9 && velocity == 0;
const auto firstByte = needsConversion ? (uint8_t) ((0x8 << 0x4) | (helpers.byte0 & 0xf))
: helpers.byte0;
return PacketX2
{
Utils::bytesToWord (helpers.typeAndGroup, firstByte, helpers.byte1, 0),
(uint32_t) (Conversion::scaleTo16 (velocity) << 0x10)
};
}
PacketX2 Midi1ToMidi2DefaultTranslator::processPolyPressure (const HelperValues helpers)
{
return PacketX2
{
Utils::bytesToWord (helpers.typeAndGroup, helpers.byte0, helpers.byte1, 0),
Conversion::scaleTo32 (helpers.byte2)
};
}
bool Midi1ToMidi2DefaultTranslator::processControlChange (const HelperValues helpers,
PacketX2& packet)
{
const auto statusAndChannel = helpers.byte0;
const auto cc = helpers.byte1;
const auto shouldAccumulate = [&]
{
switch (cc)
{
case 6:
case 38:
case 98:
case 99:
case 100:
case 101:
return true;
}
return false;
}();
const auto group = (uint8_t) (helpers.typeAndGroup & 0xf);
const auto channel = (uint8_t) (statusAndChannel & 0xf);
const auto byte = helpers.byte2;
if (shouldAccumulate)
{
auto& accumulator = groupAccumulators[group][channel];
if (accumulator.addByte (cc, byte))
{
const auto& bytes = accumulator.getBytes();
const auto bank = bytes[0];
const auto index = bytes[1];
const auto msb = bytes[2];
const auto lsb = bytes[3];
const auto value = (uint16_t) (((msb & 0x7f) << 7) | (lsb & 0x7f));
const auto newStatus = (uint8_t) (accumulator.getKind() == PnKind::nrpn ? 0x3 : 0x2);
packet = PacketX2
{
Utils::bytesToWord (helpers.typeAndGroup, (uint8_t) ((newStatus << 0x4) | channel), bank, index),
Conversion::scaleTo32 (value)
};
return true;
}
return false;
}
if (cc == 0)
{
groupBanks[group][channel].setMsb (byte);
return false;
}
if (cc == 32)
{
groupBanks[group][channel].setLsb (byte);
return false;
}
packet = PacketX2
{
Utils::bytesToWord (helpers.typeAndGroup, statusAndChannel, cc, 0),
Conversion::scaleTo32 (helpers.byte2)
};
return true;
}
PacketX2 Midi1ToMidi2DefaultTranslator::processProgramChange (const HelperValues helpers) const
{
const auto group = (uint8_t) (helpers.typeAndGroup & 0xf);
const auto channel = (uint8_t) (helpers.byte0 & 0xf);
const auto bank = groupBanks[group][channel];
const auto valid = bank.isValid();
return PacketX2
{
Utils::bytesToWord (helpers.typeAndGroup, helpers.byte0, 0, valid ? 1 : 0),
Utils::bytesToWord (helpers.byte1, 0, valid ? bank.getMsb() : 0, valid ? bank.getLsb() : 0)
};
}
PacketX2 Midi1ToMidi2DefaultTranslator::processChannelPressure (const HelperValues helpers)
{
return PacketX2
{
Utils::bytesToWord (helpers.typeAndGroup, helpers.byte0, 0, 0),
Conversion::scaleTo32 (helpers.byte1)
};
}
PacketX2 Midi1ToMidi2DefaultTranslator::processPitchBend (const HelperValues helpers)
{
const auto lsb = helpers.byte1;
const auto msb = helpers.byte2;
const auto value = (uint16_t) (msb << 7 | lsb);
return PacketX2
{
Utils::bytesToWord (helpers.typeAndGroup, helpers.byte0, 0, 0),
Conversion::scaleTo32 (value)
};
}
bool Midi1ToMidi2DefaultTranslator::PnAccumulator::addByte (uint8_t cc, uint8_t byte)
{
const auto isStart = cc == 99 || cc == 101;
if (isStart)
{
kind = cc == 99 ? PnKind::nrpn : PnKind::rpn;
index = 0;
}
bytes[index] = byte;
const auto shouldContinue = [&]
{
switch (index)
{
case 0: return isStart;
case 1: return kind == PnKind::nrpn ? cc == 98 : cc == 100;
case 2: return cc == 6;
case 3: return cc == 38;
}
return false;
}();
index = shouldContinue ? index + 1 : 0;
if (index != bytes.size())
return false;
index = 0;
return true;
}
}
}

+ 0
- 191
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPMidi1ToMidi2DefaultTranslator.h

@@ -1,191 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#ifndef DOXYGEN
namespace juce
{
namespace universal_midi_packets
{
/**
Translates a series of MIDI 1 Universal MIDI Packets to corresponding MIDI 2
packets.
@tags{Audio}
*/
class Midi1ToMidi2DefaultTranslator
{
public:
Midi1ToMidi2DefaultTranslator() = default;
/** Converts MIDI 1 Universal MIDI Packets to corresponding MIDI 2 packets,
calling `callback` with each converted packet.
In some cases (such as RPN/NRPN messages) multiple MIDI 1 packets will
convert to a single MIDI 2 packet. In these cases, the translator will
accumulate the full message internally, and send a single callback with
the completed message, once all the individual MIDI 1 packets have been
processed.
*/
template <typename PacketCallback>
void dispatch (const View& v, PacketCallback&& callback)
{
const auto firstWord = v[0];
const auto messageType = Utils::getMessageType (firstWord);
if (messageType != 0x2)
{
callback (v);
return;
}
const HelperValues helperValues
{
(uint8_t) ((0x4 << 0x4) | Utils::getGroup (firstWord)),
(uint8_t) ((firstWord >> 0x10) & 0xff),
(uint8_t) ((firstWord >> 0x08) & 0x7f),
(uint8_t) ((firstWord >> 0x00) & 0x7f),
};
switch (Utils::getStatus (firstWord))
{
case 0x8:
case 0x9:
{
const auto packet = processNoteOnOrOff (helperValues);
callback (View (packet.data()));
return;
}
case 0xa:
{
const auto packet = processPolyPressure (helperValues);
callback (View (packet.data()));
return;
}
case 0xb:
{
PacketX2 packet;
if (processControlChange (helperValues, packet))
callback (View (packet.data()));
return;
}
case 0xc:
{
const auto packet = processProgramChange (helperValues);
callback (View (packet.data()));
return;
}
case 0xd:
{
const auto packet = processChannelPressure (helperValues);
callback (View (packet.data()));
return;
}
case 0xe:
{
const auto packet = processPitchBend (helperValues);
callback (View (packet.data()));
return;
}
}
}
void reset()
{
groupAccumulators = {};
groupBanks = {};
}
private:
enum class PnKind { nrpn, rpn };
struct HelperValues
{
uint8_t typeAndGroup;
uint8_t byte0;
uint8_t byte1;
uint8_t byte2;
};
static PacketX2 processNoteOnOrOff (const HelperValues helpers);
static PacketX2 processPolyPressure (const HelperValues helpers);
bool processControlChange (const HelperValues helpers, PacketX2& packet);
PacketX2 processProgramChange (const HelperValues helpers) const;
static PacketX2 processChannelPressure (const HelperValues helpers);
static PacketX2 processPitchBend (const HelperValues helpers);
class PnAccumulator
{
public:
bool addByte (uint8_t cc, uint8_t byte);
const std::array<uint8_t, 4>& getBytes() const noexcept { return bytes; }
PnKind getKind() const noexcept { return kind; }
private:
std::array<uint8_t, 4> bytes;
uint8_t index = 0;
PnKind kind = PnKind::nrpn;
};
class Bank
{
public:
bool isValid() const noexcept { return ! (msb & 0x80); }
uint8_t getMsb() const noexcept { return msb & 0x7f; }
uint8_t getLsb() const noexcept { return lsb & 0x7f; }
void setMsb (uint8_t i) noexcept { msb = i & 0x7f; }
void setLsb (uint8_t i) noexcept { msb &= 0x7f; lsb = i & 0x7f; }
private:
// We use the top bit to indicate whether this bank is valid.
// After reading the spec, it's not clear how we should determine whether
// there are valid values, so we'll just assume that the bank is valid
// once either the lsb or msb have been written.
uint8_t msb = 0x80;
uint8_t lsb = 0x00;
};
using ChannelAccumulators = std::array<PnAccumulator, 16>;
std::array<ChannelAccumulators, 16> groupAccumulators;
using ChannelBanks = std::array<Bank, 16>;
std::array<ChannelBanks, 16> groupBanks;
};
}
}
#endif
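
A sketch of the upgrade path described in the class comment above (assumptions as before; the lambda body only checks the shape of the converted packet):

#include <juce_audio_basics/juce_audio_basics.h>

using namespace juce::universal_midi_packets;

void upgradeToMidi2()
{
    Midi1ToMidi2DefaultTranslator translator;

    const auto midi1NoteOn = Factory::makeNoteOnV1 (0, 0, 60, 100);

    translator.dispatch (View (midi1NoteOn.data()), [] (const View& converted)
    {
        // A MIDI 1.0 note-on becomes a 2-word MIDI 2.0 note-on with 16-bit velocity.
        jassert (converted.size() == 2);
        jassert (Utils::getMessageType (converted[0]) == 0x4);
    });
}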

+ 0
- 48
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPProtocols.h

@@ -1,48 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#ifndef DOXYGEN
namespace juce
{
namespace universal_midi_packets
{
/** The kinds of MIDI protocol that can be formatted into Universal MIDI Packets. */
enum class PacketProtocol
{
MIDI_1_0,
MIDI_2_0,
};
/** All kinds of MIDI protocol understood by JUCE. */
enum class MidiProtocol
{
bytestream,
UMP_MIDI_1_0,
UMP_MIDI_2_0,
};
}
}
#endif

+ 0
- 46
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPReceiver.h

@@ -1,46 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#ifndef DOXYGEN
namespace juce
{
namespace universal_midi_packets
{
/**
A base class for classes which receive Universal MIDI Packets from an input.
@tags{Audio}
*/
struct Receiver
{
virtual ~Receiver() noexcept = default;
/** This will be called each time a new packet is ready for processing. */
virtual void packetReceived (const View& packet, double time) = 0;
};
}
}
#endif
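
Receiver is a one-method interface; a minimal implementation might look like this (the type name is illustrative):

#include <juce_audio_basics/juce_audio_basics.h>

using namespace juce::universal_midi_packets;

// A trivial Receiver that just counts the packets it is given.
struct CountingReceiver : public Receiver
{
    void packetReceived (const View& packet, double time) override
    {
        juce::ignoreUnused (packet, time);
        ++numPackets;
    }

    int numPackets = 0;
};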

+ 0
- 53
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPSysEx7.cpp

@@ -1,53 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
namespace universal_midi_packets
{
uint32_t SysEx7::getNumPacketsRequiredForDataSize (uint32_t size)
{
constexpr auto denom = 6;
return (size / denom) + ((size % denom) != 0);
}
SysEx7::PacketBytes SysEx7::getDataBytes (const PacketX2& packet)
{
const auto numBytes = Utils::getChannel (packet[0]);
constexpr uint8_t maxBytes = 6;
jassert (numBytes <= maxBytes);
return
{
{ { packet.getU8<2>(),
packet.getU8<3>(),
packet.getU8<4>(),
packet.getU8<5>(),
packet.getU8<6>(),
packet.getU8<7>() } },
jmin (numBytes, maxBytes)
};
}
}
}

+ 0
- 77
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPSysEx7.h

@@ -1,77 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#ifndef DOXYGEN
namespace juce
{
namespace universal_midi_packets
{
/**
This struct acts as a single-file namespace for Universal MIDI Packet
functionality related to 7-bit SysEx.
@tags{Audio}
*/
struct SysEx7
{
/** Returns the number of 64-bit packets required to hold a series of
SysEx bytes.
The number passed to this function should exclude the leading/trailing
SysEx bytes used in an old midi bytestream, as these are not required
when using Universal MIDI Packets.
*/
static uint32_t getNumPacketsRequiredForDataSize (uint32_t);
/** The different kinds of UMP SysEx-7 message. */
enum class Kind : uint8_t
{
/** The whole message fits in a single 2-word packet. */
complete = 0,
/** The packet begins a SysEx message that will continue in subsequent packets. */
begin = 1,
/** The packet is a continuation of an ongoing SysEx message. */
continuation = 2,
/** The packet terminates an ongoing SysEx message. */
end = 3
};
/** Holds the bytes from a single SysEx-7 packet. */
struct PacketBytes
{
std::array<uint8_t, 6> data;
uint8_t size;
};
/** Extracts the data bytes from a 64-bit data message. */
static PacketBytes getDataBytes (const PacketX2& packet);
};
}
}
#endif
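
The packet-count helper above is simple integer arithmetic (at most 6 data bytes per 64-bit SysEx-7 packet); a few spot checks, with assumptions and naming as before:

#include <juce_audio_basics/juce_audio_basics.h>

using namespace juce::universal_midi_packets;

void sysEx7PacketCounts()
{
    // ceil(size / 6): each 64-bit packet carries up to 6 data bytes.
    jassert (SysEx7::getNumPacketsRequiredForDataSize (6)  == 1);
    jassert (SysEx7::getNumPacketsRequiredForDataSize (7)  == 2);
    jassert (SysEx7::getNumPacketsRequiredForDataSize (13) == 3);
}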

+ 0
- 59
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPUtils.cpp

@@ -1,59 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
namespace universal_midi_packets
{
uint32_t Utils::getNumWordsForMessageType (uint32_t mt)
{
switch (Utils::getMessageType (mt))
{
case 0x0:
case 0x1:
case 0x2:
case 0x6:
case 0x7:
return 1;
case 0x3:
case 0x4:
case 0x8:
case 0x9:
case 0xa:
return 2;
case 0xb:
case 0xc:
return 3;
case 0x5:
case 0xd:
case 0xe:
case 0xf:
return 4;
}
jassertfalse;
return 1;
}
}
}

+ 0
- 117
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPUtils.h

@@ -1,117 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#ifndef DOXYGEN
namespace juce
{
namespace universal_midi_packets
{
/**
Helpful types and functions for interacting with Universal MIDI Packets.
@tags{Audio}
*/
struct Utils
{
/** Joins 4 bytes into a single 32-bit word. */
static constexpr uint32_t bytesToWord (uint8_t a, uint8_t b, uint8_t c, uint8_t d)
{
return uint32_t (a << 0x18 | b << 0x10 | c << 0x08 | d << 0x00);
}
/** Returns the expected number of 32-bit words in a Universal MIDI Packet, given
the first word of the packet.
The result will be between 1 and 4 inclusive.
A result of 1 means that the word is itself a complete packet.
*/
static uint32_t getNumWordsForMessageType (uint32_t);
/**
Helper functions for setting/getting 4-bit ranges inside a 32-bit word.
*/
template <size_t Index>
struct U4
{
static constexpr uint32_t shift = (uint32_t) 0x1c - Index * 4;
static constexpr uint32_t set (uint32_t word, uint8_t value)
{
return (word & ~((uint32_t) 0xf << shift)) | (uint32_t) ((value & 0xf) << shift);
}
static constexpr uint8_t get (uint32_t word)
{
return (uint8_t) ((word >> shift) & 0xf);
}
};
/**
Helper functions for setting/getting 8-bit ranges inside a 32-bit word.
*/
template <size_t Index>
struct U8
{
static constexpr uint32_t shift = (uint32_t) 0x18 - Index * 8;
static constexpr uint32_t set (uint32_t word, uint8_t value)
{
return (word & ~((uint32_t) 0xff << shift)) | (uint32_t) (value << shift);
}
static constexpr uint8_t get (uint32_t word)
{
return (uint8_t) ((word >> shift) & 0xff);
}
};
/**
Helper functions for setting/getting 16-bit ranges inside a 32-bit word.
*/
template <size_t Index>
struct U16
{
static constexpr uint32_t shift = (uint32_t) 0x10 - Index * 16;
static constexpr uint32_t set (uint32_t word, uint16_t value)
{
return (word & ~((uint32_t) 0xffff << shift)) | (uint32_t) (value << shift);
}
static constexpr uint16_t get (uint32_t word)
{
return (uint16_t) ((word >> shift) & 0xffff);
}
};
static constexpr uint8_t getMessageType (uint32_t w) noexcept { return U4<0>::get (w); }
static constexpr uint8_t getGroup (uint32_t w) noexcept { return U4<1>::get (w); }
static constexpr uint8_t getStatus (uint32_t w) noexcept { return U4<2>::get (w); }
static constexpr uint8_t getChannel (uint32_t w) noexcept { return U4<3>::get (w); }
};
}
}
#endif
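
A sketch of packing and unpacking a word with the Utils helpers above; the literal bytes spell out a MIDI 1.0 note-on and are illustrative only:

#include <juce_audio_basics/juce_audio_basics.h>

using namespace juce::universal_midi_packets;

void packAndUnpackWord()
{
    // Pack (type 0x2, group 0), (status 0x9, channel 0), note 60, velocity 100
    // into one 32-bit word, then pull the fields back out at compile time.
    constexpr auto word = Utils::bytesToWord (0x20, 0x90, 0x3c, 0x64);

    static_assert (Utils::getMessageType (word) == 0x2, "MIDI 1.0 channel voice");
    static_assert (Utils::getStatus (word) == 0x9, "note-on");
    static_assert (Utils::U8<2>::get (word) == 0x3c, "note number");
}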

+ 0
- 35
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPView.cpp

@@ -1,35 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
namespace universal_midi_packets
{
uint32_t View::size() const noexcept
{
jassert (ptr != nullptr);
return Utils::getNumWordsForMessageType (*ptr);
}
}
}

+ 0
- 92
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPView.h

@@ -1,92 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#ifndef DOXYGEN
namespace juce
{
namespace universal_midi_packets
{
/**
Points to a single Universal MIDI Packet.
The packet must be well-formed for member functions to work correctly.
Specifically, the constructor argument must be the beginning of a region of
uint32_t that contains at least `getNumWordsForMessageType(*data)` items,
where `data` is the constructor argument.
NOTE: Instances of this class do not own the memory that they point to!
If you need to store a packet pointed-to by a View for later use, copy
the view contents to a Packets collection, or use the Utils::PacketX types.
@tags{Audio}
*/
class View
{
public:
/** Create an invalid view. */
View() noexcept = default;
/** Create a view of the packet starting at address `d`. */
explicit View (const uint32_t* data) noexcept : ptr (data) {}
/** Get a pointer to the first word in the Universal MIDI Packet currently
pointed-to by this view.
*/
const uint32_t* data() const noexcept { return ptr; }
/** Get the number of 32-bit words (between 1 and 4 inclusive) in the Universal
MIDI Packet currently pointed-to by this view.
*/
uint32_t size() const noexcept;
/** Get a specific word from this packet.
Passing an `index` that is greater than or equal to the result of `size`
will cause undefined behaviour.
*/
const uint32_t& operator[] (size_t index) const noexcept { return ptr[index]; }
/** Get an iterator pointing to the first word in the packet. */
const uint32_t* begin() const noexcept { return ptr; }
const uint32_t* cbegin() const noexcept { return ptr; }
/** Get an iterator pointing one-past the last word in the packet. */
const uint32_t* end() const noexcept { return ptr + size(); }
const uint32_t* cend() const noexcept { return ptr + size(); }
/** Return true if this view is pointing to the same address as another view. */
bool operator== (const View& other) const noexcept { return ptr == other.ptr; }
/** Return false if this view is pointing to the same address as another view. */
bool operator!= (const View& other) const noexcept { return ! operator== (other); }
private:
const uint32_t* ptr = nullptr;
};
}
}
#endif
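
A sketch of the non-owning View above, built over a packet created with the Factory from earlier in this diff (assumptions and naming as before):

#include <juce_audio_basics/juce_audio_basics.h>

using namespace juce::universal_midi_packets;

void inspectView()
{
    const auto packet = Factory::makeChannelPressureV2 (0, 0, 0x80000000);

    // The View does not own its storage; `packet` must outlive it.
    const View view (packet.data());

    jassert (view.size() == 2);
    jassert (view[0] == packet[0] && view[1] == packet[1]);
}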

+ 0
- 1018
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMP_test.cpp
File diff suppressed because it is too large


+ 0
- 193
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPacket.h

@@ -1,193 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#ifndef DOXYGEN
namespace juce
{
namespace universal_midi_packets
{
/**
Holds a single Universal MIDI Packet.
@tags{Audio}
*/
template <size_t numWords>
class Packet
{
public:
Packet() = default;
template <size_t w = numWords, typename std::enable_if<w == 1, int>::type = 0>
Packet (uint32_t a)
: contents { { a } }
{
jassert (Utils::getNumWordsForMessageType (a) == 1);
}
template <size_t w = numWords, typename std::enable_if<w == 2, int>::type = 0>
Packet (uint32_t a, uint32_t b)
: contents { { a, b } }
{
jassert (Utils::getNumWordsForMessageType (a) == 2);
}
template <size_t w = numWords, typename std::enable_if<w == 3, int>::type = 0>
Packet (uint32_t a, uint32_t b, uint32_t c)
: contents { { a, b, c } }
{
jassert (Utils::getNumWordsForMessageType (a) == 3);
}
template <size_t w = numWords, typename std::enable_if<w == 4, int>::type = 0>
Packet (uint32_t a, uint32_t b, uint32_t c, uint32_t d)
: contents { { a, b, c, d } }
{
jassert (Utils::getNumWordsForMessageType (a) == 4);
}
template <size_t w, typename std::enable_if<w == numWords, int>::type = 0>
explicit Packet (const std::array<uint32_t, w>& fullPacket)
: contents (fullPacket)
{
jassert (Utils::getNumWordsForMessageType (fullPacket.front()) == numWords);
}
Packet withMessageType (uint8_t type) const noexcept
{
return withU4<0> (type);
}
Packet withGroup (uint8_t group) const noexcept
{
return withU4<1> (group);
}
Packet withStatus (uint8_t status) const noexcept
{
return withU4<2> (status);
}
Packet withChannel (uint8_t channel) const noexcept
{
return withU4<3> (channel);
}
uint8_t getMessageType() const noexcept { return getU4<0>(); }
uint8_t getGroup() const noexcept { return getU4<1>(); }
uint8_t getStatus() const noexcept { return getU4<2>(); }
uint8_t getChannel() const noexcept { return getU4<3>(); }
template <size_t index>
Packet withU4 (uint8_t value) const noexcept
{
constexpr auto word = index / 8;
auto copy = *this;
std::get<word> (copy.contents) = Utils::U4<index % 8>::set (copy.template getU32<word>(), value);
return copy;
}
template <size_t index>
Packet withU8 (uint8_t value) const noexcept
{
constexpr auto word = index / 4;
auto copy = *this;
std::get<word> (copy.contents) = Utils::U8<index % 4>::set (copy.template getU32<word>(), value);
return copy;
}
template <size_t index>
Packet withU16 (uint16_t value) const noexcept
{
constexpr auto word = index / 2;
auto copy = *this;
std::get<word> (copy.contents) = Utils::U16<index % 2>::set (copy.template getU32<word>(), value);
return copy;
}
template <size_t index>
Packet withU32 (uint32_t value) const noexcept
{
auto copy = *this;
std::get<index> (copy.contents) = value;
return copy;
}
template <size_t index>
uint8_t getU4() const noexcept
{
return Utils::U4<index % 8>::get (this->template getU32<index / 8>());
}
template <size_t index>
uint8_t getU8() const noexcept
{
return Utils::U8<index % 4>::get (this->template getU32<index / 4>());
}
template <size_t index>
uint16_t getU16() const noexcept
{
return Utils::U16<index % 2>::get (this->template getU32<index / 2>());
}
template <size_t index>
uint32_t getU32() const noexcept
{
return std::get<index> (contents);
}
//==============================================================================
using Contents = std::array<uint32_t, numWords>;
using const_iterator = typename Contents::const_iterator;
const_iterator begin() const noexcept { return contents.begin(); }
const_iterator cbegin() const noexcept { return contents.begin(); }
const_iterator end() const noexcept { return contents.end(); }
const_iterator cend() const noexcept { return contents.end(); }
const uint32_t* data() const noexcept { return contents.data(); }
const uint32_t& front() const noexcept { return contents.front(); }
const uint32_t& back() const noexcept { return contents.back(); }
const uint32_t& operator[] (size_t index) const noexcept { return contents[index]; }
private:
Contents contents { {} };
};
using PacketX1 = Packet<1>;
using PacketX2 = Packet<2>;
using PacketX3 = Packet<3>;
using PacketX4 = Packet<4>;
}
}
#endif
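
The with*/get* accessors above return modified copies, which allows fluent construction; a sketch under the same assumptions, with an illustrative function name:

#include <juce_audio_basics/juce_audio_basics.h>

using namespace juce::universal_midi_packets;

void buildPacketByHand()
{
    // Build a MIDI 1.0 note-on word field-by-field instead of via Factory.
    const auto packet = PacketX1{}.withMessageType (0x2)   // MIDI 1.0 channel voice
                                  .withGroup (0)
                                  .withStatus (0x9)        // note-on
                                  .withChannel (0)
                                  .withU8<2> (60)          // note number
                                  .withU8<3> (100);        // velocity

    jassert (packet.getMessageType() == 0x2);
    jassert (packet.getU8<2>() == 60);
}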

+ 0
- 96
libs/juce7/source/modules/juce_audio_basics/midi/ump/juce_UMPackets.h

@@ -1,96 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
#ifndef DOXYGEN
namespace juce
{
namespace universal_midi_packets
{
/**
Holds a collection of Universal MIDI Packets.
Unlike MidiBuffer, this collection does not store any additional information
(e.g. timestamps) alongside the raw messages.
If timestamps are required, these can be added to the container in UMP format,
as Jitter Reduction Utility messages.
@tags{Audio}
*/
class Packets
{
public:
/** Adds a single packet to the collection.
The View must be valid for this to work. If the view
points to a malformed message, or if the view points to a region
too short for the contained message, this call will result in
undefined behaviour.
*/
void add (const View& v) { storage.insert (storage.end(), v.cbegin(), v.cend()); }
void add (const PacketX1& p) { addImpl (p); }
void add (const PacketX2& p) { addImpl (p); }
void add (const PacketX3& p) { addImpl (p); }
void add (const PacketX4& p) { addImpl (p); }
/** Pre-allocates space for at least `numWords` 32-bit words in this collection. */
void reserve (size_t numWords) { storage.reserve (numWords); }
/** Removes all previously-added packets from this collection. */
void clear() { storage.clear(); }
/** Gets an iterator pointing to the first packet in this collection. */
Iterator cbegin() const noexcept { return Iterator (data(), size()); }
Iterator begin() const noexcept { return cbegin(); }
/** Gets an iterator pointing one-past the last packet in this collection. */
Iterator cend() const noexcept { return Iterator (data() + size(), 0); }
Iterator end() const noexcept { return cend(); }
/** Gets a pointer to the contents of the collection as a range of raw 32-bit words. */
const uint32_t* data() const noexcept { return storage.data(); }
/** Returns the number of uint32_t words in storage.
Note that this is likely to be larger than the number of packets
currently being stored, as some packets span multiple words.
*/
size_t size() const noexcept { return storage.size(); }
private:
template <size_t numWords>
void addImpl (const Packet<numWords>& p)
{
jassert (Utils::getNumWordsForMessageType (p[0]) == numWords);
add (View (p.data()));
}
std::vector<uint32_t> storage;
};
}
}
#endif
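
A sketch combining the pieces above: filling a Packets collection and iterating it as Views (assumptions as before; note that size() counts words, not packets):

#include <juce_audio_basics/juce_audio_basics.h>

using namespace juce::universal_midi_packets;

void collectPackets()
{
    Packets packets;
    packets.reserve (8);

    packets.add (Factory::makeNoteOnV1  (0, 0, 60, 100));
    packets.add (Factory::makeNoteOffV1 (0, 0, 60, 64));

    // Iteration yields one View per packet, however many words each spans.
    for (const auto& view : packets)
        jassert (view.size() == 1);

    jassert (packets.size() == 2);   // two single-word packets -> two words of storage
}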

+ 0
- 2352
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEInstrument.cpp
File diff suppressed because it is too large


+ 0
- 426
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEInstrument.h

@@ -1,426 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
This class represents an instrument handling MPE.
It has an MPE zone layout and maintains a state of currently
active (playing) notes and the values of their dimensions of expression.
You can trigger and modulate notes:
- by passing MIDI messages with the method processNextMidiEvent;
- by directly calling the methods noteOn, noteOff etc.
The class implements the channel and note management logic specified in
MPE. If you pass it a message, it will know what notes on what
channels (if any) should be affected by that message.
The class has a Listener class that can be used to react to note and
state changes and trigger some functionality for your application.
For example, you can use this class to write an MPE visualiser.
If you want to write a real-time audio synth with MPE functionality,
you should instead use the classes MPESynthesiserBase, which adds
the ability to render audio and to manage voices.
@see MPENote, MPEZoneLayout, MPESynthesiser
@tags{Audio}
*/
class JUCE_API MPEInstrument
{
public:
/** Constructor.
This will construct an MPE instrument with inactive lower and upper zones.
In order to process incoming MIDI messages call setZoneLayout, use the MPEZoneLayout
constructor, define the layout via MIDI RPN messages, or set the instrument to legacy mode.
*/
MPEInstrument() noexcept;
/** Constructs an MPE instrument with the specified zone layout. */
MPEInstrument (MPEZoneLayout layout);
/** Destructor. */
virtual ~MPEInstrument();
//==============================================================================
/** Returns the current zone layout of the instrument.
This happens by value, to enforce thread-safety and class invariants.
Note: If the instrument is in legacy mode, the return value of this
method is unspecified.
*/
MPEZoneLayout getZoneLayout() const noexcept;
/** Re-sets the zone layout of the instrument to the one passed in.
As a side effect, this will discard all currently playing notes,
and call noteReleased for all of them.
This will also disable legacy mode in case it was enabled previously.
*/
void setZoneLayout (MPEZoneLayout newLayout);
/** Returns true if the given MIDI channel (1-16) is a note channel in any
of the MPEInstrument's MPE zones; false otherwise.
When in legacy mode, this will return true if the given channel is
contained in the current legacy mode channel range; false otherwise.
*/
bool isMemberChannel (int midiChannel) const noexcept;
/** Returns true if the given MIDI channel (1-16) is a master channel (channel
1 or 16).
In legacy mode, this will always return false.
*/
bool isMasterChannel (int midiChannel) const noexcept;
/** Returns true if the given MIDI channel (1-16) is used by any of the
MPEInstrument's MPE zones; false otherwise.
When in legacy mode, this will return true if the given channel is
contained in the current legacy mode channel range; false otherwise.
*/
bool isUsingChannel (int midiChannel) const noexcept;
//==============================================================================
/** The MPE note tracking mode. In case there is more than one note playing
simultaneously on the same MIDI channel, this determines which of these
notes will be modulated by an incoming MPE message on that channel
(pressure, pitchbend, or timbre).
The default is lastNotePlayedOnChannel.
*/
enum TrackingMode
{
lastNotePlayedOnChannel, /**< The most recent note on the channel that is still played (key down and/or sustained). */
lowestNoteOnChannel, /**< The lowest note (by initialNote) on the channel with the note key still down. */
highestNoteOnChannel, /**< The highest note (by initialNote) on the channel with the note key still down. */
allNotesOnChannel /**< All notes on the channel (key down and/or sustained). */
};
/** Set the MPE tracking mode for the pressure dimension. */
void setPressureTrackingMode (TrackingMode modeToUse);
/** Set the MPE tracking mode for the pitchbend dimension. */
void setPitchbendTrackingMode (TrackingMode modeToUse);
/** Set the MPE tracking mode for the timbre dimension. */
void setTimbreTrackingMode (TrackingMode modeToUse);
//==============================================================================
/** Process a MIDI message and trigger the appropriate method calls
(noteOn, noteOff etc.)
You can override this method if you need some special MIDI message
treatment on top of the standard MPE logic implemented here.
*/
virtual void processNextMidiEvent (const MidiMessage& message);
//==============================================================================
/** Request a note-on on the given channel, with the given initial note
number and velocity.
If the message arrives on a valid note channel, this will create a
new MPENote and call the noteAdded callback.
*/
virtual void noteOn (int midiChannel, int midiNoteNumber, MPEValue midiNoteOnVelocity);
/** Request a note-off.
If there is a matching playing note, this will release the note
(except if it is sustained by a sustain or sostenuto pedal) and call
the noteReleased callback.
*/
virtual void noteOff (int midiChannel, int midiNoteNumber, MPEValue midiNoteOffVelocity);
/** Request a pitchbend on the given channel with the given value (in units
of MIDI pitchwheel position).
Internally, this will determine whether the pitchwheel move is a
per-note pitchbend or a master pitchbend (depending on midiChannel),
take the correct per-note or master pitchbend range of the affected MPE
zone, and apply the resulting pitchbend to the affected note(s) (if any).
*/
virtual void pitchbend (int midiChannel, MPEValue pitchbend);
/** Request a pressure change on the given channel with the given value.
This will modify the pressure dimension of the note currently held down
on this channel (if any). If the channel is a zone master channel,
the pressure change will be broadcast to all notes in this zone.
*/
virtual void pressure (int midiChannel, MPEValue value);
/** Request a third dimension (timbre) change on the given channel with the
given value.
This will modify the timbre dimension of the note currently held down
on this channel (if any). If the channel is a zone master channel,
the timbre change will be broadcast to all notes in this zone.
*/
virtual void timbre (int midiChannel, MPEValue value);
/** Request a poly-aftertouch change for a given note number.
The change will be broadcast to all notes sharing the channel and note
number of the change message.
*/
virtual void polyAftertouch (int midiChannel, int midiNoteNumber, MPEValue value);
/** Request a sustain pedal press or release.
If midiChannel is a zone's master channel, this will act on all notes in
that zone; otherwise, nothing will happen.
*/
virtual void sustainPedal (int midiChannel, bool isDown);
/** Request a sostenuto pedal press or release.
If midiChannel is a zone's master channel, this will act on all notes in
that zone; otherwise, nothing will happen.
*/
virtual void sostenutoPedal (int midiChannel, bool isDown);
/** Discard all currently playing notes.
This will also call the noteReleased listener callback for all of them.
*/
void releaseAllNotes();
//==============================================================================
/** Returns the number of MPE notes currently played by the instrument. */
int getNumPlayingNotes() const noexcept;
/** Returns the note at the given index.
If there is no such note, returns an invalid MPENote. The notes are sorted
such that the most recently added note is the last element.
*/
MPENote getNote (int index) const noexcept;
/** Returns the note currently playing on the given midiChannel with the
specified initial MIDI note number, if there is such a note. Otherwise,
this returns an invalid MPENote (check with note.isValid() before use!)
*/
MPENote getNote (int midiChannel, int midiNoteNumber) const noexcept;
/** Returns the note with a given ID. */
MPENote getNoteWithID (uint16 noteID) const noexcept;
/** Returns the most recent note that is playing on the given midiChannel
(this will be the note which has received the most recent note-on without
a corresponding note-off), if there is such a note. Otherwise, this returns an
invalid MPENote (check with note.isValid() before use!)
*/
MPENote getMostRecentNote (int midiChannel) const noexcept;
/** Returns the most recent note that is not the note passed in. If there is no
such note, this returns an invalid MPENote (check with note.isValid() before use!).
This helper method might be useful for some custom voice handling algorithms.
*/
MPENote getMostRecentNoteOtherThan (MPENote otherThanThisNote) const noexcept;
//==============================================================================
/** Derive from this class to be informed about any changes in the MPE notes played
by this instrument, and any changes to its zone layout.
Note: This listener type receives its callbacks immediately, rather than
via the message thread (so you might, for example, be called on the MIDI thread).
Therefore you should never do heavy work such as graphics rendering
inside these callbacks.
*/
class JUCE_API Listener
{
public:
/** Destructor. */
virtual ~Listener() = default;
/** Implement this callback to be informed whenever a new expressive MIDI
note is triggered.
*/
virtual void noteAdded (MPENote newNote) { ignoreUnused (newNote); }
/** Implement this callback to be informed whenever a currently playing
MPE note's pressure value changes.
*/
virtual void notePressureChanged (MPENote changedNote) { ignoreUnused (changedNote); }
/** Implement this callback to be informed whenever a currently playing
MPE note's pitchbend value changes.
Note: This can happen if the note itself is bent, if there is a
master channel pitchbend event, or if both occur simultaneously.
Call MPENote::getFrequencyInHertz to get the effective note frequency.
*/
virtual void notePitchbendChanged (MPENote changedNote) { ignoreUnused (changedNote); }
/** Implement this callback to be informed whenever a currently playing
MPE note's timbre value changes.
*/
virtual void noteTimbreChanged (MPENote changedNote) { ignoreUnused (changedNote); }
/** Implement this callback to be informed whenever a currently playing
MPE note's key state (whether the key is down and/or the note is
sustained) has changed.
Note: If the key state changes to MPENote::off, noteReleased is
called instead.
*/
virtual void noteKeyStateChanged (MPENote changedNote) { ignoreUnused (changedNote); }
/** Implement this callback to be informed whenever an MPE note
is released (either by a note-off message, or by a sustain/sostenuto
pedal release for a note that already received a note-off),
and should therefore stop playing.
*/
virtual void noteReleased (MPENote finishedNote) { ignoreUnused (finishedNote); }
/** Implement this callback to be informed whenever the MPE zone layout
or legacy mode settings of this instrument have been changed.
*/
virtual void zoneLayoutChanged() {}
};
//==============================================================================
/** Adds a listener. */
void addListener (Listener* listenerToAdd);
/** Removes a listener. */
void removeListener (Listener* listenerToRemove);
//==============================================================================
/** Puts the instrument into legacy mode. If legacy mode is already enabled this method
does nothing.
As a side effect, this will discard all currently playing notes,
and call noteReleased for all of them.
This special zone layout mode is for backwards compatibility with
non-MPE MIDI devices. In this mode, the instrument will ignore the
current MPE zone layout. It will instead take a range of MIDI channels
(default: all channels 1-16) and treat them as note channels, with no
master channel. MIDI channels outside of this range will be ignored.
@param pitchbendRange The note pitchbend range in semitones to use when in legacy mode.
Must be between 0 and 96, otherwise behaviour is undefined.
The default pitchbend range in legacy mode is +/- 2 semitones.
@param channelRange The range of MIDI channels to use for notes when in legacy mode.
The default is to use all MIDI channels (1-16).
To get out of legacy mode, set a new MPE zone layout using setZoneLayout.
*/
void enableLegacyMode (int pitchbendRange = 2,
Range<int> channelRange = Range<int> (1, 17));
/** Returns true if the instrument is in legacy mode, false otherwise. */
bool isLegacyModeEnabled() const noexcept;
/** Returns the range of MIDI channels (1-16) to be used for notes when in legacy mode. */
Range<int> getLegacyModeChannelRange() const noexcept;
/** Re-sets the range of MIDI channels (1-16) to be used for notes when in legacy mode. */
void setLegacyModeChannelRange (Range<int> channelRange);
/** Returns the pitchbend range in semitones (0-96) to be used for notes when in legacy mode. */
int getLegacyModePitchbendRange() const noexcept;
/** Re-sets the pitchbend range in semitones (0-96) to be used for notes when in legacy mode. */
void setLegacyModePitchbendRange (int pitchbendRange);
protected:
//==============================================================================
CriticalSection lock;
private:
//==============================================================================
Array<MPENote> notes;
MPEZoneLayout zoneLayout;
ListenerList<Listener> listeners;
uint8 lastPressureLowerBitReceivedOnChannel[16];
uint8 lastTimbreLowerBitReceivedOnChannel[16];
bool isMemberChannelSustained[16];
struct LegacyMode
{
bool isEnabled = false;
Range<int> channelRange;
int pitchbendRange = 2;
};
struct MPEDimension
{
TrackingMode trackingMode = lastNotePlayedOnChannel;
MPEValue lastValueReceivedOnChannel[16];
MPEValue MPENote::* value;
MPEValue& getValue (MPENote& note) noexcept { return note.*(value); }
};
LegacyMode legacyMode;
MPEDimension pitchbendDimension, pressureDimension, timbreDimension;
void resetLastReceivedValues();
void updateDimension (int midiChannel, MPEDimension&, MPEValue);
void updateDimensionMaster (bool, MPEDimension&, MPEValue);
void updateDimensionForNote (MPENote&, MPEDimension&, MPEValue);
void callListenersDimensionChanged (const MPENote&, const MPEDimension&);
MPEValue getInitialValueForNewNote (int midiChannel, MPEDimension&) const;
void processMidiNoteOnMessage (const MidiMessage&);
void processMidiNoteOffMessage (const MidiMessage&);
void processMidiPitchWheelMessage (const MidiMessage&);
void processMidiChannelPressureMessage (const MidiMessage&);
void processMidiControllerMessage (const MidiMessage&);
void processMidiResetAllControllersMessage (const MidiMessage&);
void processMidiAfterTouchMessage (const MidiMessage&);
void handlePressureMSB (int midiChannel, int value) noexcept;
void handlePressureLSB (int midiChannel, int value) noexcept;
void handleTimbreMSB (int midiChannel, int value) noexcept;
void handleTimbreLSB (int midiChannel, int value) noexcept;
void handleSustainOrSostenuto (int midiChannel, bool isDown, bool isSostenuto);
const MPENote* getNotePtr (int midiChannel, int midiNoteNumber) const noexcept;
MPENote* getNotePtr (int midiChannel, int midiNoteNumber) noexcept;
const MPENote* getNotePtr (int midiChannel, TrackingMode) const noexcept;
MPENote* getNotePtr (int midiChannel, TrackingMode) noexcept;
const MPENote* getLastNotePlayedPtr (int midiChannel) const noexcept;
MPENote* getLastNotePlayedPtr (int midiChannel) noexcept;
const MPENote* getHighestNotePtr (int midiChannel) const noexcept;
MPENote* getHighestNotePtr (int midiChannel) noexcept;
const MPENote* getLowestNotePtr (int midiChannel) const noexcept;
MPENote* getLowestNotePtr (int midiChannel) noexcept;
void updateNoteTotalPitchbend (MPENote&);
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MPEInstrument)
};
} // namespace juce
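
The header above spells out the complete MPEInstrument flow: MIDI goes in through processNextMidiEvent, the instrument tracks per-channel note state according to its zone layout and tracking modes, and registered Listener objects receive the resulting callbacks. A minimal sketch of that flow follows, assuming the juce_audio_basics module is on the include path; the NoteLogger struct and the demoInstrument function are purely illustrative and not part of JUCE.

// Minimal sketch: feed MIDI into an MPEInstrument and observe the callbacks.
// Assumes the juce_audio_basics module header is available.
#include <juce_audio_basics/juce_audio_basics.h>

// Hypothetical listener that just logs note activity (not part of JUCE).
struct NoteLogger : public juce::MPEInstrument::Listener
{
    void noteAdded (juce::MPENote newNote) override
    {
        juce::Logger::writeToLog ("note on:  " + juce::String ((int) newNote.initialNote));
    }

    void noteReleased (juce::MPENote finishedNote) override
    {
        juce::Logger::writeToLog ("note off: " + juce::String ((int) finishedNote.initialNote));
    }
};

void demoInstrument()
{
    juce::MPEInstrument instrument;

    // A lower zone with 15 member channels: master channel 1, notes on channels 2-16.
    juce::MPEZoneLayout layout;
    layout.setLowerZone (15);
    instrument.setZoneLayout (layout);

    NoteLogger logger;
    instrument.addListener (&logger);

    // Track the highest held key per channel for the pressure dimension.
    instrument.setPressureTrackingMode (juce::MPEInstrument::highestNoteOnChannel);

    // Note-on on member channel 2, then channel pressure, then note-off.
    instrument.processNextMidiEvent (juce::MidiMessage::noteOn (2, 60, (juce::uint8) 100));
    instrument.processNextMidiEvent (juce::MidiMessage::channelPressureChange (2, 90));
    instrument.processNextMidiEvent (juce::MidiMessage::noteOff (2, 60, (juce::uint8) 64));

    instrument.removeListener (&logger);
}

Legacy mode works the same way: call enableLegacyMode() instead of setZoneLayout(), and the same listener callbacks fire for plain non-MPE channels.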

+ 0
- 238
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEMessages.cpp

@@ -1,238 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
MidiBuffer MPEMessages::setLowerZone (int numMemberChannels, int perNotePitchbendRange, int masterPitchbendRange)
{
auto buffer = MidiRPNGenerator::generate (1, zoneLayoutMessagesRpnNumber, numMemberChannels, false, false);
buffer.addEvents (setLowerZonePerNotePitchbendRange (perNotePitchbendRange), 0, -1, 0);
buffer.addEvents (setLowerZoneMasterPitchbendRange (masterPitchbendRange), 0, -1, 0);
return buffer;
}
MidiBuffer MPEMessages::setUpperZone (int numMemberChannels, int perNotePitchbendRange, int masterPitchbendRange)
{
auto buffer = MidiRPNGenerator::generate (16, zoneLayoutMessagesRpnNumber, numMemberChannels, false, false);
buffer.addEvents (setUpperZonePerNotePitchbendRange (perNotePitchbendRange), 0, -1, 0);
buffer.addEvents (setUpperZoneMasterPitchbendRange (masterPitchbendRange), 0, -1, 0);
return buffer;
}
MidiBuffer MPEMessages::setLowerZonePerNotePitchbendRange (int perNotePitchbendRange)
{
return MidiRPNGenerator::generate (2, 0, perNotePitchbendRange, false, false);
}
MidiBuffer MPEMessages::setUpperZonePerNotePitchbendRange (int perNotePitchbendRange)
{
return MidiRPNGenerator::generate (15, 0, perNotePitchbendRange, false, false);
}
MidiBuffer MPEMessages::setLowerZoneMasterPitchbendRange (int masterPitchbendRange)
{
return MidiRPNGenerator::generate (1, 0, masterPitchbendRange, false, false);
}
MidiBuffer MPEMessages::setUpperZoneMasterPitchbendRange (int masterPitchbendRange)
{
return MidiRPNGenerator::generate (16, 0, masterPitchbendRange, false, false);
}
MidiBuffer MPEMessages::clearLowerZone()
{
return MidiRPNGenerator::generate (1, zoneLayoutMessagesRpnNumber, 0, false, false);
}
MidiBuffer MPEMessages::clearUpperZone()
{
return MidiRPNGenerator::generate (16, zoneLayoutMessagesRpnNumber, 0, false, false);
}
MidiBuffer MPEMessages::clearAllZones()
{
MidiBuffer buffer;
buffer.addEvents (clearLowerZone(), 0, -1, 0);
buffer.addEvents (clearUpperZone(), 0, -1, 0);
return buffer;
}
MidiBuffer MPEMessages::setZoneLayout (MPEZoneLayout layout)
{
MidiBuffer buffer;
buffer.addEvents (clearAllZones(), 0, -1, 0);
auto lowerZone = layout.getLowerZone();
if (lowerZone.isActive())
buffer.addEvents (setLowerZone (lowerZone.numMemberChannels,
lowerZone.perNotePitchbendRange,
lowerZone.masterPitchbendRange),
0, -1, 0);
auto upperZone = layout.getUpperZone();
if (upperZone.isActive())
buffer.addEvents (setUpperZone (upperZone.numMemberChannels,
upperZone.perNotePitchbendRange,
upperZone.masterPitchbendRange),
0, -1, 0);
return buffer;
}
//==============================================================================
//==============================================================================
#if JUCE_UNIT_TESTS
class MPEMessagesTests : public UnitTest
{
public:
MPEMessagesTests()
: UnitTest ("MPEMessages class", UnitTestCategories::midi)
{}
void runTest() override
{
beginTest ("add zone");
{
{
MidiBuffer buffer = MPEMessages::setLowerZone (7);
const uint8 expectedBytes[] =
{
0xb0, 0x64, 0x06, 0xb0, 0x65, 0x00, 0xb0, 0x06, 0x07, // set up zone
0xb1, 0x64, 0x00, 0xb1, 0x65, 0x00, 0xb1, 0x06, 0x30, // per-note pbrange (default = 48)
0xb0, 0x64, 0x00, 0xb0, 0x65, 0x00, 0xb0, 0x06, 0x02 // master pbrange (default = 2)
};
testMidiBuffer (buffer, expectedBytes, sizeof (expectedBytes));
}
{
MidiBuffer buffer = MPEMessages::setUpperZone (5, 96, 0);
const uint8 expectedBytes[] =
{
0xbf, 0x64, 0x06, 0xbf, 0x65, 0x00, 0xbf, 0x06, 0x05, // set up zone
0xbe, 0x64, 0x00, 0xbe, 0x65, 0x00, 0xbe, 0x06, 0x60, // per-note pbrange (custom)
0xbf, 0x64, 0x00, 0xbf, 0x65, 0x00, 0xbf, 0x06, 0x00 // master pbrange (custom)
};
testMidiBuffer (buffer, expectedBytes, sizeof (expectedBytes));
}
}
beginTest ("set per-note pitchbend range");
{
MidiBuffer buffer = MPEMessages::setLowerZonePerNotePitchbendRange (96);
const uint8 expectedBytes[] = { 0xb1, 0x64, 0x00, 0xb1, 0x65, 0x00, 0xb1, 0x06, 0x60 };
testMidiBuffer (buffer, expectedBytes, sizeof (expectedBytes));
}
beginTest ("set master pitchbend range");
{
MidiBuffer buffer = MPEMessages::setUpperZoneMasterPitchbendRange (60);
const uint8 expectedBytes[] = { 0xbf, 0x64, 0x00, 0xbf, 0x65, 0x00, 0xbf, 0x06, 0x3c };
testMidiBuffer (buffer, expectedBytes, sizeof (expectedBytes));
}
beginTest ("clear all zones");
{
MidiBuffer buffer = MPEMessages::clearAllZones();
const uint8 expectedBytes[] = { 0xb0, 0x64, 0x06, 0xb0, 0x65, 0x00, 0xb0, 0x06, 0x00, // clear lower zone
0xbf, 0x64, 0x06, 0xbf, 0x65, 0x00, 0xbf, 0x06, 0x00 // clear upper zone
};
testMidiBuffer (buffer, expectedBytes, sizeof (expectedBytes));
}
beginTest ("set complete state");
{
MPEZoneLayout layout;
layout.setLowerZone (7, 96, 0);
layout.setUpperZone (7);
MidiBuffer buffer = MPEMessages::setZoneLayout (layout);
const uint8 expectedBytes[] = {
0xb0, 0x64, 0x06, 0xb0, 0x65, 0x00, 0xb0, 0x06, 0x00, // clear lower zone
0xbf, 0x64, 0x06, 0xbf, 0x65, 0x00, 0xbf, 0x06, 0x00, // clear upper zone
0xb0, 0x64, 0x06, 0xb0, 0x65, 0x00, 0xb0, 0x06, 0x07, // set lower zone
0xb1, 0x64, 0x00, 0xb1, 0x65, 0x00, 0xb1, 0x06, 0x60, // per-note pbrange (custom)
0xb0, 0x64, 0x00, 0xb0, 0x65, 0x00, 0xb0, 0x06, 0x00, // master pbrange (custom)
0xbf, 0x64, 0x06, 0xbf, 0x65, 0x00, 0xbf, 0x06, 0x07, // set upper zone
0xbe, 0x64, 0x00, 0xbe, 0x65, 0x00, 0xbe, 0x06, 0x30, // per-note pbrange (default = 48)
0xbf, 0x64, 0x00, 0xbf, 0x65, 0x00, 0xbf, 0x06, 0x02 // master pbrange (default = 2)
};
testMidiBuffer (buffer, expectedBytes, sizeof (expectedBytes));
}
}
private:
//==============================================================================
void testMidiBuffer (MidiBuffer& buffer, const uint8* expectedBytes, int expectedBytesSize)
{
uint8 actualBytes[128] = { 0 };
extractRawBinaryData (buffer, actualBytes, sizeof (actualBytes));
expectEquals (std::memcmp (actualBytes, expectedBytes, (std::size_t) expectedBytesSize), 0);
}
//==============================================================================
void extractRawBinaryData (const MidiBuffer& midiBuffer, const uint8* bufferToCopyTo, std::size_t maxBytes)
{
std::size_t pos = 0;
for (const auto metadata : midiBuffer)
{
const uint8* data = metadata.data;
std::size_t dataSize = (std::size_t) metadata.numBytes;
if (pos + dataSize > maxBytes)
return;
std::memcpy ((void*) (bufferToCopyTo + pos), data, dataSize);
pos += dataSize;
}
}
};
static MPEMessagesTests MPEMessagesUnitTests;
#endif
} // namespace juce
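
The unit test above verifies the generated RPN sequences byte-for-byte by walking the MidiBuffer. The same iteration pattern can be used outside of a test, for example to log what MPEMessages::setLowerZone would send to a device. This is only a sketch assuming juce_audio_basics is available; dumpLowerZoneSetup is an illustrative name.

// Sketch: dump the raw bytes produced by MPEMessages::setLowerZone,
// mirroring the extractRawBinaryData helper in the unit test above.
#include <juce_audio_basics/juce_audio_basics.h>

void dumpLowerZoneSetup()
{
    const juce::MidiBuffer buffer = juce::MPEMessages::setLowerZone (7); // 7 member channels

    juce::String hex;

    for (const auto metadata : buffer)                                 // iterates MidiMessageMetadata
        hex << juce::String::toHexString (metadata.data,               // raw bytes of this event
                                          metadata.numBytes) << ' ';

    juce::Logger::writeToLog ("MPE lower zone setup: " + hex);
}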

+ 0
- 116
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEMessages.h

@@ -1,116 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
This helper class contains the necessary helper functions to generate
MIDI messages that are exclusive to MPE, such as defining the upper and lower
MPE zones and setting per-note and master pitchbend ranges.
You can then send them to your MPE device using MidiOutput::sendBlockOfMessagesNow.
All other MPE messages like per-note pitchbend, pressure, and third
dimension, are ordinary MIDI messages that should be created using the MidiMessage
class instead. You just need to take care to send them to the appropriate
per-note MIDI channel.
Note: If you are working with an MPEZoneLayout object inside your app,
you should not use the message sequences provided here. Instead, you should
change the zone layout programmatically with the member functions provided in the
MPEZoneLayout class itself. You should also make sure that the Expressive
MIDI zone layout of your C++ code and of the MPE device are kept in sync.
@see MidiMessage, MPEZoneLayout
@tags{Audio}
*/
class JUCE_API MPEMessages
{
public:
/** Returns the sequence of MIDI messages that, if sent to an Expressive
MIDI device, will set the lower MPE zone.
*/
static MidiBuffer setLowerZone (int numMemberChannels = 0,
int perNotePitchbendRange = 48,
int masterPitchbendRange = 2);
/** Returns the sequence of MIDI messages that, if sent to an Expressive
MIDI device, will set the upper MPE zone.
*/
static MidiBuffer setUpperZone (int numMemberChannels = 0,
int perNotePitchbendRange = 48,
int masterPitchbendRange = 2);
/** Returns the sequence of MIDI messages that, if sent to an Expressive
MIDI device, will set the per-note pitchbend range of the lower MPE zone.
*/
static MidiBuffer setLowerZonePerNotePitchbendRange (int perNotePitchbendRange = 48);
/** Returns the sequence of MIDI messages that, if sent to an Expressive
MIDI device, will set the per-note pitchbend range of the upper MPE zone.
*/
static MidiBuffer setUpperZonePerNotePitchbendRange (int perNotePitchbendRange = 48);
/** Returns the sequence of MIDI messages that, if sent to an Expressive
MIDI device, will set the master pitchbend range of the lower MPE zone.
*/
static MidiBuffer setLowerZoneMasterPitchbendRange (int masterPitchbendRange = 2);
/** Returns the sequence of MIDI messages that, if sent to an Expressive
MIDI device, will set the master pitchbend range of the upper MPE zone.
*/
static MidiBuffer setUpperZoneMasterPitchbendRange (int masterPitchbendRange = 2);
/** Returns the sequence of MIDI messages that, if sent to an Expressive
MIDI device, will clear the lower zone.
*/
static MidiBuffer clearLowerZone();
/** Returns the sequence of MIDI messages that, if sent to an Expressive
MIDI device, will clear the upper zone.
*/
static MidiBuffer clearUpperZone();
/** Returns the sequence of MIDI messages that, if sent to an Expressive
MIDI device, will clear the lower and upper zones.
*/
static MidiBuffer clearAllZones();
/** Returns the sequence of MIDI messages that, if sent to an Expressive
MIDI device, will reset the whole MPE zone layout of the
device to the layout passed in. This will first clear the current lower and upper
zones, then set the zones contained in the passed-in zone layout, and set their
per-note and master pitchbend ranges to their current values.
*/
static MidiBuffer setZoneLayout (MPEZoneLayout layout);
/** The RPN number used for MPE zone layout messages.
Pitchbend range messages (both per-note and master) are instead sent
on RPN 0 as in standard MIDI 1.0.
*/
static const int zoneLayoutMessagesRpnNumber = 6;
};
} // namespace juce
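
As the class comment says, these buffers are intended for an Expressive MIDI device, for example via MidiOutput::sendBlockOfMessagesNow. The sketch below assumes the juce_audio_devices module is available for MidiOutput and simply targets the first available output; sendZoneLayoutToFirstDevice is an illustrative name.

// Sketch: push a complete MPE zone layout to a hardware device.
// Assumes the juce_audio_devices module (for MidiOutput) is available.
#include <juce_audio_devices/juce_audio_devices.h>

void sendZoneLayoutToFirstDevice()
{
    juce::MPEZoneLayout layout;
    layout.setLowerZone (15, 48, 2);   // 15 member channels, default pitchbend ranges

    const auto devices = juce::MidiOutput::getAvailableDevices();

    if (devices.isEmpty())
        return;                        // no MIDI output connected

    if (auto output = juce::MidiOutput::openDevice (devices[0].identifier))
        output->sendBlockOfMessagesNow (juce::MPEMessages::setZoneLayout (layout));
}

Remember the caveat above: if your own code also keeps an MPEZoneLayout, update it through the MPEZoneLayout member functions so that it stays in sync with what you send to the device.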

+ 0
- 127
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPENote.cpp

@@ -1,127 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
namespace
{
uint16 generateNoteID (int midiChannel, int midiNoteNumber) noexcept
{
jassert (midiChannel > 0 && midiChannel <= 16);
jassert (midiNoteNumber >= 0 && midiNoteNumber < 128);
return uint16 ((midiChannel << 7) + midiNoteNumber);
}
}
//==============================================================================
MPENote::MPENote (int midiChannel_,
int initialNote_,
MPEValue noteOnVelocity_,
MPEValue pitchbend_,
MPEValue pressure_,
MPEValue timbre_,
KeyState keyState_) noexcept
: noteID (generateNoteID (midiChannel_, initialNote_)),
midiChannel (uint8 (midiChannel_)),
initialNote (uint8 (initialNote_)),
noteOnVelocity (noteOnVelocity_),
pitchbend (pitchbend_),
pressure (pressure_),
initialTimbre (timbre_),
timbre (timbre_),
keyState (keyState_)
{
jassert (keyState != MPENote::off);
jassert (isValid());
}
MPENote::MPENote() noexcept {}
//==============================================================================
bool MPENote::isValid() const noexcept
{
return midiChannel > 0 && midiChannel <= 16 && initialNote < 128;
}
//==============================================================================
double MPENote::getFrequencyInHertz (double frequencyOfA) const noexcept
{
auto pitchInSemitones = double (initialNote) + totalPitchbendInSemitones;
return frequencyOfA * std::pow (2.0, (pitchInSemitones - 69.0) / 12.0);
}
//==============================================================================
bool MPENote::operator== (const MPENote& other) const noexcept
{
jassert (isValid() && other.isValid());
return noteID == other.noteID;
}
bool MPENote::operator!= (const MPENote& other) const noexcept
{
jassert (isValid() && other.isValid());
return noteID != other.noteID;
}
//==============================================================================
//==============================================================================
#if JUCE_UNIT_TESTS
class MPENoteTests : public UnitTest
{
public:
MPENoteTests()
: UnitTest ("MPENote class", UnitTestCategories::midi)
{}
//==============================================================================
void runTest() override
{
beginTest ("getFrequencyInHertz");
{
MPENote note;
note.initialNote = 60;
note.totalPitchbendInSemitones = -0.5;
expectEqualsWithinOneCent (note.getFrequencyInHertz(), 254.178);
}
}
private:
//==============================================================================
void expectEqualsWithinOneCent (double frequencyInHertzActual,
double frequencyInHertzExpected)
{
double ratio = frequencyInHertzActual / frequencyInHertzExpected;
double oneCent = 1.0005946;
expect (ratio < oneCent);
expect (ratio > 1.0 / oneCent);
}
};
static MPENoteTests MPENoteUnitTests;
#endif
} // namespace juce
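
The unit test above expects roughly 254.18 Hz for MIDI note 60 bent down by half a semitone, which matches the formula worked by hand: 440 * 2^((60 - 0.5 - 69) / 12) is approximately 254.178 Hz. A self-contained sketch of the same arithmetic, using only the C++ standard library:

// Standalone check of the getFrequencyInHertz formula used above:
// frequency = frequencyOfA * 2^((initialNote + totalPitchbendInSemitones - 69) / 12)
#include <cmath>
#include <cstdio>

int main()
{
    const double frequencyOfA = 440.0;
    const double initialNote = 60.0;               // middle C
    const double totalPitchbendInSemitones = -0.5; // bent down a quarter tone

    const double pitchInSemitones = initialNote + totalPitchbendInSemitones;
    const double hz = frequencyOfA * std::pow (2.0, (pitchInSemitones - 69.0) / 12.0);

    std::printf ("%.3f Hz\n", hz);  // prints approximately 254.178
    return 0;
}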

+ 0
- 184
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPENote.h

@@ -1,184 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
This struct represents a playing MPE note.
A note is identified by a unique ID, or alternatively, by a MIDI channel
and an initial note. It is characterised by five dimensions of continuous
expressive control. Their current values are represented as
MPEValue objects.
@see MPEValue
@tags{Audio}
*/
struct JUCE_API MPENote
{
//==============================================================================
/** Possible values for the note key state. */
enum KeyState
{
off = 0, /**< The key is up (off). */
keyDown = 1, /**< The note key is currently down (pressed). */
sustained = 2, /**< The note is sustained (by a sustain or sostenuto pedal). */
keyDownAndSustained = 3 /**< The note key is down and sustained (by a sustain or sostenuto pedal). */
};
//==============================================================================
/** Constructor.
@param midiChannel The MIDI channel of the note, between 2 and 15.
(Channel 1 and channel 16 can never be note channels in MPE).
@param initialNote The MIDI note number, between 0 and 127.
@param velocity The note-on velocity of the note.
@param pitchbend The initial per-note pitchbend of the note.
@param pressure The initial pressure of the note.
@param timbre The timbre value of the note.
@param keyState The key state of the note (whether the key is down
and/or the note is sustained). This value must not
be MPENote::off, since you are triggering a new note.
(If not specified, the default value will be MPENote::keyDown.)
*/
MPENote (int midiChannel,
int initialNote,
MPEValue velocity,
MPEValue pitchbend,
MPEValue pressure,
MPEValue timbre,
KeyState keyState = MPENote::keyDown) noexcept;
/** Default constructor.
Constructs an invalid MPE note (a note with the key state MPENote::off
and an invalid MIDI channel). The only allowed use for such a note is to
call isValid() on it; everything else is undefined behaviour.
*/
MPENote() noexcept;
/** Checks whether the MPE note is valid. */
bool isValid() const noexcept;
//==============================================================================
// Invariants that define the note.
/** A unique ID. Useful to distinguish the note from other simultaneously
sounding notes that may use the same note number or MIDI channel.
This should never change during the lifetime of a note object.
*/
uint16 noteID = 0;
/** The MIDI channel which this note uses.
This should never change during the lifetime of an MPENote object.
*/
uint8 midiChannel = 0;
/** The MIDI note number that was sent when the note was triggered.
This should never change during the lifetime of an MPENote object.
*/
uint8 initialNote = 0;
//==============================================================================
// The five dimensions of continuous expressive control
/** The velocity ("strike") of the note-on.
This dimension will stay constant after the note has been turned on.
*/
MPEValue noteOnVelocity { MPEValue::minValue() };
/** Current per-note pitchbend of the note (in units of MIDI pitchwheel
position). This dimension can be modulated while the note sounds.
Note: This value is not aware of the currently used pitchbend range,
or an additional master pitchbend that may be simultaneously applied.
To compute the actual effective pitchbend of an MPENote, you should
probably use the member totalPitchbendInSemitones instead.
@see totalPitchbendInSemitones, getFrequencyInHertz
*/
MPEValue pitchbend { MPEValue::centreValue() };
/** Current pressure with which the note is held down.
This dimension can be modulated while the note sounds.
*/
MPEValue pressure { MPEValue::centreValue() };
/** Initial value of timbre when the note was triggered.
This should never change during the lifetime of an MPENote object.
*/
MPEValue initialTimbre { MPEValue::centreValue() };
/** Current value of the note's third expressive dimension, typically
encoding some kind of timbre parameter.
This dimension can be modulated while the note sounds.
*/
MPEValue timbre { MPEValue::centreValue() };
/** The release velocity ("lift") of the note after a note-off has been
received.
This dimension will only have a meaningful value after a note-off has
been received for the note (and keyState is set to MPENote::off or
MPENote::sustained). Initially, the value is undefined.
*/
MPEValue noteOffVelocity { MPEValue::minValue() };
//==============================================================================
/** Current effective pitchbend of the note in units of semitones, relative
to initialNote. You should use this to compute the actual effective pitch
of the note. This value is computed and set by an MPEInstrument to the
sum of the per-note pitchbend value (stored in MPEValue::pitchbend)
and the master pitchbend of the MPE zone, weighted with the per-note
pitchbend range and master pitchbend range of the zone, respectively.
@see getFrequencyInHertz
*/
double totalPitchbendInSemitones;
/** Current key state. Indicates whether the note key is currently down (pressed)
and/or the note is sustained (by a sustain or sostenuto pedal).
*/
KeyState keyState { MPENote::off };
//==============================================================================
/** Returns the current frequency of the note in Hertz. This is the sum of
the initialNote and the totalPitchbendInSemitones, converted to Hertz.
*/
double getFrequencyInHertz (double frequencyOfA = 440.0) const noexcept;
/** Returns true if two notes are the same, determined by their unique ID. */
bool operator== (const MPENote& other) const noexcept;
/** Returns true if two notes are different notes, determined by their unique ID. */
bool operator!= (const MPENote& other) const noexcept;
};
} // namespace juce
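
The constructor documented above takes all the expressive dimensions as MPEValue objects. The following sketch builds a valid note by hand, something an MPEInstrument normally does for you; makeExampleNote and the chosen values are purely illustrative.

// Sketch: construct an MPENote directly, as documented in the header above.
// (In normal use an MPEInstrument creates these for you.)
#include <juce_audio_basics/juce_audio_basics.h>

juce::MPENote makeExampleNote()
{
    juce::MPENote note (3,                                   // member channel (2-15 in MPE)
                        64,                                  // initial MIDI note number
                        juce::MPEValue::from7BitInt (100),   // note-on velocity ("strike")
                        juce::MPEValue::centreValue(),       // per-note pitchbend (centred)
                        juce::MPEValue::from7BitInt (64),    // pressure
                        juce::MPEValue::centreValue());      // timbre; keyState defaults to keyDown

    jassert (note.isValid());
    return note;
}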

+ 0
- 341
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPESynthesiser.cpp

@@ -1,341 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
MPESynthesiser::MPESynthesiser()
{
}
MPESynthesiser::MPESynthesiser (MPEInstrument& mpeInstrument)
: MPESynthesiserBase (mpeInstrument)
{
}
MPESynthesiser::~MPESynthesiser()
{
}
//==============================================================================
void MPESynthesiser::startVoice (MPESynthesiserVoice* voice, MPENote noteToStart)
{
jassert (voice != nullptr);
voice->currentlyPlayingNote = noteToStart;
voice->noteOnTime = lastNoteOnCounter++;
voice->noteStarted();
}
void MPESynthesiser::stopVoice (MPESynthesiserVoice* voice, MPENote noteToStop, bool allowTailOff)
{
jassert (voice != nullptr);
voice->currentlyPlayingNote = noteToStop;
voice->noteStopped (allowTailOff);
}
//==============================================================================
void MPESynthesiser::noteAdded (MPENote newNote)
{
const ScopedLock sl (voicesLock);
if (auto* voice = findFreeVoice (newNote, shouldStealVoices))
startVoice (voice, newNote);
}
void MPESynthesiser::notePressureChanged (MPENote changedNote)
{
const ScopedLock sl (voicesLock);
for (auto* voice : voices)
{
if (voice->isCurrentlyPlayingNote (changedNote))
{
voice->currentlyPlayingNote = changedNote;
voice->notePressureChanged();
}
}
}
void MPESynthesiser::notePitchbendChanged (MPENote changedNote)
{
const ScopedLock sl (voicesLock);
for (auto* voice : voices)
{
if (voice->isCurrentlyPlayingNote (changedNote))
{
voice->currentlyPlayingNote = changedNote;
voice->notePitchbendChanged();
}
}
}
void MPESynthesiser::noteTimbreChanged (MPENote changedNote)
{
const ScopedLock sl (voicesLock);
for (auto* voice : voices)
{
if (voice->isCurrentlyPlayingNote (changedNote))
{
voice->currentlyPlayingNote = changedNote;
voice->noteTimbreChanged();
}
}
}
void MPESynthesiser::noteKeyStateChanged (MPENote changedNote)
{
const ScopedLock sl (voicesLock);
for (auto* voice : voices)
{
if (voice->isCurrentlyPlayingNote (changedNote))
{
voice->currentlyPlayingNote = changedNote;
voice->noteKeyStateChanged();
}
}
}
void MPESynthesiser::noteReleased (MPENote finishedNote)
{
const ScopedLock sl (voicesLock);
for (auto i = voices.size(); --i >= 0;)
{
auto* voice = voices.getUnchecked (i);
if (voice->isCurrentlyPlayingNote (finishedNote))
stopVoice (voice, finishedNote, true);
}
}
void MPESynthesiser::setCurrentPlaybackSampleRate (const double newRate)
{
MPESynthesiserBase::setCurrentPlaybackSampleRate (newRate);
const ScopedLock sl (voicesLock);
turnOffAllVoices (false);
for (auto i = voices.size(); --i >= 0;)
voices.getUnchecked (i)->setCurrentSampleRate (newRate);
}
void MPESynthesiser::handleMidiEvent (const MidiMessage& m)
{
if (m.isController())
handleController (m.getChannel(), m.getControllerNumber(), m.getControllerValue());
else if (m.isProgramChange())
handleProgramChange (m.getChannel(), m.getProgramChangeNumber());
MPESynthesiserBase::handleMidiEvent (m);
}
MPESynthesiserVoice* MPESynthesiser::findFreeVoice (MPENote noteToFindVoiceFor, bool stealIfNoneAvailable) const
{
const ScopedLock sl (voicesLock);
for (auto* voice : voices)
{
if (! voice->isActive())
return voice;
}
if (stealIfNoneAvailable)
return findVoiceToSteal (noteToFindVoiceFor);
return nullptr;
}
MPESynthesiserVoice* MPESynthesiser::findVoiceToSteal (MPENote noteToStealVoiceFor) const
{
// This voice-stealing algorithm applies the following heuristics:
// - Re-use the oldest notes first
// - Protect the lowest & topmost notes, even if sustained, but not if they've been released.
// apparently you are trying to render audio without having any voices...
jassert (voices.size() > 0);
// These are the voices we want to protect (ie: only steal if unavoidable)
MPESynthesiserVoice* low = nullptr; // Lowest sounding note, might be sustained, but NOT in release phase
MPESynthesiserVoice* top = nullptr; // Highest sounding note, might be sustained, but NOT in release phase
// this is a list of voices we can steal, sorted by how long they've been running
Array<MPESynthesiserVoice*> usableVoices;
usableVoices.ensureStorageAllocated (voices.size());
for (auto* voice : voices)
{
jassert (voice->isActive()); // We wouldn't be here otherwise
usableVoices.add (voice);
// NB: Using a functor rather than a lambda here due to scare-stories about
// compilers generating code containing heap allocations..
struct Sorter
{
bool operator() (const MPESynthesiserVoice* a, const MPESynthesiserVoice* b) const noexcept { return a->noteOnTime < b->noteOnTime; }
};
std::sort (usableVoices.begin(), usableVoices.end(), Sorter());
if (! voice->isPlayingButReleased()) // Don't protect released notes
{
auto noteNumber = voice->getCurrentlyPlayingNote().initialNote;
if (low == nullptr || noteNumber < low->getCurrentlyPlayingNote().initialNote)
low = voice;
if (top == nullptr || noteNumber > top->getCurrentlyPlayingNote().initialNote)
top = voice;
}
}
// Eliminate pathological cases (ie: only 1 note playing): we always give precedence to the lowest note(s)
if (top == low)
top = nullptr;
// If we want to re-use the voice to trigger a new note,
// then the oldest voice that's playing the same note number is ideal.
if (noteToStealVoiceFor.isValid())
for (auto* voice : usableVoices)
if (voice->getCurrentlyPlayingNote().initialNote == noteToStealVoiceFor.initialNote)
return voice;
// Oldest voice that has been released (no finger on it and not held by sustain pedal)
for (auto* voice : usableVoices)
if (voice != low && voice != top && voice->isPlayingButReleased())
return voice;
// Oldest voice that doesn't have a finger on it:
for (auto* voice : usableVoices)
if (voice != low && voice != top
&& voice->getCurrentlyPlayingNote().keyState != MPENote::keyDown
&& voice->getCurrentlyPlayingNote().keyState != MPENote::keyDownAndSustained)
return voice;
// Oldest voice that isn't protected
for (auto* voice : usableVoices)
if (voice != low && voice != top)
return voice;
// We've only got "protected" voices now: lowest note takes priority
jassert (low != nullptr);
// Duophonic synth: give priority to the bass note:
if (top != nullptr)
return top;
return low;
}
//==============================================================================
void MPESynthesiser::addVoice (MPESynthesiserVoice* const newVoice)
{
const ScopedLock sl (voicesLock);
newVoice->setCurrentSampleRate (getSampleRate());
voices.add (newVoice);
}
void MPESynthesiser::clearVoices()
{
const ScopedLock sl (voicesLock);
voices.clear();
}
MPESynthesiserVoice* MPESynthesiser::getVoice (const int index) const
{
const ScopedLock sl (voicesLock);
return voices [index];
}
void MPESynthesiser::removeVoice (const int index)
{
const ScopedLock sl (voicesLock);
voices.remove (index);
}
void MPESynthesiser::reduceNumVoices (const int newNumVoices)
{
// we can't possibly get to a negative number of voices...
jassert (newNumVoices >= 0);
const ScopedLock sl (voicesLock);
while (voices.size() > newNumVoices)
{
if (auto* voice = findFreeVoice ({}, true))
voices.removeObject (voice);
else
voices.remove (0); // if there's no voice to steal, kill the oldest voice
}
}
void MPESynthesiser::turnOffAllVoices (bool allowTailOff)
{
{
const ScopedLock sl (voicesLock);
// first turn off all voices (it's more efficient to do this immediately
// rather than to go through the MPEInstrument for this).
for (auto* voice : voices)
{
voice->currentlyPlayingNote.noteOffVelocity = MPEValue::from7BitInt (64); // some reasonable number
voice->currentlyPlayingNote.keyState = MPENote::off;
voice->noteStopped (allowTailOff);
}
}
// finally make sure the MPE Instrument also doesn't have any notes anymore.
instrument.releaseAllNotes();
}
//==============================================================================
void MPESynthesiser::renderNextSubBlock (AudioBuffer<float>& buffer, int startSample, int numSamples)
{
const ScopedLock sl (voicesLock);
for (auto* voice : voices)
{
if (voice->isActive())
voice->renderNextBlock (buffer, startSample, numSamples);
}
}
void MPESynthesiser::renderNextSubBlock (AudioBuffer<double>& buffer, int startSample, int numSamples)
{
const ScopedLock sl (voicesLock);
for (auto* voice : voices)
{
if (voice->isActive())
voice->renderNextBlock (buffer, startSample, numSamples);
}
}
} // namespace juce
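
turnOffAllVoices and reduceNumVoices are the public entry points for voice housekeeping; the header below notes that turnOffAllVoices is the method to wire to a MIDI panic control. A small sketch follows, assuming an MPESynthesiser that already has voices added; applyPanicAndTrim is an illustrative name.

// Sketch: voice housekeeping on an existing MPESynthesiser.
// Assumes `synth` already had voices added via addVoice().
#include <juce_audio_basics/juce_audio_basics.h>

void applyPanicAndTrim (juce::MPESynthesiser& synth)
{
    synth.setVoiceStealingEnabled (true); // steal the oldest suitable voice when none are free

    synth.reduceNumVoices (8);            // repeatedly steals/removes voices down to 8

    synth.turnOffAllVoices (false);       // MIDI panic: cut every note immediately, no tail-off
}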

+ 0
- 311
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPESynthesiser.h

@@ -1,311 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
Base class for an MPE-compatible musical device that can play sounds.
This class extends MPESynthesiserBase by adding the concept of voices,
each of which can play a sound triggered by a MPENote that can be modulated
by MPE dimensions like pressure, pitchbend, and timbre, while the note is
sounding.
To create a synthesiser, you'll need to create a subclass of MPESynthesiserVoice
which can play back one of these sounds at a time.
Then you can use the addVoice() methods to give the synthesiser a set of voices
it can use to play notes. If you only give it one voice it will be monophonic -
the more voices it has, the more polyphony it'll have available.
Then repeatedly call the renderNextBlock() method to produce the audio (inherited
from MPESynthesiserBase). The voices will be started, stopped, and modulated
automatically, based on the MPE/MIDI messages that the synthesiser receives.
Before rendering, be sure to call the setCurrentPlaybackSampleRate() to tell it
what the target playback rate is. This value is passed on to the voices so that
they can pitch their output correctly.
@see MPESynthesiserBase, MPESynthesiserVoice, MPENote, MPEInstrument
@tags{Audio}
*/
class JUCE_API MPESynthesiser : public MPESynthesiserBase
{
public:
//==============================================================================
/** Constructor.
You'll need to add some voices before it'll make any sound.
@see addVoice
*/
MPESynthesiser();
/** Constructor to pass to the synthesiser a custom MPEInstrument object
to handle the MPE note state, MIDI channel assignment etc.
(in case you need custom logic for this that goes beyond MIDI and MPE).
@see MPESynthesiserBase, MPEInstrument
*/
MPESynthesiser (MPEInstrument& instrumentToUse);
/** Destructor. */
~MPESynthesiser() override;
//==============================================================================
/** Deletes all voices. */
void clearVoices();
/** Returns the number of voices that have been added. */
int getNumVoices() const noexcept { return voices.size(); }
/** Returns one of the voices that have been added. */
MPESynthesiserVoice* getVoice (int index) const;
/** Adds a new voice to the synth.
All the voices should be the same class of object and are treated equally.
The object passed in will be managed by the synthesiser, which will delete
it later on when no longer needed. The caller should not retain a pointer to the
voice.
*/
void addVoice (MPESynthesiserVoice* newVoice);
/** Deletes one of the voices. */
void removeVoice (int index);
/** Reduces the number of voices to newNumVoices.
This will repeatedly call findVoiceToSteal() and remove that voice, until
the total number of voices equals newNumVoices. If newNumVoices is greater than
or equal to the current number of voices, this method does nothing.
*/
void reduceNumVoices (int newNumVoices);
/** Release all MPE notes and turn off all voices.
If allowTailOff is true, the voices will be allowed to fade out the notes gracefully
(if they can do). If this is false, the notes will all be cut off immediately.
This method is meant to be called by the user, for example to implement
a MIDI panic button in a synth.
*/
virtual void turnOffAllVoices (bool allowTailOff);
//==============================================================================
/** If set to true, then the synth will try to take over an existing voice if
it runs out and needs to play another note.
The value of this boolean is passed into findFreeVoice(), so the result will
depend on the implementation of this method.
*/
void setVoiceStealingEnabled (bool shouldSteal) noexcept { shouldStealVoices = shouldSteal; }
/** Returns true if note-stealing is enabled. */
bool isVoiceStealingEnabled() const noexcept { return shouldStealVoices; }
//==============================================================================
/** Tells the synthesiser what the sample rate is for the audio it's being used to render.
This overrides the implementation in MPESynthesiserBase, to additionally
propagate the new value to the voices so that they can use it to render the correct
pitches.
*/
void setCurrentPlaybackSampleRate (double newRate) override;
//==============================================================================
/** Handle incoming MIDI events.
This method will be called automatically according to the MIDI data passed
into renderNextBlock(), but you can also call it yourself to manually
inject MIDI events.
This implementation forwards program change messages and non-MPE-related
controller messages to handleProgramChange and handleController, respectively,
and then simply calls through to MPESynthesiserBase::handleMidiEvent to deal
with MPE-related MIDI messages used for MPE notes, zones etc.
This method can be overridden further if you need to do custom MIDI
handling on top of what is provided here.
*/
void handleMidiEvent (const MidiMessage&) override;
/** Callback for MIDI controller messages. The default implementation
provided here does nothing; override this method if you need custom
MIDI controller handling on top of MPE.
This method will be called automatically according to the midi data passed into
renderNextBlock().
*/
virtual void handleController (int /*midiChannel*/,
int /*controllerNumber*/,
int /*controllerValue*/) {}
/** Callback for MIDI program change messages. The default implementation
provided here does nothing; override this method if you need to handle
those messages.
This method will be called automatically according to the midi data passed into
renderNextBlock().
*/
virtual void handleProgramChange (int /*midiChannel*/,
int /*programNumber*/) {}
protected:
//==============================================================================
/** Attempts to start playing a new note.
The default method here will find a free voice that is appropriate for
playing the given MPENote, and use that voice to start playing the sound.
If isNoteStealingEnabled returns true (set this by calling setNoteStealingEnabled),
the synthesiser will use the voice stealing algorithm to find a free voice for
the note (if no voices are free otherwise).
This method will be called automatically according to the midi data passed into
renderNextBlock(). Do not call it yourself, otherwise the internal MPE note state
will become inconsistent.
*/
void noteAdded (MPENote newNote) override;
/** Stops playing a note.
This will be called whenever an MPE note is released (either by a note-off message,
or by a sustain/sostenuto pedal release for a note that already received a note-off),
and should therefore stop playing.
This will find any voice that is currently playing finishedNote,
turn its currently playing note off, and call its noteStopped callback.
This method will be called automatically according to the midi data passed into
renderNextBlock(). Do not call it yourself, otherwise the internal MPE note state
will become inconsistent.
*/
void noteReleased (MPENote finishedNote) override;
/** Will find any voice that is currently playing changedNote, update its
currently playing note, and call its notePressureChanged method.
This method will be called automatically according to the midi data passed into
renderNextBlock(). Do not call it yourself.
*/
void notePressureChanged (MPENote changedNote) override;
/** Will find any voice that is currently playing changedNote, update its
currently playing note, and call its notePitchbendChanged method.
This method will be called automatically according to the midi data passed into
renderNextBlock(). Do not call it yourself.
*/
void notePitchbendChanged (MPENote changedNote) override;
/** Will find any voice that is currently playing changedNote, update its
currently playing note, and call its noteTimbreChanged method.
This method will be called automatically according to the midi data passed into
renderNextBlock(). Do not call it yourself.
*/
void noteTimbreChanged (MPENote changedNote) override;
/** Will find any voice that is currently playing changedNote, update its
currently playing note, and call its noteKeyStateChanged method.
This method will be called automatically according to the midi data passed into
renderNextBlock(). Do not call it yourself.
*/
void noteKeyStateChanged (MPENote changedNote) override;
//==============================================================================
/** This will simply call renderNextBlock for each currently active
voice and fill the buffer with the sum.
Override this method if you need to do more work to render your audio.
*/
void renderNextSubBlock (AudioBuffer<float>& outputAudio,
int startSample,
int numSamples) override;
/** This will simply call renderNextBlock for each currently active
voice and fill the buffer with the sum. (double-precision version)
Override this method if you need to do more work to render your audio.
*/
void renderNextSubBlock (AudioBuffer<double>& outputAudio,
int startSample,
int numSamples) override;
//==============================================================================
/** Searches through the voices to find one that's not currently playing, and
which can play the given MPE note.
If all voices are active and stealIfNoneAvailable is false, this returns
a nullptr. If all voices are active and stealIfNoneAvailable is true,
this will call findVoiceToSteal() to find a voice.
If you need to find a free voice for something else than playing a note
(e.g. for deleting it), you can pass an invalid (default-constructed) MPENote.
*/
virtual MPESynthesiserVoice* findFreeVoice (MPENote noteToFindVoiceFor,
bool stealIfNoneAvailable) const;
/** Chooses a voice that is most suitable for being re-used to play a new
note, or for being deleted by reduceNumVoices.
The default method will attempt to find the oldest voice that isn't the
bottom or top note being played. If that's not suitable for your synth,
you can override this method and do something more cunning instead.
If you pass a valid MPENote for the optional argument, then the note number
of that note will be taken into account for finding the ideal voice to steal.
If you pass an invalid (default-constructed) MPENote instead, this part of
the algorithm will be ignored.
*/
virtual MPESynthesiserVoice* findVoiceToSteal (MPENote noteToStealVoiceFor = MPENote()) const;
/** Starts a specified voice and tells it to play a particular MPENote.
You should never need to call this, it's called internally by
MPESynthesiserBase::instrument via the noteStarted callback,
but is protected in case it's useful for some custom subclasses.
*/
void startVoice (MPESynthesiserVoice* voice, MPENote noteToStart);
/** Stops a given voice and tells it to stop playing a particular MPENote
(which should be the same note it is actually playing).
You should never need to call this, it's called internally by
MPESynthesiserBase::instrument via the noteReleased callback,
but is protected in case it's useful for some custom subclasses.
*/
void stopVoice (MPESynthesiserVoice* voice, MPENote noteToStop, bool allowTailOff);
//==============================================================================
OwnedArray<MPESynthesiserVoice> voices;
CriticalSection voicesLock;
private:
//==============================================================================
std::atomic<bool> shouldStealVoices { false };
uint32 lastNoteOnCounter = 0;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MPESynthesiser)
};
} // namespace juce
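
Putting the pieces together needs an MPESynthesiserVoice subclass (declared in juce_MPESynthesiserVoice.h, which is not part of this hunk). The sketch below assumes the voice callbacks invoked by the .cpp above (noteStarted, noteStopped, the three expression callbacks, noteKeyStateChanged and renderNextBlock) are the ones to override; SilentVoice and prepareAndRender are purely illustrative, and a real voice would generate audio in renderNextBlock.

// Sketch: a do-nothing voice plus the basic synthesiser wiring.
// SilentVoice is hypothetical; a real voice would write audio into the buffer.
#include <juce_audio_basics/juce_audio_basics.h>

struct SilentVoice : public juce::MPESynthesiserVoice
{
    void noteStarted() override {}
    void noteStopped (bool /*allowTailOff*/) override { clearCurrentNote(); }
    void notePressureChanged() override {}
    void notePitchbendChanged() override {}
    void noteTimbreChanged() override {}
    void noteKeyStateChanged() override {}

    void renderNextBlock (juce::AudioBuffer<float>&, int /*startSample*/, int /*numSamples*/) override
    {
        // A real voice would add its output into the buffer here, using
        // getCurrentlyPlayingNote() for pitch, pressure and timbre.
    }
};

void prepareAndRender (juce::AudioBuffer<float>& output, const juce::MidiBuffer& midi, double sampleRate)
{
    juce::MPESynthesiser synth;

    for (int i = 0; i < 8; ++i)
        synth.addVoice (new SilentVoice()); // the synthesiser takes ownership

    synth.setVoiceStealingEnabled (true);
    synth.setCurrentPlaybackSampleRate (sampleRate); // must be set before rendering

    output.clear();
    synth.renderNextBlock (output, midi, 0, output.getNumSamples());
}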

+ 0
- 375
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPESynthesiserBase.cpp

@@ -1,375 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
MPESynthesiserBase::MPESynthesiserBase()
: instrument (defaultInstrument)
{
instrument.addListener (this);
}
MPESynthesiserBase::MPESynthesiserBase (MPEInstrument& inst)
: instrument (inst)
{
instrument.addListener (this);
}
//==============================================================================
MPEZoneLayout MPESynthesiserBase::getZoneLayout() const noexcept
{
return instrument.getZoneLayout();
}
void MPESynthesiserBase::setZoneLayout (MPEZoneLayout newLayout)
{
instrument.setZoneLayout (newLayout);
}
//==============================================================================
void MPESynthesiserBase::enableLegacyMode (int pitchbendRange, Range<int> channelRange)
{
instrument.enableLegacyMode (pitchbendRange, channelRange);
}
bool MPESynthesiserBase::isLegacyModeEnabled() const noexcept
{
return instrument.isLegacyModeEnabled();
}
Range<int> MPESynthesiserBase::getLegacyModeChannelRange() const noexcept
{
return instrument.getLegacyModeChannelRange();
}
void MPESynthesiserBase::setLegacyModeChannelRange (Range<int> channelRange)
{
instrument.setLegacyModeChannelRange (channelRange);
}
int MPESynthesiserBase::getLegacyModePitchbendRange() const noexcept
{
return instrument.getLegacyModePitchbendRange();
}
void MPESynthesiserBase::setLegacyModePitchbendRange (int pitchbendRange)
{
instrument.setLegacyModePitchbendRange (pitchbendRange);
}
//==============================================================================
void MPESynthesiserBase::setPressureTrackingMode (TrackingMode modeToUse)
{
instrument.setPressureTrackingMode (modeToUse);
}
void MPESynthesiserBase::setPitchbendTrackingMode (TrackingMode modeToUse)
{
instrument.setPitchbendTrackingMode (modeToUse);
}
void MPESynthesiserBase::setTimbreTrackingMode (TrackingMode modeToUse)
{
instrument.setTimbreTrackingMode (modeToUse);
}
//==============================================================================
void MPESynthesiserBase::handleMidiEvent (const MidiMessage& m)
{
instrument.processNextMidiEvent (m);
}
//==============================================================================
template <typename floatType>
void MPESynthesiserBase::renderNextBlock (AudioBuffer<floatType>& outputAudio,
const MidiBuffer& inputMidi,
int startSample,
int numSamples)
{
// you must set the sample rate before using this!
jassert (sampleRate != 0);
const ScopedLock sl (noteStateLock);
auto prevSample = startSample;
const auto endSample = startSample + numSamples;
for (auto it = inputMidi.findNextSamplePosition (startSample); it != inputMidi.cend(); ++it)
{
const auto metadata = *it;
if (metadata.samplePosition >= endSample)
break;
const auto smallBlockAllowed = (prevSample == startSample && ! subBlockSubdivisionIsStrict);
const auto thisBlockSize = smallBlockAllowed ? 1 : minimumSubBlockSize;
if (metadata.samplePosition >= prevSample + thisBlockSize)
{
renderNextSubBlock (outputAudio, prevSample, metadata.samplePosition - prevSample);
prevSample = metadata.samplePosition;
}
handleMidiEvent (metadata.getMessage());
}
if (prevSample < endSample)
renderNextSubBlock (outputAudio, prevSample, endSample - prevSample);
}
// explicit instantiation for supported float types:
template void MPESynthesiserBase::renderNextBlock<float> (AudioBuffer<float>&, const MidiBuffer&, int, int);
template void MPESynthesiserBase::renderNextBlock<double> (AudioBuffer<double>&, const MidiBuffer&, int, int);
//==============================================================================
void MPESynthesiserBase::setCurrentPlaybackSampleRate (const double newRate)
{
if (sampleRate != newRate)
{
const ScopedLock sl (noteStateLock);
instrument.releaseAllNotes();
sampleRate = newRate;
}
}
//==============================================================================
void MPESynthesiserBase::setMinimumRenderingSubdivisionSize (int numSamples, bool shouldBeStrict) noexcept
{
jassert (numSamples > 0); // it wouldn't make much sense for this to be less than 1
minimumSubBlockSize = numSamples;
subBlockSubdivisionIsStrict = shouldBeStrict;
}
#if JUCE_UNIT_TESTS
namespace
{
class MpeSynthesiserBaseTests : public UnitTest
{
enum class CallbackKind { process, midi };
struct StartAndLength
{
StartAndLength (int s, int l) : start (s), length (l) {}
int start = 0;
int length = 0;
std::tuple<const int&, const int&> tie() const noexcept { return std::tie (start, length); }
bool operator== (const StartAndLength& other) const noexcept { return tie() == other.tie(); }
bool operator!= (const StartAndLength& other) const noexcept { return tie() != other.tie(); }
bool operator< (const StartAndLength& other) const noexcept { return tie() < other.tie(); }
};
struct Events
{
std::vector<StartAndLength> blocks;
std::vector<MidiMessage> messages;
std::vector<CallbackKind> order;
};
class MockSynthesiser : public MPESynthesiserBase
{
public:
Events events;
void handleMidiEvent (const MidiMessage& m) override
{
events.messages.emplace_back (m);
events.order.emplace_back (CallbackKind::midi);
}
private:
using MPESynthesiserBase::renderNextSubBlock;
void renderNextSubBlock (AudioBuffer<float>&,
int startSample,
int numSamples) override
{
events.blocks.push_back ({ startSample, numSamples });
events.order.emplace_back (CallbackKind::process);
}
};
static MidiBuffer makeTestBuffer (const int bufferLength)
{
MidiBuffer result;
for (int i = 0; i != bufferLength; ++i)
result.addEvent ({}, i);
return result;
}
public:
MpeSynthesiserBaseTests()
: UnitTest ("MPE Synthesiser Base", UnitTestCategories::midi) {}
void runTest() override
{
const auto sumBlockLengths = [] (const std::vector<StartAndLength>& b)
{
const auto addBlock = [] (int acc, const StartAndLength& info) { return acc + info.length; };
return std::accumulate (b.begin(), b.end(), 0, addBlock);
};
beginTest ("Rendering sparse subblocks works");
{
const int blockSize = 512;
const auto midi = [&] { MidiBuffer b; b.addEvent ({}, blockSize / 2); return b; }();
AudioBuffer<float> audio (1, blockSize);
const auto processEvents = [&] (int start, int length)
{
MockSynthesiser synth;
synth.setMinimumRenderingSubdivisionSize (1, false);
synth.setCurrentPlaybackSampleRate (44100);
synth.renderNextBlock (audio, midi, start, length);
return synth.events;
};
{
const auto e = processEvents (0, blockSize);
expect (e.blocks.size() == 2);
expect (e.messages.size() == 1);
expect (std::is_sorted (e.blocks.begin(), e.blocks.end()));
expect (sumBlockLengths (e.blocks) == blockSize);
expect (e.order == std::vector<CallbackKind> { CallbackKind::process,
CallbackKind::midi,
CallbackKind::process });
}
}
beginTest ("Rendering subblocks processes only contained midi events");
{
const int blockSize = 512;
const auto midi = makeTestBuffer (blockSize);
AudioBuffer<float> audio (1, blockSize);
const auto processEvents = [&] (int start, int length)
{
MockSynthesiser synth;
synth.setMinimumRenderingSubdivisionSize (1, false);
synth.setCurrentPlaybackSampleRate (44100);
synth.renderNextBlock (audio, midi, start, length);
return synth.events;
};
{
const int subBlockLength = 0;
const auto e = processEvents (0, subBlockLength);
expect (e.blocks.size() == 0);
expect (e.messages.size() == 0);
expect (std::is_sorted (e.blocks.begin(), e.blocks.end()));
expect (sumBlockLengths (e.blocks) == subBlockLength);
}
{
const int subBlockLength = 0;
const auto e = processEvents (1, subBlockLength);
expect (e.blocks.size() == 0);
expect (e.messages.size() == 0);
expect (std::is_sorted (e.blocks.begin(), e.blocks.end()));
expect (sumBlockLengths (e.blocks) == subBlockLength);
}
{
const int subBlockLength = 1;
const auto e = processEvents (1, subBlockLength);
expect (e.blocks.size() == 1);
expect (e.messages.size() == 1);
expect (std::is_sorted (e.blocks.begin(), e.blocks.end()));
expect (sumBlockLengths (e.blocks) == subBlockLength);
expect (e.order == std::vector<CallbackKind> { CallbackKind::midi,
CallbackKind::process });
}
{
const auto e = processEvents (0, blockSize);
expect (e.blocks.size() == blockSize);
expect (e.messages.size() == blockSize);
expect (std::is_sorted (e.blocks.begin(), e.blocks.end()));
expect (sumBlockLengths (e.blocks) == blockSize);
expect (e.order.front() == CallbackKind::midi);
}
}
beginTest ("Subblocks respect their minimum size");
{
const int blockSize = 512;
const auto midi = makeTestBuffer (blockSize);
AudioBuffer<float> audio (1, blockSize);
const auto blockLengthsAreValid = [] (const std::vector<StartAndLength>& info, int minLength, bool strict)
{
if (info.size() <= 1)
return true;
const auto lengthIsValid = [&] (const StartAndLength& s) { return minLength <= s.length; };
const auto begin = strict ? info.begin() : std::next (info.begin());
// The final block is allowed to be shorter than the minLength
return std::all_of (begin, std::prev (info.end()), lengthIsValid);
};
for (auto strict : { false, true })
{
for (auto subblockSize : { 1, 16, 32, 64, 1024 })
{
MockSynthesiser synth;
synth.setMinimumRenderingSubdivisionSize (subblockSize, strict);
synth.setCurrentPlaybackSampleRate (44100);
synth.renderNextBlock (audio, midi, 0, blockSize);
const auto& e = synth.events;
expectWithinAbsoluteError (float (e.blocks.size()),
std::ceil ((float) blockSize / (float) subblockSize),
1.0f);
expect (e.messages.size() == blockSize);
expect (std::is_sorted (e.blocks.begin(), e.blocks.end()));
expect (sumBlockLengths (e.blocks) == blockSize);
expect (blockLengthsAreValid (e.blocks, subblockSize, strict));
}
}
{
MockSynthesiser synth;
synth.setMinimumRenderingSubdivisionSize (32, true);
synth.setCurrentPlaybackSampleRate (44100);
synth.renderNextBlock (audio, MidiBuffer{}, 0, 16);
expect (synth.events.blocks == std::vector<StartAndLength> { { 0, 16 } });
expect (synth.events.order == std::vector<CallbackKind> { CallbackKind::process });
expect (synth.events.messages.empty());
}
}
}
};
MpeSynthesiserBaseTests mpeSynthesiserBaseTests;
}
#endif
} // namespace juce
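A minimal sketch of how the rendering entry point above is usually driven (hypothetical helper functions; the synth can be any MPESynthesiserBase-derived object such as a juce::MPESynthesiser):
#include <juce_audio_basics/juce_audio_basics.h>
// Call once before playback starts; renderNextBlock asserts if the sample rate is unset.
void prepare (juce::MPESynthesiserBase& synth, double sampleRate)
{
    synth.setCurrentPlaybackSampleRate (sampleRate);
    synth.setMinimumRenderingSubdivisionSize (32, false);   // the documented default behaviour
}
// Call for every audio block; MIDI events split the block into sample-accurate sub-blocks.
void render (juce::MPESynthesiserBase& synth,
             juce::AudioBuffer<float>& buffer,
             const juce::MidiBuffer& midi)
{
    buffer.clear();
    synth.renderNextBlock (buffer, midi, 0, buffer.getNumSamples());
}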

+ 0
- 216
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPESynthesiserBase.h

@@ -1,216 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
Derive from this class to create a basic audio generator capable of MPE.
Implement the callbacks of MPEInstrument::Listener (noteAdded, notePressureChanged
etc.) to let your audio generator know that MPE notes were triggered, modulated,
or released. What to do inside them, and how that influences your audio generator,
is up to you!
This class uses an instance of MPEInstrument internally to handle the MPE
note state logic.
This class is a very low-level base class for an MPE instrument. If you need
something more sophisticated, have a look at MPESynthesiser, which extends
MPESynthesiserBase by adding the concept of voices that can play notes,
a voice stealing algorithm, and much more.
@see MPESynthesiser, MPEInstrument
@tags{Audio}
*/
struct JUCE_API MPESynthesiserBase : public MPEInstrument::Listener
{
public:
//==============================================================================
/** Constructor. */
MPESynthesiserBase();
/** Constructor.
If you use this constructor, the synthesiser will use the provided instrument
object to handle the MPE note state logic.
This is useful if you want to use an instance of your own class derived
from MPEInstrument for the MPE logic.
*/
MPESynthesiserBase (MPEInstrument& instrument);
//==============================================================================
/** Returns the synthesiser's internal MPE zone layout.
This happens by value, to enforce thread-safety and class invariants.
*/
MPEZoneLayout getZoneLayout() const noexcept;
/** Re-sets the synthesiser's internal MPE zone layout to the one passed in.
As a side effect, this will discard all currently playing notes,
call noteReleased for all of them, and disable legacy mode (if previously enabled).
*/
void setZoneLayout (MPEZoneLayout newLayout);
//==============================================================================
/** Tells the synthesiser what the sample rate is for the audio it's being
used to render.
*/
virtual void setCurrentPlaybackSampleRate (double sampleRate);
/** Returns the current target sample rate at which rendering is being done.
Subclasses may need to know this so that they can pitch things correctly.
*/
double getSampleRate() const noexcept { return sampleRate; }
//==============================================================================
/** Creates the next block of audio output.
Call this to make sound. This will chop up the AudioBuffer into subBlock
pieces separated by events in the MIDI buffer, and then call
renderNextSubBlock on each one of them. In between you will get calls
to noteAdded/Changed/Finished, where you can update parameters that
depend on those notes to use for your audio rendering.
@param outputAudio Buffer into which audio will be rendered
@param inputMidi MIDI events to process
@param startSample The first sample to process in both buffers
@param numSamples The number of samples to process
*/
template <typename floatType>
void renderNextBlock (AudioBuffer<floatType>& outputAudio,
const MidiBuffer& inputMidi,
int startSample,
int numSamples);
//==============================================================================
/** Handle incoming MIDI events (called from renderNextBlock).
The default implementation provided here simply forwards everything
to MPEInstrument::processNextMidiEvent, where it is used to update the
MPE notes, zones etc. MIDI messages not relevant for MPE are ignored.
This method can be overridden if you need to do custom MIDI handling
on top of MPE. The MPESynthesiser class overrides this to implement
callbacks for MIDI program changes and non-MPE-related MIDI controller
messages.
*/
virtual void handleMidiEvent (const MidiMessage&);
//==============================================================================
/** Sets a minimum limit on the size to which audio sub-blocks will be divided when rendering.
When rendering, the audio blocks that are passed into renderNextBlock() will be split up
into smaller blocks that lie between all the incoming midi messages, and it is these smaller
sub-blocks that are rendered with multiple calls to renderNextSubBlock().
In a pathological case where there is a midi message on every sample,
renderNextSubBlock() could be called once per sample and lead to poor performance, so this
setting allows you to set a lower limit on the block size.
The default setting is 32, which keeps midi timing accurate to better than 1ms at typical
sample rates, which is probably fine for most purposes, but you may want to increase or
decrease this value for your synth.
If shouldBeStrict is true, the audio sub-blocks will strictly never be smaller than numSamples.
If shouldBeStrict is false (default), the first audio sub-block in the buffer is allowed
to be smaller, to make sure that the first MIDI event in a buffer will always be sample-accurate
(this can sometimes help to avoid quantisation or phasing issues).
*/
void setMinimumRenderingSubdivisionSize (int numSamples, bool shouldBeStrict = false) noexcept;
//==============================================================================
/** Puts the synthesiser into legacy mode.
@param pitchbendRange The note pitchbend range in semitones to use when in legacy mode.
Must be between 0 and 96, otherwise behaviour is undefined.
The default pitchbend range in legacy mode is +/- 2 semitones.
@param channelRange The range of MIDI channels to use for notes when in legacy mode.
The default is to use all MIDI channels (1-16).
To get out of legacy mode, set a new MPE zone layout using setZoneLayout.
*/
void enableLegacyMode (int pitchbendRange = 2,
Range<int> channelRange = Range<int> (1, 17));
/** Returns true if the instrument is in legacy mode, false otherwise. */
bool isLegacyModeEnabled() const noexcept;
/** Returns the range of MIDI channels (1-16) to be used for notes when in legacy mode. */
Range<int> getLegacyModeChannelRange() const noexcept;
/** Re-sets the range of MIDI channels (1-16) to be used for notes when in legacy mode. */
void setLegacyModeChannelRange (Range<int> channelRange);
/** Returns the pitchbend range in semitones (0-96) to be used for notes when in legacy mode. */
int getLegacyModePitchbendRange() const noexcept;
/** Re-sets the pitchbend range in semitones (0-96) to be used for notes when in legacy mode. */
void setLegacyModePitchbendRange (int pitchbendRange);
//==============================================================================
using TrackingMode = MPEInstrument::TrackingMode;
/** Set the MPE tracking mode for the pressure dimension. */
void setPressureTrackingMode (TrackingMode modeToUse);
/** Set the MPE tracking mode for the pitchbend dimension. */
void setPitchbendTrackingMode (TrackingMode modeToUse);
/** Set the MPE tracking mode for the timbre dimension. */
void setTimbreTrackingMode (TrackingMode modeToUse);
protected:
//==============================================================================
/** Implement this method to render your audio inside.
@see renderNextBlock
*/
virtual void renderNextSubBlock (AudioBuffer<float>& outputAudio,
int startSample,
int numSamples) = 0;
/** Implement this method if you want to render 64-bit audio as well;
otherwise leave blank.
*/
virtual void renderNextSubBlock (AudioBuffer<double>& /*outputAudio*/,
int /*startSample*/,
int /*numSamples*/) {}
protected:
//==============================================================================
/** @internal */
MPEInstrument& instrument;
private:
//==============================================================================
MPEInstrument defaultInstrument { MPEZone (MPEZone::Type::lower, 15) };
CriticalSection noteStateLock;
double sampleRate = 0.0;
int minimumSubBlockSize = 32;
bool subBlockSubdivisionIsStrict = false;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MPESynthesiserBase)
};
} // namespace juce
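To make the class comment above concrete, a bare-bones monophonic generator (a sketch with our own names; it assumes the MPEInstrument::Listener callbacks have default empty implementations, so only the interesting ones are overridden) could look like this:
#include <juce_audio_basics/juce_audio_basics.h>
// Tracks the most recently played/updated note and renders it in renderNextSubBlock.
class MonoMPEGenerator : public juce::MPESynthesiserBase
{
public:
    void noteAdded (juce::MPENote newNote) override                 { current = newNote; }
    void notePressureChanged (juce::MPENote changedNote) override   { current = changedNote; }
    void notePitchbendChanged (juce::MPENote changedNote) override  { current = changedNote; }
    void noteReleased (juce::MPENote) override                      { current = juce::MPENote(); }
    void renderNextSubBlock (juce::AudioBuffer<float>& out, int startSample, int numSamples) override
    {
        if (! current.isValid())
            return;
        const auto hz = current.getFrequencyInHertz();  // initial note + per-note and master pitchbend
        // ...write numSamples samples at `hz` into `out`, starting at startSample...
        juce::ignoreUnused (out, startSample, numSamples, hz);
    }
private:
    juce::MPENote current;
};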

+ 0
- 50
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPESynthesiserVoice.cpp

@@ -1,50 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
MPESynthesiserVoice::MPESynthesiserVoice()
{
}
MPESynthesiserVoice::~MPESynthesiserVoice()
{
}
//==============================================================================
bool MPESynthesiserVoice::isCurrentlyPlayingNote (MPENote note) const noexcept
{
return isActive() && currentlyPlayingNote.noteID == note.noteID;
}
bool MPESynthesiserVoice::isPlayingButReleased() const noexcept
{
return isActive() && currentlyPlayingNote.keyState == MPENote::off;
}
void MPESynthesiserVoice::clearCurrentNote() noexcept
{
currentlyPlayingNote = MPENote();
}
} // namespace juce
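The release-phase query above is what a voice-stealing strategy would typically key on; a hypothetical helper (not JUCE code) might look like:
#include <juce_audio_basics/juce_audio_basics.h>
// Prefer stealing a voice that is already tailing off; otherwise fall back to the first voice.
juce::MPESynthesiserVoice* pickVoiceToSteal (const juce::OwnedArray<juce::MPESynthesiserVoice>& voices)
{
    for (auto* voice : voices)
        if (voice->isPlayingButReleased())
            return voice;
    return voices.getFirst();   // nullptr if there are no voices at all
}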

+ 0
- 191
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPESynthesiserVoice.h

@@ -1,191 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
Represents an MPE voice that an MPESynthesiser can use to play a sound.
A voice plays a single sound at a time, and a synthesiser holds an array of
voices so that it can play polyphonically.
@see MPESynthesiser, MPENote
@tags{Audio}
*/
class JUCE_API MPESynthesiserVoice
{
public:
//==============================================================================
/** Constructor. */
MPESynthesiserVoice();
/** Destructor. */
virtual ~MPESynthesiserVoice();
/** Returns the MPENote that this voice is currently playing.
Returns an invalid MPENote if no note is playing
(you can check this using MPENote::isValid() or MPESynthesiserVoice::isActive()).
*/
MPENote getCurrentlyPlayingNote() const noexcept { return currentlyPlayingNote; }
/** Returns true if the voice is currently playing the given MPENote
(as identified by the note's initial note number and MIDI channel).
*/
bool isCurrentlyPlayingNote (MPENote note) const noexcept;
/** Returns true if this voice is currently busy playing a sound.
By default this just checks whether getCurrentlyPlayingNote()
returns a valid MPE note, but can be overridden for more advanced checking.
*/
virtual bool isActive() const { return currentlyPlayingNote.isValid(); }
/** Returns true if a voice is sounding in its release phase. **/
bool isPlayingButReleased() const noexcept;
/** Called by the MPESynthesiser to let the voice know that a new note has started on it.
This will be called during the rendering callback, so must be fast and thread-safe.
*/
virtual void noteStarted() = 0;
/** Called by the MPESynthesiser to let the voice know that its currently playing note has stopped.
This will be called during the rendering callback, so must be fast and thread-safe.
If allowTailOff is false or the voice doesn't want to tail-off, then it must stop all
sound immediately, and must call clearCurrentNote() to reset the state of this voice
and allow the synth to reassign it another sound.
If allowTailOff is true and the voice decides to do a tail-off, then it's allowed to
begin fading out its sound, and it can keep playing until it's finished. As soon as it
finishes playing (during the rendering callback), it must make sure that it calls
clearCurrentNote().
*/
virtual void noteStopped (bool allowTailOff) = 0;
/** Called by the MPESynthesiser to let the voice know that its currently playing note
has changed its pressure value.
This will be called during the rendering callback, so must be fast and thread-safe.
*/
virtual void notePressureChanged() = 0;
/** Called by the MPESynthesiser to let the voice know that its currently playing note
has changed its pitchbend value.
This will be called during the rendering callback, so must be fast and thread-safe.
Note: You can call currentlyPlayingNote.getFrequencyInHertz() to find out the effective frequency
of the note, as a sum of the initial note number, the per-note pitchbend and the master pitchbend.
*/
virtual void notePitchbendChanged() = 0;
/** Called by the MPESynthesiser to let the voice know that its currently playing note
has changed its timbre value.
This will be called during the rendering callback, so must be fast and thread-safe.
*/
virtual void noteTimbreChanged() = 0;
/** Called by the MPESynthesiser to let the voice know that its currently playing note
has changed its key state.
This typically happens when a sustain or sostenuto pedal is pressed or released (on
an MPE channel relevant for this note), or if the note key is lifted while the sustained
or sostenuto pedal is still held down.
This will be called during the rendering callback, so must be fast and thread-safe.
*/
virtual void noteKeyStateChanged() = 0;
/** Renders the next block of data for this voice.
The output audio data must be added to the current contents of the buffer provided.
Only the region of the buffer between startSample and (startSample + numSamples)
should be altered by this method.
If the voice is currently silent, it should just return without doing anything.
If the sound that the voice is playing finishes during the course of this rendered
block, it must call clearCurrentNote(), to tell the synthesiser that it has finished.
The size of the blocks that are rendered can change each time it is called, and may
involve rendering as little as 1 sample at a time. In between rendering callbacks,
the voice's methods will be called to tell it about note and controller events.
*/
virtual void renderNextBlock (AudioBuffer<float>& outputBuffer,
int startSample,
int numSamples) = 0;
/** Renders the next block of 64-bit data for this voice.
Support for 64-bit audio is optional. You can choose to not override this method if
you don't need it (the default implementation simply does nothing).
*/
virtual void renderNextBlock (AudioBuffer<double>& /*outputBuffer*/,
int /*startSample*/,
int /*numSamples*/) {}
/** Changes the voice's reference sample rate.
The rate is set so that subclasses know the output rate and can set their pitch
accordingly.
This method is called by the synth, and subclasses can access the current rate with
the currentSampleRate member.
*/
virtual void setCurrentSampleRate (double newRate) { currentSampleRate = newRate; }
/** Returns the current target sample rate at which rendering is being done.
Subclasses may need to know this so that they can pitch things correctly.
*/
double getSampleRate() const noexcept { return currentSampleRate; }
/** This will be set to an incrementing counter value in MPESynthesiser::startVoice()
and can be used to determine the order in which voices started.
*/
uint32 noteOnTime = 0;
protected:
//==============================================================================
/** Resets the state of this voice after a sound has finished playing.
The subclass must call this when it finishes playing a note and becomes available
to play new ones.
It must either call it in the stopNote() method, or if the voice is tailing off,
then it should call it later during the renderNextBlock method, as soon as it
finishes its tail-off.
It can also be called at any time during the render callback if the sound happens
to have finished, e.g. if it's playing a sample and the sample finishes.
*/
void clearCurrentNote() noexcept;
//==============================================================================
double currentSampleRate = 0.0;
MPENote currentlyPlayingNote;
private:
//==============================================================================
friend class MPESynthesiser;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MPESynthesiserVoice)
};
} // namespace juce
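A concrete voice has to implement every pure-virtual callback above; the following sine-wave voice is a rough sketch (our own class name, deliberately simplified) rather than anything shipped with JUCE:
#include <cmath>
#include <juce_audio_basics/juce_audio_basics.h>
class SineWaveVoice : public juce::MPESynthesiserVoice
{
public:
    void noteStarted() override
    {
        phase = 0.0;
        level = currentlyPlayingNote.noteOnVelocity.asUnsignedFloat();
        tailingOff = false;
    }
    void noteStopped (bool allowTailOff) override
    {
        if (allowTailOff)
            tailingOff = true;       // fade out in renderNextBlock, then clearCurrentNote()
        else
            clearCurrentNote();      // stop at once so the synth can reuse this voice
    }
    void notePressureChanged() override  {}
    void notePitchbendChanged() override {}   // the frequency is re-read every block below
    void noteTimbreChanged() override    {}
    void noteKeyStateChanged() override  {}
    void renderNextBlock (juce::AudioBuffer<float>& output, int startSample, int numSamples) override
    {
        if (! isActive())
            return;
        const auto delta = juce::MathConstants<double>::twoPi
                            * currentlyPlayingNote.getFrequencyInHertz() / getSampleRate();
        for (int i = 0; i < numSamples; ++i)
        {
            const auto sample = (float) std::sin (phase) * level;
            phase += delta;
            for (int ch = 0; ch < output.getNumChannels(); ++ch)
                output.addSample (ch, startSample + i, sample);   // add, don't overwrite
            if (tailingOff && (level *= 0.995f) < 0.001f)
            {
                clearCurrentNote();
                break;
            }
        }
    }
private:
    double phase = 0.0;
    float level = 0.0f;
    bool tailingOff = false;
};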

+ 0
- 555
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEUtils.cpp

@@ -1,555 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
MPEChannelAssigner::MPEChannelAssigner (MPEZoneLayout::Zone zoneToUse)
: zone (new MPEZoneLayout::Zone (zoneToUse)),
channelIncrement (zone->isLowerZone() ? 1 : -1),
numChannels (zone->numMemberChannels),
firstChannel (zone->getFirstMemberChannel()),
lastChannel (zone->getLastMemberChannel()),
midiChannelLastAssigned (firstChannel - channelIncrement)
{
// must be an active MPE zone!
jassert (numChannels > 0);
}
MPEChannelAssigner::MPEChannelAssigner (Range<int> channelRange)
: isLegacy (true),
channelIncrement (1),
numChannels (channelRange.getLength()),
firstChannel (channelRange.getStart()),
lastChannel (channelRange.getEnd() - 1),
midiChannelLastAssigned (firstChannel - channelIncrement)
{
// must have at least one channel!
jassert (! channelRange.isEmpty());
}
int MPEChannelAssigner::findMidiChannelForNewNote (int noteNumber) noexcept
{
if (numChannels <= 1)
return firstChannel;
for (int ch = firstChannel; (isLegacy || zone->isLowerZone() ? ch <= lastChannel : ch >= lastChannel); ch += channelIncrement)
{
if (midiChannels[(size_t) ch].isFree() && midiChannels[(size_t) ch].lastNotePlayed == noteNumber)
{
midiChannelLastAssigned = ch;
midiChannels[(size_t) ch].notes.add (noteNumber);
return ch;
}
}
for (int ch = midiChannelLastAssigned + channelIncrement; ; ch += channelIncrement)
{
if (ch == lastChannel + channelIncrement) // loop wrap-around
ch = firstChannel;
if (midiChannels[(size_t) ch].isFree())
{
midiChannelLastAssigned = ch;
midiChannels[(size_t) ch].notes.add (noteNumber);
return ch;
}
if (ch == midiChannelLastAssigned)
break; // no free channels!
}
midiChannelLastAssigned = findMidiChannelPlayingClosestNonequalNote (noteNumber);
midiChannels[(size_t) midiChannelLastAssigned].notes.add (noteNumber);
return midiChannelLastAssigned;
}
int MPEChannelAssigner::findMidiChannelForExistingNote (int noteNumber) noexcept
{
const auto iter = std::find_if (midiChannels.cbegin(), midiChannels.cend(), [&] (auto& ch)
{
return std::find (ch.notes.begin(), ch.notes.end(), noteNumber) != ch.notes.end();
});
return iter != midiChannels.cend() ? (int) std::distance (midiChannels.cbegin(), iter) : -1;
}
void MPEChannelAssigner::noteOff (int noteNumber, int midiChannel)
{
const auto removeNote = [] (MidiChannel& ch, int noteNum)
{
if (ch.notes.removeAllInstancesOf (noteNum) > 0)
{
ch.lastNotePlayed = noteNum;
return true;
}
return false;
};
if (midiChannel >= 0 && midiChannel <= 16)
{
removeNote (midiChannels[(size_t) midiChannel], noteNumber);
return;
}
for (auto& ch : midiChannels)
{
if (removeNote (ch, noteNumber))
return;
}
}
void MPEChannelAssigner::allNotesOff()
{
for (auto& ch : midiChannels)
{
if (ch.notes.size() > 0)
ch.lastNotePlayed = ch.notes.getLast();
ch.notes.clear();
}
}
int MPEChannelAssigner::findMidiChannelPlayingClosestNonequalNote (int noteNumber) noexcept
{
auto channelWithClosestNote = firstChannel;
int closestNoteDistance = 127;
for (int ch = firstChannel; (isLegacy || zone->isLowerZone() ? ch <= lastChannel : ch >= lastChannel); ch += channelIncrement)
{
for (auto note : midiChannels[(size_t) ch].notes)
{
auto noteDistance = std::abs (note - noteNumber);
if (noteDistance > 0 && noteDistance < closestNoteDistance)
{
closestNoteDistance = noteDistance;
channelWithClosestNote = ch;
}
}
}
return channelWithClosestNote;
}
//==============================================================================
MPEChannelRemapper::MPEChannelRemapper (MPEZoneLayout::Zone zoneToRemap)
: zone (zoneToRemap),
channelIncrement (zone.isLowerZone() ? 1 : -1),
firstChannel (zone.getFirstMemberChannel()),
lastChannel (zone.getLastMemberChannel())
{
// must be an active MPE zone!
jassert (zone.numMemberChannels > 0);
zeroArrays();
}
void MPEChannelRemapper::remapMidiChannelIfNeeded (MidiMessage& message, uint32 mpeSourceID) noexcept
{
auto channel = message.getChannel();
if (! zone.isUsingChannelAsMemberChannel (channel))
return;
if (channel == zone.getMasterChannel() && (message.isResetAllControllers() || message.isAllNotesOff()))
{
clearSource (mpeSourceID);
return;
}
auto sourceAndChannelID = (((uint32) mpeSourceID << 5) | (uint32) (channel));
if (messageIsNoteData (message))
{
++counter;
// fast path - no remap
if (applyRemapIfExisting (channel, sourceAndChannelID, message))
return;
// find existing remap
for (int chan = firstChannel; (zone.isLowerZone() ? chan <= lastChannel : chan >= lastChannel); chan += channelIncrement)
if (applyRemapIfExisting (chan, sourceAndChannelID, message))
return;
// no remap necessary
if (sourceAndChannel[channel] == notMPE)
{
lastUsed[channel] = counter;
sourceAndChannel[channel] = sourceAndChannelID;
return;
}
// remap source & channel to new channel
auto chan = getBestChanToReuse();
sourceAndChannel[chan] = sourceAndChannelID;
lastUsed[chan] = counter;
message.setChannel (chan);
}
}
void MPEChannelRemapper::reset() noexcept
{
for (auto& s : sourceAndChannel)
s = notMPE;
}
void MPEChannelRemapper::clearChannel (int channel) noexcept
{
sourceAndChannel[channel] = notMPE;
}
void MPEChannelRemapper::clearSource (uint32 mpeSourceID)
{
for (auto& s : sourceAndChannel)
{
if (uint32 (s >> 5) == mpeSourceID)
{
s = notMPE;
return;
}
}
}
bool MPEChannelRemapper::applyRemapIfExisting (int channel, uint32 sourceAndChannelID, MidiMessage& m) noexcept
{
if (sourceAndChannel[channel] == sourceAndChannelID)
{
if (m.isNoteOff())
sourceAndChannel[channel] = notMPE;
else
lastUsed[channel] = counter;
m.setChannel (channel);
return true;
}
return false;
}
int MPEChannelRemapper::getBestChanToReuse() const noexcept
{
for (int chan = firstChannel; (zone.isLowerZone() ? chan <= lastChannel : chan >= lastChannel); chan += channelIncrement)
if (sourceAndChannel[chan] == notMPE)
return chan;
auto bestChan = firstChannel;
auto bestLastUse = counter;
for (int chan = firstChannel; (zone.isLowerZone() ? chan <= lastChannel : chan >= lastChannel); chan += channelIncrement)
{
if (lastUsed[chan] < bestLastUse)
{
bestLastUse = lastUsed[chan];
bestChan = chan;
}
}
return bestChan;
}
void MPEChannelRemapper::zeroArrays()
{
for (int i = 0; i < 17; ++i)
{
sourceAndChannel[i] = 0;
lastUsed[i] = 0;
}
}
//==============================================================================
//==============================================================================
#if JUCE_UNIT_TESTS
struct MPEUtilsUnitTests : public UnitTest
{
MPEUtilsUnitTests()
: UnitTest ("MPE Utilities", UnitTestCategories::midi)
{}
void runTest() override
{
beginTest ("MPEChannelAssigner");
{
MPEZoneLayout layout;
// lower
{
layout.setLowerZone (15);
// lower zone
MPEChannelAssigner channelAssigner (layout.getLowerZone());
// check that channels are assigned in correct order
int noteNum = 60;
for (int ch = 2; ch <= 16; ++ch)
{
expectEquals (channelAssigner.findMidiChannelForNewNote (noteNum), ch);
expectEquals (channelAssigner.findMidiChannelForExistingNote (noteNum), ch);
++noteNum;
}
// check that note-offs are processed
channelAssigner.noteOff (60);
expectEquals (channelAssigner.findMidiChannelForNewNote (60), 2);
expectEquals (channelAssigner.findMidiChannelForExistingNote (60), 2);
channelAssigner.noteOff (61);
expectEquals (channelAssigner.findMidiChannelForNewNote (61), 3);
expectEquals (channelAssigner.findMidiChannelForExistingNote (61), 3);
// check that assigned channel was last to play note
channelAssigner.noteOff (65);
channelAssigner.noteOff (66);
expectEquals (channelAssigner.findMidiChannelForNewNote (66), 8);
expectEquals (channelAssigner.findMidiChannelForNewNote (65), 7);
expectEquals (channelAssigner.findMidiChannelForExistingNote (66), 8);
expectEquals (channelAssigner.findMidiChannelForExistingNote (65), 7);
// find closest channel playing nonequal note
expectEquals (channelAssigner.findMidiChannelForNewNote (80), 16);
expectEquals (channelAssigner.findMidiChannelForNewNote (55), 2);
expectEquals (channelAssigner.findMidiChannelForExistingNote (80), 16);
expectEquals (channelAssigner.findMidiChannelForExistingNote (55), 2);
// all notes off
channelAssigner.allNotesOff();
// last note played
expectEquals (channelAssigner.findMidiChannelForNewNote (66), 8);
expectEquals (channelAssigner.findMidiChannelForNewNote (65), 7);
expectEquals (channelAssigner.findMidiChannelForNewNote (80), 16);
expectEquals (channelAssigner.findMidiChannelForNewNote (55), 2);
expectEquals (channelAssigner.findMidiChannelForExistingNote (66), 8);
expectEquals (channelAssigner.findMidiChannelForExistingNote (65), 7);
expectEquals (channelAssigner.findMidiChannelForExistingNote (80), 16);
expectEquals (channelAssigner.findMidiChannelForExistingNote (55), 2);
// normal assignment
expectEquals (channelAssigner.findMidiChannelForNewNote (101), 3);
expectEquals (channelAssigner.findMidiChannelForNewNote (20), 4);
expectEquals (channelAssigner.findMidiChannelForExistingNote (101), 3);
expectEquals (channelAssigner.findMidiChannelForExistingNote (20), 4);
}
// upper
{
layout.setUpperZone (15);
// upper zone
MPEChannelAssigner channelAssigner (layout.getUpperZone());
// check that channels are assigned in correct order
int noteNum = 60;
for (int ch = 15; ch >= 1; --ch)
{
expectEquals (channelAssigner.findMidiChannelForNewNote (noteNum), ch);
expectEquals (channelAssigner.findMidiChannelForExistingNote (noteNum), ch);
++noteNum;
}
// check that note-offs are processed
channelAssigner.noteOff (60);
expectEquals (channelAssigner.findMidiChannelForNewNote (60), 15);
expectEquals (channelAssigner.findMidiChannelForExistingNote (60), 15);
channelAssigner.noteOff (61);
expectEquals (channelAssigner.findMidiChannelForNewNote (61), 14);
expectEquals (channelAssigner.findMidiChannelForExistingNote (61), 14);
// check that assigned channel was last to play note
channelAssigner.noteOff (65);
channelAssigner.noteOff (66);
expectEquals (channelAssigner.findMidiChannelForNewNote (66), 9);
expectEquals (channelAssigner.findMidiChannelForNewNote (65), 10);
expectEquals (channelAssigner.findMidiChannelForExistingNote (66), 9);
expectEquals (channelAssigner.findMidiChannelForExistingNote (65), 10);
// find closest channel playing nonequal note
expectEquals (channelAssigner.findMidiChannelForNewNote (80), 1);
expectEquals (channelAssigner.findMidiChannelForNewNote (55), 15);
expectEquals (channelAssigner.findMidiChannelForExistingNote (80), 1);
expectEquals (channelAssigner.findMidiChannelForExistingNote (55), 15);
// all notes off
channelAssigner.allNotesOff();
// last note played
expectEquals (channelAssigner.findMidiChannelForNewNote (66), 9);
expectEquals (channelAssigner.findMidiChannelForNewNote (65), 10);
expectEquals (channelAssigner.findMidiChannelForNewNote (80), 1);
expectEquals (channelAssigner.findMidiChannelForNewNote (55), 15);
expectEquals (channelAssigner.findMidiChannelForExistingNote (66), 9);
expectEquals (channelAssigner.findMidiChannelForExistingNote (65), 10);
expectEquals (channelAssigner.findMidiChannelForExistingNote (80), 1);
expectEquals (channelAssigner.findMidiChannelForExistingNote (55), 15);
// normal assignment
expectEquals (channelAssigner.findMidiChannelForNewNote (101), 14);
expectEquals (channelAssigner.findMidiChannelForNewNote (20), 13);
expectEquals (channelAssigner.findMidiChannelForExistingNote (101), 14);
expectEquals (channelAssigner.findMidiChannelForExistingNote (20), 13);
}
// legacy
{
MPEChannelAssigner channelAssigner;
// check that channels are assigned in correct order
int noteNum = 60;
for (int ch = 1; ch <= 16; ++ch)
{
expectEquals (channelAssigner.findMidiChannelForNewNote (noteNum), ch);
expectEquals (channelAssigner.findMidiChannelForExistingNote (noteNum), ch);
++noteNum;
}
// check that note-offs are processed
channelAssigner.noteOff (60);
expectEquals (channelAssigner.findMidiChannelForNewNote (60), 1);
expectEquals (channelAssigner.findMidiChannelForExistingNote (60), 1);
channelAssigner.noteOff (61);
expectEquals (channelAssigner.findMidiChannelForNewNote (61), 2);
expectEquals (channelAssigner.findMidiChannelForExistingNote (61), 2);
// check that assigned channel was last to play note
channelAssigner.noteOff (65);
channelAssigner.noteOff (66);
expectEquals (channelAssigner.findMidiChannelForNewNote (66), 7);
expectEquals (channelAssigner.findMidiChannelForNewNote (65), 6);
expectEquals (channelAssigner.findMidiChannelForExistingNote (66), 7);
expectEquals (channelAssigner.findMidiChannelForExistingNote (65), 6);
// find closest channel playing nonequal note
expectEquals (channelAssigner.findMidiChannelForNewNote (80), 16);
expectEquals (channelAssigner.findMidiChannelForNewNote (55), 1);
expectEquals (channelAssigner.findMidiChannelForExistingNote (80), 16);
expectEquals (channelAssigner.findMidiChannelForExistingNote (55), 1);
// all notes off
channelAssigner.allNotesOff();
// last note played
expectEquals (channelAssigner.findMidiChannelForNewNote (66), 7);
expectEquals (channelAssigner.findMidiChannelForNewNote (65), 6);
expectEquals (channelAssigner.findMidiChannelForNewNote (80), 16);
expectEquals (channelAssigner.findMidiChannelForNewNote (55), 1);
expectEquals (channelAssigner.findMidiChannelForExistingNote (66), 7);
expectEquals (channelAssigner.findMidiChannelForExistingNote (65), 6);
expectEquals (channelAssigner.findMidiChannelForExistingNote (80), 16);
expectEquals (channelAssigner.findMidiChannelForExistingNote (55), 1);
// normal assignment
expectEquals (channelAssigner.findMidiChannelForNewNote (101), 2);
expectEquals (channelAssigner.findMidiChannelForNewNote (20), 3);
expectEquals (channelAssigner.findMidiChannelForExistingNote (101), 2);
expectEquals (channelAssigner.findMidiChannelForExistingNote (20), 3);
}
}
beginTest ("MPEChannelRemapper");
{
// 3 different MPE 'sources', constant IDs
const int sourceID1 = 0;
const int sourceID2 = 1;
const int sourceID3 = 2;
MPEZoneLayout layout;
{
layout.setLowerZone (15);
// lower zone
MPEChannelRemapper channelRemapper (layout.getLowerZone());
// first source, shouldn't remap
for (int ch = 2; ch <= 16; ++ch)
{
auto noteOn = MidiMessage::noteOn (ch, 60, 1.0f);
channelRemapper.remapMidiChannelIfNeeded (noteOn, sourceID1);
expectEquals (noteOn.getChannel(), ch);
}
auto noteOn = MidiMessage::noteOn (2, 60, 1.0f);
// remap onto oldest last-used channel
channelRemapper.remapMidiChannelIfNeeded (noteOn, sourceID2);
expectEquals (noteOn.getChannel(), 2);
// remap onto oldest last-used channel
channelRemapper.remapMidiChannelIfNeeded (noteOn, sourceID3);
expectEquals (noteOn.getChannel(), 3);
// remap to correct channel for source ID
auto noteOff = MidiMessage::noteOff (2, 60, 1.0f);
channelRemapper.remapMidiChannelIfNeeded (noteOff, sourceID3);
expectEquals (noteOff.getChannel(), 3);
}
{
layout.setUpperZone (15);
// upper zone
MPEChannelRemapper channelRemapper (layout.getUpperZone());
// first source, shouldn't remap
for (int ch = 15; ch >= 1; --ch)
{
auto noteOn = MidiMessage::noteOn (ch, 60, 1.0f);
channelRemapper.remapMidiChannelIfNeeded (noteOn, sourceID1);
expectEquals (noteOn.getChannel(), ch);
}
auto noteOn = MidiMessage::noteOn (15, 60, 1.0f);
// remap onto oldest last-used channel
channelRemapper.remapMidiChannelIfNeeded (noteOn, sourceID2);
expectEquals (noteOn.getChannel(), 15);
// remap onto oldest last-used channel
channelRemapper.remapMidiChannelIfNeeded (noteOn, sourceID3);
expectEquals (noteOn.getChannel(), 14);
// remap to correct channel for source ID
auto noteOff = MidiMessage::noteOff (15, 60, 1.0f);
channelRemapper.remapMidiChannelIfNeeded (noteOff, sourceID3);
expectEquals (noteOff.getChannel(), 14);
}
}
}
};
static MPEUtilsUnitTests MPEUtilsUnitTests;
#endif
} // namespace juce
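In practice one MPEChannelRemapper is kept alive across audio blocks and every hardware source is funnelled through it with a stable source ID; a hypothetical wrapper (not JUCE code) might be:
#include <juce_audio_basics/juce_audio_basics.h>
// Merges note data from several MPE sources onto the member channels of a single zone.
struct MPESourceMerger
{
    explicit MPESourceMerger (juce::MPEZoneLayout::Zone zoneToUse) : remapper (zoneToUse) {}
    void addBlock (const juce::MidiBuffer& input, juce::uint32 sourceID, juce::MidiBuffer& output)
    {
        for (const auto metadata : input)
        {
            auto message = metadata.getMessage();
            remapper.remapMidiChannelIfNeeded (message, sourceID);   // may rewrite the channel
            output.addEvent (message, metadata.samplePosition);
        }
    }
    juce::MPEChannelRemapper remapper;
};
It would be constructed with something like layout.getLowerZone() and called once per device per block, mirroring the unit tests above.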

+ 0
- 158
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEUtils.h

@@ -1,158 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
This class handles the assignment of new MIDI notes to member channels of an active
MPE zone.
To use it, create an instance passing in the MPE zone that it should operate on
and then call the findMidiChannelForNewNote() method for all note-on messages
and the noteOff() method for all note-off messages.
@tags{Audio}
*/
class MPEChannelAssigner
{
public:
/** Constructor.
This will assign channels within the range of the specified MPE zone.
*/
MPEChannelAssigner (MPEZoneLayout::Zone zoneToUse);
/** Legacy mode constructor.
This will assign channels within the specified range.
*/
MPEChannelAssigner (Range<int> channelRange = Range<int> (1, 17));
/** This method will use a set of rules recommended in the MPE specification to
determine which member channel the specified MIDI note should be assigned to
and will return this channel number.
The rules have the following precedence:
- find a free channel on which the last note played was the same as the one specified
- find the next free channel in round-robin assignment
- find the channel number that is currently playing the closest note (but not the same)
@param noteNumber the MIDI note number to be assigned to a channel
@returns the zone's member channel that this note should be assigned to
*/
int findMidiChannelForNewNote (int noteNumber) noexcept;
/** If a note has been added using findMidiChannelForNewNote() this will return the channel
to which it was assigned, otherwise it will return -1.
*/
int findMidiChannelForExistingNote (int initialNoteOnNumber) noexcept;
/** You must call this method for all note-offs that you receive so that this class
can keep track of the currently playing notes internally.
You can specify the channel number the note off happened on. If you don't, it will
look through all channels to find the registered midi note matching the given note number.
*/
void noteOff (int noteNumber, int midiChannel = -1);
/** Call this to clear all currently playing notes. */
void allNotesOff();
private:
bool isLegacy = false;
std::unique_ptr<MPEZoneLayout::Zone> zone;
int channelIncrement, numChannels, firstChannel, lastChannel, midiChannelLastAssigned;
//==============================================================================
struct MidiChannel
{
Array<int> notes;
int lastNotePlayed = -1;
bool isFree() const noexcept { return notes.isEmpty(); }
};
std::array<MidiChannel, 17> midiChannels;
//==============================================================================
int findMidiChannelPlayingClosestNonequalNote (int noteNumber) noexcept;
};
//==============================================================================
/**
This class handles the logic for remapping MIDI note messages from multiple MPE
sources onto a specified MPE zone.
@tags{Audio}
*/
class MPEChannelRemapper
{
public:
/** Used to indicate that a particular source & channel combination is not currently using MPE. */
static const uint32 notMPE = 0;
/** Constructor */
MPEChannelRemapper (MPEZoneLayout::Zone zoneToRemap);
//==============================================================================
/** Remaps the MIDI channel of the specified MIDI message (if necessary).
Note that the MidiMessage object passed in will have its channel changed if it
needs to be remapped.
@param message the message to be remapped
@param mpeSourceID the ID of the MPE source of the message. This is up to the
user to define and keep constant
*/
void remapMidiChannelIfNeeded (MidiMessage& message, uint32 mpeSourceID) noexcept;
//==============================================================================
/** Resets all the source & channel combinations. */
void reset() noexcept;
/** Clears a specified channel of this MPE zone. */
void clearChannel (int channel) noexcept;
/** Clears all channels in use by a specified source. */
void clearSource (uint32 mpeSourceID);
private:
MPEZoneLayout::Zone zone;
int channelIncrement;
int firstChannel, lastChannel;
uint32 sourceAndChannel[17];
uint32 lastUsed[17];
uint32 counter = 0;
//==============================================================================
bool applyRemapIfExisting (int channel, uint32 sourceAndChannelID, MidiMessage& m) noexcept;
int getBestChanToReuse() const noexcept;
void zeroArrays();
//==============================================================================
bool messageIsNoteData (const MidiMessage& m) { return (*m.getRawData() & 0xf0) != 0xf0; }
};
} // namespace juce
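As a usage sketch for the assigner documented above (our own function, not JUCE code), a pre-processing step that spreads plain MIDI notes over the zone's member channels could be written as:
#include <juce_audio_basics/juce_audio_basics.h>
juce::MidiBuffer assignMemberChannels (const juce::MidiBuffer& input, juce::MPEChannelAssigner& assigner)
{
    juce::MidiBuffer output;
    for (const auto metadata : input)
    {
        auto message = metadata.getMessage();
        if (message.isNoteOn())
        {
            message.setChannel (assigner.findMidiChannelForNewNote (message.getNoteNumber()));
        }
        else if (message.isNoteOff())
        {
            const auto channel = assigner.findMidiChannelForExistingNote (message.getNoteNumber());
            if (channel > 0)
                message.setChannel (channel);
            assigner.noteOff (message.getNoteNumber(), channel);   // -1 makes it search all channels
        }
        output.addEvent (message, metadata.samplePosition);
    }
    return output;
}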

+ 0
- 193
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEValue.cpp

@@ -1,193 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
MPEValue::MPEValue() noexcept {}
MPEValue::MPEValue (int value) : normalisedValue (value) {}
//==============================================================================
MPEValue MPEValue::from7BitInt (int value) noexcept
{
jassert (value >= 0 && value <= 127);
auto valueAs14Bit = value <= 64 ? value << 7
: int (jmap<float> (float (value - 64), 0.0f, 63.0f, 0.0f, 8191.0f)) + 8192;
return { valueAs14Bit };
}
MPEValue MPEValue::from14BitInt (int value) noexcept
{
jassert (value >= 0 && value <= 16383);
return { value };
}
MPEValue MPEValue::fromUnsignedFloat (float value) noexcept
{
jassert (0.0f <= value && value <= 1.0f);
return { roundToInt (value * 16383.0f) };
}
MPEValue MPEValue::fromSignedFloat (float value) noexcept
{
jassert (-1.0f <= value && value <= 1.0f);
return { roundToInt (((value + 1.0f) * 16383.0f) / 2.0f) };
}
//==============================================================================
MPEValue MPEValue::minValue() noexcept { return MPEValue::from7BitInt (0); }
MPEValue MPEValue::centreValue() noexcept { return MPEValue::from7BitInt (64); }
MPEValue MPEValue::maxValue() noexcept { return MPEValue::from7BitInt (127); }
int MPEValue::as7BitInt() const noexcept
{
return normalisedValue >> 7;
}
int MPEValue::as14BitInt() const noexcept
{
return normalisedValue;
}
//==============================================================================
float MPEValue::asSignedFloat() const noexcept
{
return (normalisedValue < 8192)
? jmap<float> (float (normalisedValue), 0.0f, 8192.0f, -1.0f, 0.0f)
: jmap<float> (float (normalisedValue), 8192.0f, 16383.0f, 0.0f, 1.0f);
}
float MPEValue::asUnsignedFloat() const noexcept
{
return jmap<float> (float (normalisedValue), 0.0f, 16383.0f, 0.0f, 1.0f);
}
//==============================================================================
bool MPEValue::operator== (const MPEValue& other) const noexcept
{
return normalisedValue == other.normalisedValue;
}
bool MPEValue::operator!= (const MPEValue& other) const noexcept
{
return ! operator== (other);
}
//==============================================================================
//==============================================================================
#if JUCE_UNIT_TESTS
class MPEValueTests : public UnitTest
{
public:
MPEValueTests()
: UnitTest ("MPEValue class", UnitTestCategories::midi)
{}
void runTest() override
{
beginTest ("comparison operator");
{
MPEValue value1 = MPEValue::from7BitInt (7);
MPEValue value2 = MPEValue::from7BitInt (7);
MPEValue value3 = MPEValue::from7BitInt (8);
expect (value1 == value1);
expect (value1 == value2);
expect (value1 != value3);
}
beginTest ("special values");
{
expectEquals (MPEValue::minValue().as7BitInt(), 0);
expectEquals (MPEValue::minValue().as14BitInt(), 0);
expectEquals (MPEValue::centreValue().as7BitInt(), 64);
expectEquals (MPEValue::centreValue().as14BitInt(), 8192);
expectEquals (MPEValue::maxValue().as7BitInt(), 127);
expectEquals (MPEValue::maxValue().as14BitInt(), 16383);
}
beginTest ("zero/minimum value");
{
expectValuesConsistent (MPEValue::from7BitInt (0), 0, 0, -1.0f, 0.0f);
expectValuesConsistent (MPEValue::from14BitInt (0), 0, 0, -1.0f, 0.0f);
expectValuesConsistent (MPEValue::fromUnsignedFloat (0.0f), 0, 0, -1.0f, 0.0f);
expectValuesConsistent (MPEValue::fromSignedFloat (-1.0f), 0, 0, -1.0f, 0.0f);
}
beginTest ("maximum value");
{
expectValuesConsistent (MPEValue::from7BitInt (127), 127, 16383, 1.0f, 1.0f);
expectValuesConsistent (MPEValue::from14BitInt (16383), 127, 16383, 1.0f, 1.0f);
expectValuesConsistent (MPEValue::fromUnsignedFloat (1.0f), 127, 16383, 1.0f, 1.0f);
expectValuesConsistent (MPEValue::fromSignedFloat (1.0f), 127, 16383, 1.0f, 1.0f);
}
beginTest ("centre value");
{
expectValuesConsistent (MPEValue::from7BitInt (64), 64, 8192, 0.0f, 0.5f);
expectValuesConsistent (MPEValue::from14BitInt (8192), 64, 8192, 0.0f, 0.5f);
expectValuesConsistent (MPEValue::fromUnsignedFloat (0.5f), 64, 8192, 0.0f, 0.5f);
expectValuesConsistent (MPEValue::fromSignedFloat (0.0f), 64, 8192, 0.0f, 0.5f);
}
beginTest ("value halfway between min and centre");
{
expectValuesConsistent (MPEValue::from7BitInt (32), 32, 4096, -0.5f, 0.25f);
expectValuesConsistent (MPEValue::from14BitInt (4096), 32, 4096, -0.5f, 0.25f);
expectValuesConsistent (MPEValue::fromUnsignedFloat (0.25f), 32, 4096, -0.5f, 0.25f);
expectValuesConsistent (MPEValue::fromSignedFloat (-0.5f), 32, 4096, -0.5f, 0.25f);
}
}
private:
//==============================================================================
void expectValuesConsistent (MPEValue value,
int expectedValueAs7BitInt,
int expectedValueAs14BitInt,
float expectedValueAsSignedFloat,
float expectedValueAsUnsignedFloat)
{
expectEquals (value.as7BitInt(), expectedValueAs7BitInt);
expectEquals (value.as14BitInt(), expectedValueAs14BitInt);
expectFloatWithinRelativeError (value.asSignedFloat(), expectedValueAsSignedFloat, 0.0001f);
expectFloatWithinRelativeError (value.asUnsignedFloat(), expectedValueAsUnsignedFloat, 0.0001f);
}
//==============================================================================
void expectFloatWithinRelativeError (float actualValue, float expectedValue, float maxRelativeError)
{
const float maxAbsoluteError = jmax (1.0f, std::abs (expectedValue)) * maxRelativeError;
expect (std::abs (expectedValue - actualValue) < maxAbsoluteError);
}
};
static MPEValueTests MPEValueUnitTests;
#endif
} // namespace juce
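The 7-to-14-bit expansion above is deliberately asymmetric: values up to the centre are shifted left, while values above it are stretched so that 127 reaches 16383 rather than 127 << 7 = 16256. A small check (a sketch consistent with the unit tests in this file) makes that visible:
#include <juce_audio_basics/juce_audio_basics.h>
static bool check7BitExpansion()
{
    return juce::MPEValue::from7BitInt (0).as14BitInt()   == 0        // minimum maps exactly
        && juce::MPEValue::from7BitInt (64).as14BitInt()  == 8192     // centre maps exactly (64 << 7)
        && juce::MPEValue::from7BitInt (127).as14BitInt() == 16383;   // maximum is stretched to full scale
}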

+ 0
- 103
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEValue.h

@@ -1,103 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
This class represents a single value for any of the MPE
dimensions of control. It supports values with 7-bit or 14-bit resolutions
(corresponding to 1 or 2 MIDI bytes, respectively). It also offers helper
functions to query the value in a variety of representations that can be
useful in an audio or MIDI context.
@tags{Audio}
*/
class JUCE_API MPEValue
{
public:
//==============================================================================
/** Default constructor.
Constructs an MPEValue corresponding to the centre value.
*/
MPEValue() noexcept;
/** Constructs an MPEValue from an integer between 0 and 127
(using 7-bit precision).
*/
static MPEValue from7BitInt (int value) noexcept;
/** Constructs an MPEValue from an integer between 0 and 16383
(using 14-bit precision).
*/
static MPEValue from14BitInt (int value) noexcept;
/** Constructs an MPEValue from a float between 0.0f and 1.0f. */
static MPEValue fromUnsignedFloat (float value) noexcept;
/** Constructs an MPEValue from a float between -1.0f and 1.0f. */
static MPEValue fromSignedFloat (float value) noexcept;
/** Constructs an MPEValue corresponding to the centre value. */
static MPEValue centreValue() noexcept;
/** Constructs an MPEValue corresponding to the minimum value. */
static MPEValue minValue() noexcept;
/** Constructs an MPEValue corresponding to the maximum value. */
static MPEValue maxValue() noexcept;
/** Retrieves the current value as an integer between 0 and 127.
Information will be lost if the value was initialised with a precision
higher than 7-bit.
*/
int as7BitInt() const noexcept;
/** Retrieves the current value as an integer between 0 and 16383.
Resolution will be lost if the value was initialised with a precision
higher than 14-bit.
*/
int as14BitInt() const noexcept;
/** Retrieves the current value mapped to a float between -1.0f and 1.0f. */
float asSignedFloat() const noexcept;
/** Retrieves the current value mapped to a float between 0.0f and 1.0f. */
float asUnsignedFloat() const noexcept;
/** Returns true if two values are equal. */
bool operator== (const MPEValue& other) const noexcept;
/** Returns true if two values are not equal. */
bool operator!= (const MPEValue& other) const noexcept;
private:
//==============================================================================
MPEValue (int normalisedValue);
int normalisedValue = 8192;
};
} // namespace juce
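For reference, a minimal usage sketch of the MPEValue API declared above, using only calls from this header; the expected integer values are the ones checked by the MPEValueTests earlier in this diff.
#include <juce_audio_basics/juce_audio_basics.h>
// Construct an MPEValue from a signed float and read it back in the other
// representations; the expected results match the unit tests above.
void mpeValueSketch()
{
    const auto bend = juce::MPEValue::fromSignedFloat (-0.5f);
    jassert (bend.as7BitInt()  == 32);     // 7-bit view, reduced resolution
    jassert (bend.as14BitInt() == 4096);   // 14-bit view
    jassert (bend.asUnsignedFloat() > 0.24f && bend.asUnsignedFloat() < 0.26f); // ~0.25
    // centreValue() corresponds to the default-constructed value (14-bit 8192).
    jassert (juce::MPEValue() == juce::MPEValue::centreValue());
    juce::ignoreUnused (bend);
}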

+ 0
- 396
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEZoneLayout.cpp View File

@@ -1,396 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
MPEZoneLayout::MPEZoneLayout (MPEZone lower, MPEZone upper)
: lowerZone (lower), upperZone (upper)
{
}
MPEZoneLayout::MPEZoneLayout (MPEZone zone)
: lowerZone (zone.isLowerZone() ? zone : MPEZone()),
upperZone (! zone.isLowerZone() ? zone : MPEZone())
{
}
MPEZoneLayout::MPEZoneLayout (const MPEZoneLayout& other)
: lowerZone (other.lowerZone),
upperZone (other.upperZone)
{
}
MPEZoneLayout& MPEZoneLayout::operator= (const MPEZoneLayout& other)
{
lowerZone = other.lowerZone;
upperZone = other.upperZone;
sendLayoutChangeMessage();
return *this;
}
void MPEZoneLayout::sendLayoutChangeMessage()
{
listeners.call ([this] (Listener& l) { l.zoneLayoutChanged (*this); });
}
//==============================================================================
void MPEZoneLayout::setZone (bool isLower, int numMemberChannels, int perNotePitchbendRange, int masterPitchbendRange) noexcept
{
checkAndLimitZoneParameters (0, 15, numMemberChannels);
checkAndLimitZoneParameters (0, 96, perNotePitchbendRange);
checkAndLimitZoneParameters (0, 96, masterPitchbendRange);
if (isLower)
lowerZone = { MPEZone::Type::lower, numMemberChannels, perNotePitchbendRange, masterPitchbendRange };
else
upperZone = { MPEZone::Type::upper, numMemberChannels, perNotePitchbendRange, masterPitchbendRange };
if (numMemberChannels > 0)
{
auto totalChannels = lowerZone.numMemberChannels + upperZone.numMemberChannels;
if (totalChannels >= 15)
{
if (isLower)
upperZone.numMemberChannels = 14 - numMemberChannels;
else
lowerZone.numMemberChannels = 14 - numMemberChannels;
}
}
sendLayoutChangeMessage();
}
void MPEZoneLayout::setLowerZone (int numMemberChannels, int perNotePitchbendRange, int masterPitchbendRange) noexcept
{
setZone (true, numMemberChannels, perNotePitchbendRange, masterPitchbendRange);
}
void MPEZoneLayout::setUpperZone (int numMemberChannels, int perNotePitchbendRange, int masterPitchbendRange) noexcept
{
setZone (false, numMemberChannels, perNotePitchbendRange, masterPitchbendRange);
}
void MPEZoneLayout::clearAllZones()
{
lowerZone = { MPEZone::Type::lower, 0 };
upperZone = { MPEZone::Type::upper, 0 };
sendLayoutChangeMessage();
}
//==============================================================================
void MPEZoneLayout::processNextMidiEvent (const MidiMessage& message)
{
if (! message.isController())
return;
MidiRPNMessage rpn;
if (rpnDetector.parseControllerMessage (message.getChannel(),
message.getControllerNumber(),
message.getControllerValue(),
rpn))
{
processRpnMessage (rpn);
}
}
void MPEZoneLayout::processRpnMessage (MidiRPNMessage rpn)
{
if (rpn.parameterNumber == MPEMessages::zoneLayoutMessagesRpnNumber)
processZoneLayoutRpnMessage (rpn);
else if (rpn.parameterNumber == 0)
processPitchbendRangeRpnMessage (rpn);
}
void MPEZoneLayout::processZoneLayoutRpnMessage (MidiRPNMessage rpn)
{
if (rpn.value < 16)
{
if (rpn.channel == 1)
setLowerZone (rpn.value);
else if (rpn.channel == 16)
setUpperZone (rpn.value);
}
}
void MPEZoneLayout::updateMasterPitchbend (MPEZone& zone, int value)
{
if (zone.masterPitchbendRange != value)
{
checkAndLimitZoneParameters (0, 96, zone.masterPitchbendRange);
zone.masterPitchbendRange = value;
sendLayoutChangeMessage();
}
}
void MPEZoneLayout::updatePerNotePitchbendRange (MPEZone& zone, int value)
{
if (zone.perNotePitchbendRange != value)
{
checkAndLimitZoneParameters (0, 96, zone.perNotePitchbendRange);
zone.perNotePitchbendRange = value;
sendLayoutChangeMessage();
}
}
void MPEZoneLayout::processPitchbendRangeRpnMessage (MidiRPNMessage rpn)
{
if (rpn.channel == 1)
{
updateMasterPitchbend (lowerZone, rpn.value);
}
else if (rpn.channel == 16)
{
updateMasterPitchbend (upperZone, rpn.value);
}
else
{
if (lowerZone.isUsingChannelAsMemberChannel (rpn.channel))
updatePerNotePitchbendRange (lowerZone, rpn.value);
else if (upperZone.isUsingChannelAsMemberChannel (rpn.channel))
updatePerNotePitchbendRange (upperZone, rpn.value);
}
}
void MPEZoneLayout::processNextMidiBuffer (const MidiBuffer& buffer)
{
for (const auto metadata : buffer)
processNextMidiEvent (metadata.getMessage());
}
//==============================================================================
void MPEZoneLayout::addListener (Listener* const listenerToAdd) noexcept
{
listeners.add (listenerToAdd);
}
void MPEZoneLayout::removeListener (Listener* const listenerToRemove) noexcept
{
listeners.remove (listenerToRemove);
}
//==============================================================================
void MPEZoneLayout::checkAndLimitZoneParameters (int minValue, int maxValue,
int& valueToCheckAndLimit) noexcept
{
if (valueToCheckAndLimit < minValue || valueToCheckAndLimit > maxValue)
{
// if you hit this, one of the parameters you supplied for this zone
// was not within the allowed range!
// we fit this back into the allowed range here to maintain a valid
// state for the zone, but probably the resulting zone is not what you
// wanted it to be!
jassertfalse;
valueToCheckAndLimit = jlimit (minValue, maxValue, valueToCheckAndLimit);
}
}
//==============================================================================
//==============================================================================
#if JUCE_UNIT_TESTS
class MPEZoneLayoutTests : public UnitTest
{
public:
MPEZoneLayoutTests()
: UnitTest ("MPEZoneLayout class", UnitTestCategories::midi)
{}
void runTest() override
{
beginTest ("initialisation");
{
MPEZoneLayout layout;
expect (! layout.getLowerZone().isActive());
expect (! layout.getUpperZone().isActive());
}
beginTest ("adding zones");
{
MPEZoneLayout layout;
layout.setLowerZone (7);
expect (layout.getLowerZone().isActive());
expect (! layout.getUpperZone().isActive());
expectEquals (layout.getLowerZone().getMasterChannel(), 1);
expectEquals (layout.getLowerZone().numMemberChannels, 7);
layout.setUpperZone (7);
expect (layout.getLowerZone().isActive());
expect (layout.getUpperZone().isActive());
expectEquals (layout.getLowerZone().getMasterChannel(), 1);
expectEquals (layout.getLowerZone().numMemberChannels, 7);
expectEquals (layout.getUpperZone().getMasterChannel(), 16);
expectEquals (layout.getUpperZone().numMemberChannels, 7);
layout.setLowerZone (3);
expect (layout.getLowerZone().isActive());
expect (layout.getUpperZone().isActive());
expectEquals (layout.getLowerZone().getMasterChannel(), 1);
expectEquals (layout.getLowerZone().numMemberChannels, 3);
expectEquals (layout.getUpperZone().getMasterChannel(), 16);
expectEquals (layout.getUpperZone().numMemberChannels, 7);
layout.setUpperZone (3);
expect (layout.getLowerZone().isActive());
expect (layout.getUpperZone().isActive());
expectEquals (layout.getLowerZone().getMasterChannel(), 1);
expectEquals (layout.getLowerZone().numMemberChannels, 3);
expectEquals (layout.getUpperZone().getMasterChannel(), 16);
expectEquals (layout.getUpperZone().numMemberChannels, 3);
layout.setLowerZone (15);
expect (layout.getLowerZone().isActive());
expect (! layout.getUpperZone().isActive());
expectEquals (layout.getLowerZone().getMasterChannel(), 1);
expectEquals (layout.getLowerZone().numMemberChannels, 15);
}
beginTest ("clear all zones");
{
MPEZoneLayout layout;
expect (! layout.getLowerZone().isActive());
expect (! layout.getUpperZone().isActive());
layout.setLowerZone (7);
layout.setUpperZone (2);
expect (layout.getLowerZone().isActive());
expect (layout.getUpperZone().isActive());
layout.clearAllZones();
expect (! layout.getLowerZone().isActive());
expect (! layout.getUpperZone().isActive());
}
beginTest ("process MIDI buffers");
{
MPEZoneLayout layout;
MidiBuffer buffer;
buffer = MPEMessages::setLowerZone (7);
layout.processNextMidiBuffer (buffer);
expect (layout.getLowerZone().isActive());
expect (! layout.getUpperZone().isActive());
expectEquals (layout.getLowerZone().getMasterChannel(), 1);
expectEquals (layout.getLowerZone().numMemberChannels, 7);
buffer = MPEMessages::setUpperZone (7);
layout.processNextMidiBuffer (buffer);
expect (layout.getLowerZone().isActive());
expect (layout.getUpperZone().isActive());
expectEquals (layout.getLowerZone().getMasterChannel(), 1);
expectEquals (layout.getLowerZone().numMemberChannels, 7);
expectEquals (layout.getUpperZone().getMasterChannel(), 16);
expectEquals (layout.getUpperZone().numMemberChannels, 7);
{
buffer = MPEMessages::setLowerZone (10);
layout.processNextMidiBuffer (buffer);
expect (layout.getLowerZone().isActive());
expect (layout.getUpperZone().isActive());
expectEquals (layout.getLowerZone().getMasterChannel(), 1);
expectEquals (layout.getLowerZone().numMemberChannels, 10);
expectEquals (layout.getUpperZone().getMasterChannel(), 16);
expectEquals (layout.getUpperZone().numMemberChannels, 4);
buffer = MPEMessages::setLowerZone (10, 33, 44);
layout.processNextMidiBuffer (buffer);
expectEquals (layout.getLowerZone().numMemberChannels, 10);
expectEquals (layout.getLowerZone().perNotePitchbendRange, 33);
expectEquals (layout.getLowerZone().masterPitchbendRange, 44);
}
{
buffer = MPEMessages::setUpperZone (10);
layout.processNextMidiBuffer (buffer);
expect (layout.getLowerZone().isActive());
expect (layout.getUpperZone().isActive());
expectEquals (layout.getLowerZone().getMasterChannel(), 1);
expectEquals (layout.getLowerZone().numMemberChannels, 4);
expectEquals (layout.getUpperZone().getMasterChannel(), 16);
expectEquals (layout.getUpperZone().numMemberChannels, 10);
buffer = MPEMessages::setUpperZone (10, 33, 44);
layout.processNextMidiBuffer (buffer);
expectEquals (layout.getUpperZone().numMemberChannels, 10);
expectEquals (layout.getUpperZone().perNotePitchbendRange, 33);
expectEquals (layout.getUpperZone().masterPitchbendRange, 44);
}
buffer = MPEMessages::clearAllZones();
layout.processNextMidiBuffer (buffer);
expect (! layout.getLowerZone().isActive());
expect (! layout.getUpperZone().isActive());
}
beginTest ("process individual MIDI messages");
{
MPEZoneLayout layout;
layout.processNextMidiEvent ({ 0x80, 0x59, 0xd0 }); // unrelated note-off msg
layout.processNextMidiEvent ({ 0xb0, 0x64, 0x06 }); // RPN part 1
layout.processNextMidiEvent ({ 0xb0, 0x65, 0x00 }); // RPN part 2
layout.processNextMidiEvent ({ 0xb8, 0x0b, 0x66 }); // unrelated CC msg
layout.processNextMidiEvent ({ 0xb0, 0x06, 0x03 }); // RPN part 3
layout.processNextMidiEvent ({ 0x90, 0x60, 0x00 }); // unrelated note-on msg
expect (layout.getLowerZone().isActive());
expect (! layout.getUpperZone().isActive());
expectEquals (layout.getLowerZone().getMasterChannel(), 1);
expectEquals (layout.getLowerZone().numMemberChannels, 3);
expectEquals (layout.getLowerZone().perNotePitchbendRange, 48);
expectEquals (layout.getLowerZone().masterPitchbendRange, 2);
}
}
};
static MPEZoneLayoutTests MPEZoneLayoutUnitTests;
#endif
} // namespace juce
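As a usage sketch of the RPN handling implemented above: an object can keep a zone layout in sync with an incoming MIDI stream and observe changes through the Listener interface. ZoneWatcher is an illustrative name, not part of JUCE.
#include <juce_audio_basics/juce_audio_basics.h>
struct ZoneWatcher : private juce::MPEZoneLayout::Listener
{
    ZoneWatcher()  { layout.addListener (this); }
    ~ZoneWatcher() { layout.removeListener (this); }
    // Feed every incoming MIDI buffer through the layout; zone-layout RPNs
    // (RPN 6) and pitchbend-range RPNs (RPN 0) update it, everything else is ignored.
    void handleIncomingMidi (const juce::MidiBuffer& midi)
    {
        layout.processNextMidiBuffer (midi);
    }
private:
    void zoneLayoutChanged (const juce::MPEZoneLayout& l) override
    {
        DBG ("lower zone member channels: " << l.getLowerZone().numMemberChannels);
    }
    juce::MPEZoneLayout layout;
};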

+ 0
- 241
libs/juce7/source/modules/juce_audio_basics/mpe/juce_MPEZoneLayout.h View File

@@ -1,241 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
This struct represents an MPE zone.
It can either be a lower or an upper zone, where:
- A lower zone encompasses master channel 1 and an arbitrary number of ascending
MIDI channels, increasing from channel 2.
- An upper zone encompasses master channel 16 and an arbitrary number of descending
MIDI channels, decreasing from channel 15.
It also defines a pitchbend range (in semitones) to be applied for per-note pitchbends and
master pitchbends, respectively.
*/
struct MPEZone
{
enum class Type { lower, upper };
MPEZone() = default;
MPEZone (Type type, int memberChannels = 0, int perNotePitchbend = 48, int masterPitchbend = 2)
: zoneType (type),
numMemberChannels (memberChannels),
perNotePitchbendRange (perNotePitchbend),
masterPitchbendRange (masterPitchbend)
{}
bool isLowerZone() const noexcept { return zoneType == Type::lower; }
bool isUpperZone() const noexcept { return zoneType == Type::upper; }
bool isActive() const noexcept { return numMemberChannels > 0; }
int getMasterChannel() const noexcept { return isLowerZone() ? lowerZoneMasterChannel : upperZoneMasterChannel; }
int getFirstMemberChannel() const noexcept { return isLowerZone() ? lowerZoneMasterChannel + 1 : upperZoneMasterChannel - 1; }
int getLastMemberChannel() const noexcept { return isLowerZone() ? (lowerZoneMasterChannel + numMemberChannels)
: (upperZoneMasterChannel - numMemberChannels); }
bool isUsingChannelAsMemberChannel (int channel) const noexcept
{
return isLowerZone() ? (lowerZoneMasterChannel < channel && channel <= getLastMemberChannel())
: (channel < upperZoneMasterChannel && getLastMemberChannel() <= channel);
}
bool isUsing (int channel) const noexcept
{
return isUsingChannelAsMemberChannel (channel) || channel == getMasterChannel();
}
static auto tie (const MPEZone& z)
{
return std::tie (z.zoneType,
z.numMemberChannels,
z.perNotePitchbendRange,
z.masterPitchbendRange);
}
bool operator== (const MPEZone& other) const
{
return tie (*this) == tie (other);
}
bool operator!= (const MPEZone& other) const
{
return tie (*this) != tie (other);
}
//==============================================================================
static constexpr int lowerZoneMasterChannel = 1,
upperZoneMasterChannel = 16;
Type zoneType = Type::lower;
int numMemberChannels = 0;
int perNotePitchbendRange = 48;
int masterPitchbendRange = 2;
};
//==============================================================================
/**
This class represents the current MPE zone layout of a device capable of handling MPE.
An MPE device can have up to two zones: a lower zone with master channel 1 and
allocated MIDI channels increasing from channel 2, and an upper zone with master
channel 16 and allocated MIDI channels decreasing from channel 15. MPE mode is
enabled on a device when one of these zones is active and disabled when both
are inactive.
Use the MPEMessages helper class to convert the zone layout represented
by this object to MIDI message sequences that you can send to an Expressive
MIDI device to set its zone layout, add zones etc.
@see MPEInstrument
@tags{Audio}
*/
class JUCE_API MPEZoneLayout
{
public:
//==============================================================================
/** Creates a layout with inactive upper and lower zones. */
MPEZoneLayout() = default;
/** Creates a layout with the given upper and lower zones. */
MPEZoneLayout (MPEZone lower, MPEZone upper);
/** Creates a layout with a single upper or lower zone, leaving the other zone inactive. */
MPEZoneLayout (MPEZone singleZone);
MPEZoneLayout (const MPEZoneLayout& other);
MPEZoneLayout& operator= (const MPEZoneLayout& other);
bool operator== (const MPEZoneLayout& other) const { return lowerZone == other.lowerZone && upperZone == other.upperZone; }
bool operator!= (const MPEZoneLayout& other) const { return ! operator== (other); }
//==============================================================================
/** Returns a struct representing the lower MPE zone. */
MPEZone getLowerZone() const noexcept { return lowerZone; }
/** Returns a struct representing the upper MPE zone. */
MPEZone getUpperZone() const noexcept { return upperZone; }
/** Sets the lower zone of this layout. */
void setLowerZone (int numMemberChannels = 0,
int perNotePitchbendRange = 48,
int masterPitchbendRange = 2) noexcept;
/** Sets the upper zone of this layout. */
void setUpperZone (int numMemberChannels = 0,
int perNotePitchbendRange = 48,
int masterPitchbendRange = 2) noexcept;
/** Clears the lower and upper zones of this layout, making them both inactive
and disabling MPE mode.
*/
void clearAllZones();
/** Returns true if either of the zones are active. */
bool isActive() const { return lowerZone.isActive() || upperZone.isActive(); }
//==============================================================================
/** Pass incoming MIDI messages to an object of this class if you want the
zone layout to properly react to MPE RPN messages like an
MPE device.
RPN messages using MPEMessages::zoneLayoutMessagesRpnNumber will add or
remove zones; RPN 0 will set the per-note or master pitchbend ranges.
Any other MIDI messages will be ignored by this class.
@see MPEMessages
*/
void processNextMidiEvent (const MidiMessage& message);
/** Pass incoming MIDI buffers to an object of this class if you want the
zone layout to properly react to MPE RPN messages like an
MPE device.
RPN messages using MPEMessages::zoneLayoutMessagesRpnNumber will add or
remove zones; RPN 0 will set the per-note or master pitchbend ranges.
Any other MIDI messages will be ignored by this class.
@see MPEMessages
*/
void processNextMidiBuffer (const MidiBuffer& buffer);
//==============================================================================
/** Listener class. Derive from this class to allow your class to be
notified about changes to the zone layout.
*/
class Listener
{
public:
/** Destructor. */
virtual ~Listener() = default;
/** Implement this callback to be notified about any changes to this
MPEZoneLayout. Will be called whenever a zone is added, zones are
removed, or any zone's master or note pitchbend ranges change.
*/
virtual void zoneLayoutChanged (const MPEZoneLayout& layout) = 0;
};
//==============================================================================
/** Adds a listener. */
void addListener (Listener* const listenerToAdd) noexcept;
/** Removes a listener. */
void removeListener (Listener* const listenerToRemove) noexcept;
#ifndef DOXYGEN
using Zone = MPEZone;
#endif
private:
//==============================================================================
MPEZone lowerZone { MPEZone::Type::lower, 0 };
MPEZone upperZone { MPEZone::Type::upper, 0 };
MidiRPNDetector rpnDetector;
ListenerList<Listener> listeners;
//==============================================================================
void setZone (bool, int, int, int) noexcept;
void processRpnMessage (MidiRPNMessage);
void processZoneLayoutRpnMessage (MidiRPNMessage);
void processPitchbendRangeRpnMessage (MidiRPNMessage);
void updateMasterPitchbend (MPEZone&, int);
void updatePerNotePitchbendRange (MPEZone&, int);
void sendLayoutChangeMessage();
void checkAndLimitZoneParameters (int, int, int&) noexcept;
};
} // namespace juce
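A small sketch of the channel allocation the MPEZone struct above implies: a lower zone with seven members occupies master channel 1 plus channels 2 to 8, and an upper zone with seven members occupies master channel 16 plus channels 15 down to 9.
#include <juce_audio_basics/juce_audio_basics.h>
void zoneChannelSketch()
{
    juce::MPEZoneLayout layout;
    layout.setLowerZone (7);
    layout.setUpperZone (7);
    const auto lower = layout.getLowerZone();
    jassert (lower.getMasterChannel()      == 1);
    jassert (lower.getFirstMemberChannel() == 2);
    jassert (lower.getLastMemberChannel()  == 8);
    jassert (lower.isUsingChannelAsMemberChannel (5));
    const auto upper = layout.getUpperZone();
    jassert (upper.getMasterChannel()      == 16);
    jassert (upper.getFirstMemberChannel() == 15);
    jassert (upper.getLastMemberChannel()  == 9);
    juce::ignoreUnused (lower, upper);
}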

+ 0
- 344
libs/juce7/source/modules/juce_audio_basics/native/juce_mac_CoreAudioLayouts.h View File

@@ -1,344 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
#if ! defined (DOXYGEN) && (JUCE_MAC || JUCE_IOS)
struct CoreAudioLayouts
{
//==============================================================================
struct LayoutTagSpeakerList
{
AudioChannelLayoutTag tag;
AudioChannelSet::ChannelType channelTypes[16];
};
//==============================================================================
// This list has been derived from https://pastebin.com/24dQ4BPJ
// Apple channel labels have been replaced by JUCE channel names
// This means that some layouts will be identical in JUCE but not in CoreAudio
// In Apple's official definition the following tags exist with the same speaker layout and order
// even when *not* represented in JUCE channels
// kAudioChannelLayoutTag_Binaural = kAudioChannelLayoutTag_Stereo
// kAudioChannelLayoutTag_MPEG_5_0_B = kAudioChannelLayoutTag_Pentagonal
// kAudioChannelLayoutTag_ITU_2_2 = kAudioChannelLayoutTag_Quadraphonic
// kAudioChannelLayoutTag_AudioUnit_6_0 = kAudioChannelLayoutTag_Hexagonal
struct SpeakerLayoutTable : AudioChannelSet // save us some typing
{
template <typename... Items>
static constexpr auto getArray (Items... items)
{
return std::array<LayoutTagSpeakerList, sizeof... (items)> { { items... } };
}
static constexpr auto get()
{
using List = LayoutTagSpeakerList;
return getArray (List { kAudioChannelLayoutTag_Mono, { centre } },
List { kAudioChannelLayoutTag_Stereo, { left, right } },
List { kAudioChannelLayoutTag_MPEG_3_0_A, { left, right, centre } },
List { kAudioChannelLayoutTag_ITU_2_1, { left, right, centreSurround } },
List { kAudioChannelLayoutTag_MPEG_4_0_A, { left, right, centre, centreSurround } },
List { kAudioChannelLayoutTag_MPEG_5_0_A, { left, right, centre, leftSurround, rightSurround } },
List { kAudioChannelLayoutTag_MPEG_5_1_A, { left, right, centre, LFE, leftSurround, rightSurround } },
List { kAudioChannelLayoutTag_AudioUnit_6_0, { left, right, leftSurround, rightSurround, centre, centreSurround } },
List { kAudioChannelLayoutTag_MPEG_6_1_A, { left, right, centre, LFE, leftSurround, rightSurround, centreSurround } },
List { kAudioChannelLayoutTag_DTS_6_0_A, { leftSurroundSide, rightSurroundSide, left, right, leftSurround, rightSurround } },
List { kAudioChannelLayoutTag_DTS_6_1_A, { leftSurroundSide, rightSurroundSide, left, right, leftSurround, rightSurround, LFE } },
List { kAudioChannelLayoutTag_AudioUnit_7_0, { left, right, leftSurroundSide, rightSurroundSide, centre, leftSurroundRear, rightSurroundRear } },
List { kAudioChannelLayoutTag_AudioUnit_7_0_Front, { left, right, leftSurround, rightSurround, centre, leftCentre, rightCentre } },
List { kAudioChannelLayoutTag_MPEG_7_1_C, { left, right, centre, LFE, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear } },
List { kAudioChannelLayoutTag_MPEG_7_1_A, { left, right, centre, LFE, leftSurround, rightSurround, leftCentre, rightCentre } },
List { kAudioChannelLayoutTag_Ambisonic_B_Format, { ambisonicW, ambisonicX, ambisonicY, ambisonicZ } },
List { kAudioChannelLayoutTag_Quadraphonic, { left, right, leftSurround, rightSurround } },
List { kAudioChannelLayoutTag_Pentagonal, { left, right, leftSurroundRear, rightSurroundRear, centre } },
List { kAudioChannelLayoutTag_Hexagonal, { left, right, leftSurroundRear, rightSurroundRear, centre, centreSurround } },
List { kAudioChannelLayoutTag_Octagonal, { left, right, leftSurround, rightSurround, centre, centreSurround, wideLeft, wideRight } },
#if defined (MAC_OS_VERSION_11_0)
List { kAudioChannelLayoutTag_Atmos_5_1_4, { left, right, centre, LFE, leftSurround, rightSurround, topFrontLeft, topFrontRight, topRearLeft, topRearRight } },
List { kAudioChannelLayoutTag_Atmos_7_1_2, { left, right, centre, LFE, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear, topSideLeft, topSideRight } },
#endif
#if defined (MAC_OS_X_VERSION_10_15)
List { kAudioChannelLayoutTag_Atmos_5_1_2, { left, right, centre, LFE, leftSurround, rightSurround, topSideLeft, topSideRight } },
List { kAudioChannelLayoutTag_Atmos_7_1_4, { left, right, centre, LFE, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear, topFrontLeft, topFrontRight, topRearLeft, topRearRight } },
List { kAudioChannelLayoutTag_Atmos_9_1_6, { left, right, centre, LFE, leftSurroundSide, rightSurroundSide, leftSurroundRear, rightSurroundRear, wideLeft, wideRight, topFrontLeft, topFrontRight, topSideLeft, topSideRight, topRearLeft, topRearRight } },
#endif
// More uncommon layouts...
List { kAudioChannelLayoutTag_StereoHeadphones, { left, right } },
List { kAudioChannelLayoutTag_MatrixStereo, { left, right } },
List { kAudioChannelLayoutTag_MidSide, { centre, discreteChannel0 } },
List { kAudioChannelLayoutTag_XY, { ambisonicX, ambisonicY } },
List { kAudioChannelLayoutTag_Binaural, { left, right } },
List { kAudioChannelLayoutTag_Cube, { left, right, leftSurround, rightSurround, topFrontLeft, topFrontRight, topRearLeft, topRearRight } },
List { kAudioChannelLayoutTag_MPEG_3_0_B, { centre, left, right } },
List { kAudioChannelLayoutTag_MPEG_4_0_B, { centre, left, right, centreSurround } },
List { kAudioChannelLayoutTag_MPEG_5_0_B, { left, right, leftSurround, rightSurround, centre } },
List { kAudioChannelLayoutTag_MPEG_5_0_C, { left, centre, right, leftSurround, rightSurround } },
List { kAudioChannelLayoutTag_MPEG_5_0_D, { centre, left, right, leftSurround, rightSurround } },
List { kAudioChannelLayoutTag_MPEG_5_1_B, { left, right, leftSurround, rightSurround, centre, LFE } },
List { kAudioChannelLayoutTag_MPEG_5_1_C, { left, centre, right, leftSurround, rightSurround, LFE } },
List { kAudioChannelLayoutTag_MPEG_5_1_D, { centre, left, right, leftSurround, rightSurround, LFE } },
List { kAudioChannelLayoutTag_MPEG_7_1_B, { centre, leftCentre, rightCentre, left, right, leftSurround, rightSurround, LFE } },
List { kAudioChannelLayoutTag_Emagic_Default_7_1, { left, right, leftSurround, rightSurround, centre, LFE, leftCentre, rightCentre } },
List { kAudioChannelLayoutTag_SMPTE_DTV, { left, right, centre, LFE, leftSurround, rightSurround, discreteChannel0 /* leftMatrixTotal */, (ChannelType) (discreteChannel0 + 1) /* rightMatrixTotal */} },
List { kAudioChannelLayoutTag_ITU_2_2, { left, right, leftSurround, rightSurround } },
List { kAudioChannelLayoutTag_DVD_4, { left, right, LFE } },
List { kAudioChannelLayoutTag_DVD_5, { left, right, LFE, centreSurround } },
List { kAudioChannelLayoutTag_DVD_6, { left, right, LFE, leftSurround, rightSurround } },
List { kAudioChannelLayoutTag_DVD_10, { left, right, centre, LFE } },
List { kAudioChannelLayoutTag_DVD_11, { left, right, centre, LFE, centreSurround } },
List { kAudioChannelLayoutTag_DVD_18, { left, right, leftSurround, rightSurround, LFE } },
List { kAudioChannelLayoutTag_AAC_6_0, { centre, left, right, leftSurround, rightSurround, centreSurround } },
List { kAudioChannelLayoutTag_AAC_6_1, { centre, left, right, leftSurround, rightSurround, centreSurround, LFE } },
List { kAudioChannelLayoutTag_AAC_7_0, { centre, left, right, leftSurround, rightSurround, leftSurroundRear, rightSurroundRear } },
List { kAudioChannelLayoutTag_AAC_7_1_B, { centre, left, right, leftSurround, rightSurround, leftSurroundRear, rightSurroundRear, LFE } },
List { kAudioChannelLayoutTag_AAC_7_1_C, { centre, left, right, leftSurround, rightSurround, LFE, topFrontLeft, topFrontRight } },
List { kAudioChannelLayoutTag_AAC_Octagonal, { centre, left, right, leftSurround, rightSurround, leftSurroundRear, rightSurroundRear, centreSurround } },
List { kAudioChannelLayoutTag_TMH_10_2_std, { left, right, centre, topFrontCentre, leftSurroundSide, rightSurroundSide, leftSurround, rightSurround, topFrontLeft, topFrontRight, wideLeft, wideRight, topRearCentre, centreSurround, LFE, LFE2 } },
List { kAudioChannelLayoutTag_AC3_1_0_1, { centre, LFE } },
List { kAudioChannelLayoutTag_AC3_3_0, { left, centre, right } },
List { kAudioChannelLayoutTag_AC3_3_1, { left, centre, right, centreSurround } },
List { kAudioChannelLayoutTag_AC3_3_0_1, { left, centre, right, LFE } },
List { kAudioChannelLayoutTag_AC3_2_1_1, { left, right, centreSurround, LFE } },
List { kAudioChannelLayoutTag_AC3_3_1_1, { left, centre, right, centreSurround, LFE } },
List { kAudioChannelLayoutTag_EAC_6_0_A, { left, centre, right, leftSurround, rightSurround, centreSurround } },
List { kAudioChannelLayoutTag_EAC_7_0_A, { left, centre, right, leftSurround, rightSurround, leftSurroundRear, rightSurroundRear } },
List { kAudioChannelLayoutTag_EAC3_6_1_A, { left, centre, right, leftSurround, rightSurround, LFE, centreSurround } },
List { kAudioChannelLayoutTag_EAC3_6_1_B, { left, centre, right, leftSurround, rightSurround, LFE, centreSurround } },
List { kAudioChannelLayoutTag_EAC3_6_1_C, { left, centre, right, leftSurround, rightSurround, LFE, topFrontCentre } },
List { kAudioChannelLayoutTag_EAC3_7_1_A, { left, centre, right, leftSurround, rightSurround, LFE, leftSurroundRear, rightSurroundRear } },
List { kAudioChannelLayoutTag_EAC3_7_1_B, { left, centre, right, leftSurround, rightSurround, LFE, leftCentre, rightCentre } },
List { kAudioChannelLayoutTag_EAC3_7_1_C, { left, centre, right, leftSurround, rightSurround, LFE, leftSurroundSide, rightSurroundSide } },
List { kAudioChannelLayoutTag_EAC3_7_1_D, { left, centre, right, leftSurround, rightSurround, LFE, wideLeft, wideRight } },
List { kAudioChannelLayoutTag_EAC3_7_1_E, { left, centre, right, leftSurround, rightSurround, LFE, topFrontLeft, topFrontRight } },
List { kAudioChannelLayoutTag_EAC3_7_1_F, { left, centre, right, leftSurround, rightSurround, LFE, centreSurround, topMiddle } },
List { kAudioChannelLayoutTag_EAC3_7_1_G, { left, centre, right, leftSurround, rightSurround, LFE, centreSurround, topFrontCentre } },
List { kAudioChannelLayoutTag_EAC3_7_1_H, { left, centre, right, leftSurround, rightSurround, LFE, centreSurround, topFrontCentre } },
List { kAudioChannelLayoutTag_DTS_3_1, { centre, left, right, LFE } },
List { kAudioChannelLayoutTag_DTS_4_1, { centre, left, right, centreSurround, LFE } },
List { kAudioChannelLayoutTag_DTS_6_0_B, { centre, left, right, leftSurroundRear, rightSurroundRear, centreSurround } },
List { kAudioChannelLayoutTag_DTS_6_0_C, { centre, centreSurround, left, right, leftSurroundRear, rightSurroundRear } },
List { kAudioChannelLayoutTag_DTS_6_1_B, { centre, left, right, leftSurroundRear, rightSurroundRear, centreSurround, LFE } },
List { kAudioChannelLayoutTag_DTS_6_1_C, { centre, centreSurround, left, right, leftSurroundRear, rightSurroundRear, LFE } },
List { kAudioChannelLayoutTag_DTS_6_1_D, { centre, left, right, leftSurround, rightSurround, LFE, centreSurround } },
List { kAudioChannelLayoutTag_DTS_7_0, { leftCentre, centre, rightCentre, left, right, leftSurround, rightSurround } },
List { kAudioChannelLayoutTag_DTS_7_1, { leftCentre, centre, rightCentre, left, right, leftSurround, rightSurround, LFE } },
List { kAudioChannelLayoutTag_DTS_8_0_A, { leftCentre, rightCentre, left, right, leftSurround, rightSurround, leftSurroundRear, rightSurroundRear } },
List { kAudioChannelLayoutTag_DTS_8_0_B, { leftCentre, centre, rightCentre, left, right, leftSurround, centreSurround, rightSurround } },
List { kAudioChannelLayoutTag_DTS_8_1_A, { leftCentre, rightCentre, left, right, leftSurround, rightSurround, leftSurroundRear, rightSurroundRear, LFE } },
List { kAudioChannelLayoutTag_DTS_8_1_B, { leftCentre, centre, rightCentre, left, right, leftSurround, centreSurround, rightSurround, LFE } });
}
};
public:
//==============================================================================
enum
{
coreAudioHOASN3DLayoutTag = (190U<<16) | 0 // kAudioChannelLayoutTag_HOA_ACN_SN3D
};
//==============================================================================
/** Convert CoreAudio's native AudioChannelLayout to JUCE's AudioChannelSet.
Note that this method cannot preserve the order of channels.
*/
static AudioChannelSet fromCoreAudio (const AudioChannelLayout& layout)
{
return AudioChannelSet::channelSetWithChannels (getCoreAudioLayoutChannels (layout));
}
/** Convert CoreAudio's native AudioChannelLayoutTag to JUCE's AudioChannelSet.
Note that this method cannot preserve the order of channels.
*/
static AudioChannelSet fromCoreAudio (AudioChannelLayoutTag layoutTag)
{
return AudioChannelSet::channelSetWithChannels (getSpeakerLayoutForCoreAudioTag (layoutTag));
}
/** Convert JUCE's AudioChannelSet to CoreAudio's AudioChannelLayoutTag.
Note that this method cannot preserve the order of channels.
*/
static AudioChannelLayoutTag toCoreAudio (const AudioChannelSet& set)
{
if (set.getAmbisonicOrder() >= 0)
return coreAudioHOASN3DLayoutTag | static_cast<unsigned> (set.size());
for (const auto& item : SpeakerLayoutTable::get())
{
AudioChannelSet caSet;
for (int i = 0; i < numElementsInArray (item.channelTypes)
&& item.channelTypes[i] != AudioChannelSet::unknown; ++i)
caSet.addChannel (item.channelTypes[i]);
if (caSet == set)
return item.tag;
}
return kAudioChannelLayoutTag_DiscreteInOrder | static_cast<AudioChannelLayoutTag> (set.size());
}
static const Array<AudioChannelLayoutTag>& getKnownCoreAudioTags()
{
static Array<AudioChannelLayoutTag> tags (createKnownCoreAudioTags());
return tags;
}
//==============================================================================
/** Convert CoreAudio's native AudioChannelLayout to an array of JUCE ChannelTypes. */
static Array<AudioChannelSet::ChannelType> getCoreAudioLayoutChannels (const AudioChannelLayout& layout)
{
switch (layout.mChannelLayoutTag & 0xffff0000)
{
case kAudioChannelLayoutTag_UseChannelBitmap:
return AudioChannelSet::fromWaveChannelMask (static_cast<int> (layout.mChannelBitmap)).getChannelTypes();
case kAudioChannelLayoutTag_UseChannelDescriptions:
{
Array<AudioChannelSet::ChannelType> channels;
for (UInt32 i = 0; i < layout.mNumberChannelDescriptions; ++i)
channels.addIfNotAlreadyThere (getChannelTypeFromAudioChannelLabel (layout.mChannelDescriptions[i].mChannelLabel));
// different speaker mappings may point to the same JUCE speaker so fill up
// this array with discrete channels
for (int j = 0; channels.size() < static_cast<int> (layout.mNumberChannelDescriptions); ++j)
channels.addIfNotAlreadyThere (static_cast<AudioChannelSet::ChannelType> (AudioChannelSet::discreteChannel0 + j));
return channels;
}
case kAudioChannelLayoutTag_DiscreteInOrder:
return AudioChannelSet::discreteChannels (static_cast<int> (layout.mChannelLayoutTag) & 0xffff).getChannelTypes();
default:
break;
}
return getSpeakerLayoutForCoreAudioTag (layout.mChannelLayoutTag);
}
static Array<AudioChannelSet::ChannelType> getSpeakerLayoutForCoreAudioTag (AudioChannelLayoutTag tag)
{
// You need to specify the full AudioChannelLayout when using
// the UseChannelBitmap and UseChannelDescriptions layout tag
jassert (tag != kAudioChannelLayoutTag_UseChannelBitmap && tag != kAudioChannelLayoutTag_UseChannelDescriptions);
Array<AudioChannelSet::ChannelType> speakers;
for (const auto& item : SpeakerLayoutTable::get())
{
if (tag == item.tag)
{
for (int i = 0; i < numElementsInArray (item.channelTypes)
&& item.channelTypes[i] != AudioChannelSet::unknown; ++i)
speakers.add (item.channelTypes[i]);
return speakers;
}
}
auto numChannels = tag & 0xffff;
if (tag >= coreAudioHOASN3DLayoutTag && tag <= (coreAudioHOASN3DLayoutTag | 0xffff))
{
auto sqrtMinusOne = std::sqrt (static_cast<float> (numChannels)) - 1.0f;
auto ambisonicOrder = jmax (0, static_cast<int> (std::floor (sqrtMinusOne)));
if (static_cast<float> (ambisonicOrder) == sqrtMinusOne)
return AudioChannelSet::ambisonic (ambisonicOrder).getChannelTypes();
}
for (UInt32 i = 0; i < numChannels; ++i)
speakers.add (static_cast<AudioChannelSet::ChannelType> (AudioChannelSet::discreteChannel0 + i));
return speakers;
}
private:
static Array<AudioChannelLayoutTag> createKnownCoreAudioTags()
{
Array<AudioChannelLayoutTag> tags;
for (const auto& item : SpeakerLayoutTable::get())
tags.addIfNotAlreadyThere (item.tag);
for (unsigned order = 0; order <= 5; ++order)
tags.addIfNotAlreadyThere (coreAudioHOASN3DLayoutTag | ((order + 1) * (order + 1)));
return tags;
}
//==============================================================================
static AudioChannelSet::ChannelType getChannelTypeFromAudioChannelLabel (AudioChannelLabel label) noexcept
{
if (label >= kAudioChannelLabel_Discrete_0 && label <= kAudioChannelLabel_Discrete_65535)
{
const unsigned int discreteChannelNum = label - kAudioChannelLabel_Discrete_0;
return static_cast<AudioChannelSet::ChannelType> (AudioChannelSet::discreteChannel0 + discreteChannelNum);
}
switch (label)
{
case kAudioChannelLabel_Center:
case kAudioChannelLabel_Mono: return AudioChannelSet::centre;
case kAudioChannelLabel_Left:
case kAudioChannelLabel_HeadphonesLeft: return AudioChannelSet::left;
case kAudioChannelLabel_Right:
case kAudioChannelLabel_HeadphonesRight: return AudioChannelSet::right;
case kAudioChannelLabel_LFEScreen: return AudioChannelSet::LFE;
case kAudioChannelLabel_LeftSurround: return AudioChannelSet::leftSurround;
case kAudioChannelLabel_RightSurround: return AudioChannelSet::rightSurround;
case kAudioChannelLabel_LeftCenter: return AudioChannelSet::leftCentre;
case kAudioChannelLabel_RightCenter: return AudioChannelSet::rightCentre;
case kAudioChannelLabel_CenterSurround: return AudioChannelSet::surround;
case kAudioChannelLabel_LeftSurroundDirect: return AudioChannelSet::leftSurroundSide;
case kAudioChannelLabel_RightSurroundDirect: return AudioChannelSet::rightSurroundSide;
case kAudioChannelLabel_TopCenterSurround: return AudioChannelSet::topMiddle;
case kAudioChannelLabel_VerticalHeightLeft: return AudioChannelSet::topFrontLeft;
case kAudioChannelLabel_VerticalHeightRight: return AudioChannelSet::topFrontRight;
case kAudioChannelLabel_VerticalHeightCenter: return AudioChannelSet::topFrontCentre;
case kAudioChannelLabel_TopBackLeft: return AudioChannelSet::topRearLeft;
case kAudioChannelLabel_RearSurroundLeft: return AudioChannelSet::leftSurroundRear;
case kAudioChannelLabel_TopBackRight: return AudioChannelSet::topRearRight;
case kAudioChannelLabel_RearSurroundRight: return AudioChannelSet::rightSurroundRear;
case kAudioChannelLabel_TopBackCenter: return AudioChannelSet::topRearCentre;
case kAudioChannelLabel_LFE2: return AudioChannelSet::LFE2;
case kAudioChannelLabel_LeftWide: return AudioChannelSet::wideLeft;
case kAudioChannelLabel_RightWide: return AudioChannelSet::wideRight;
case kAudioChannelLabel_Ambisonic_W: return AudioChannelSet::ambisonicW;
case kAudioChannelLabel_Ambisonic_X: return AudioChannelSet::ambisonicX;
case kAudioChannelLabel_Ambisonic_Y: return AudioChannelSet::ambisonicY;
case kAudioChannelLabel_Ambisonic_Z: return AudioChannelSet::ambisonicZ;
default: return AudioChannelSet::unknown;
}
}
};
#endif
} // namespace juce
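CoreAudioLayouts is an internal helper compiled only into JUCE's macOS/iOS code, but the conversion it provides boils down to the round trip below. This is a sketch that assumes it is called from a translation unit which already includes this header and the CoreAudio types.
#if JUCE_MAC || JUCE_IOS
void coreAudioLayoutSketch()
{
    const auto set = juce::AudioChannelSet::create5point1();   // L R C LFE Ls Rs
    // Known JUCE layouts map onto one of the tags in SpeakerLayoutTable
    // (5.1 corresponds to kAudioChannelLayoutTag_MPEG_5_1_A)...
    const AudioChannelLayoutTag tag = juce::CoreAudioLayouts::toCoreAudio (set);
    // ...and can be mapped back, although the channel order is not preserved.
    jassert (juce::CoreAudioLayouts::fromCoreAudio (tag) == set);
    // Sets without a matching tag fall back to a DiscreteInOrder tag carrying
    // just the channel count, or to an HOA tag for ambisonic sets.
    juce::ignoreUnused (set, tag);
}
#endif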

+ 0
- 80
libs/juce7/source/modules/juce_audio_basics/native/juce_mac_CoreAudioTimeConversions.h View File

@@ -1,80 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
// This file will be included directly by macOS/iOS-specific .cpps
#pragma once
#if ! DOXYGEN
#include <mach/mach_time.h>
namespace juce
{
struct CoreAudioTimeConversions
{
public:
CoreAudioTimeConversions()
{
mach_timebase_info_data_t info{};
mach_timebase_info (&info);
numerator = info.numer;
denominator = info.denom;
}
uint64_t hostTimeToNanos (uint64_t hostTime) const
{
return multiplyByRatio (hostTime, numerator, denominator);
}
uint64_t nanosToHostTime (uint64_t nanos) const
{
return multiplyByRatio (nanos, denominator, numerator);
}
private:
// Adapted from CAHostTimeBase.h in the Core Audio Utility Classes
static uint64_t multiplyByRatio (uint64_t toMultiply, uint64_t numerator, uint64_t denominator)
{
#if defined (__SIZEOF_INT128__)
unsigned __int128
#else
long double
#endif
result = toMultiply;
if (numerator != denominator)
{
result *= numerator;
result /= denominator;
}
return (uint64_t) result;
}
uint64_t numerator = 0, denominator = 0;
};
} // namespace juce
#endif
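The conversion above is simply hostTime * numer / denom (and the inverse), with the ratio captured once from mach_timebase_info. A sketch of how the macOS-specific code can use it, assuming it is compiled alongside the header above:
#if JUCE_MAC || JUCE_IOS
#include <mach/mach_time.h>
void hostTimeSketch()
{
    const juce::CoreAudioTimeConversions converter;
    const uint64_t nowTicks = mach_absolute_time();              // mach host ticks
    const uint64_t nowNanos = converter.hostTimeToNanos (nowTicks);
    // The inverse applies the same ratio the other way round, so a round trip
    // gives back approximately the original tick count (integer rounding aside).
    const uint64_t roundTrip = converter.nanosToHostTime (nowNanos);
    juce::ignoreUnused (roundTrip);
}
#endif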

+ 0
- 179
libs/juce7/source/modules/juce_audio_basics/sources/juce_AudioSource.h View File

@@ -1,179 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
Used by AudioSource::getNextAudioBlock().
@tags{Audio}
*/
struct JUCE_API AudioSourceChannelInfo
{
/** Creates an uninitialised AudioSourceChannelInfo. */
AudioSourceChannelInfo() = default;
/** Creates an AudioSourceChannelInfo. */
AudioSourceChannelInfo (AudioBuffer<float>* bufferToUse,
int startSampleOffset, int numSamplesToUse) noexcept
: buffer (bufferToUse),
startSample (startSampleOffset),
numSamples (numSamplesToUse)
{
}
/** Creates an AudioSourceChannelInfo that uses the whole of a buffer.
Note that the buffer provided must not be deleted while the
AudioSourceChannelInfo is still using it.
*/
explicit AudioSourceChannelInfo (AudioBuffer<float>& bufferToUse) noexcept
: buffer (&bufferToUse),
startSample (0),
numSamples (bufferToUse.getNumSamples())
{
}
/** The destination buffer to fill with audio data.
When the AudioSource::getNextAudioBlock() method is called, the active section
of this buffer should be filled with whatever output the source produces.
Only the samples specified by the startSample and numSamples members of this structure
should be affected by the call.
The contents of the buffer when it is passed to the AudioSource::getNextAudioBlock()
method can be treated as the input if the source is performing some kind of filter operation,
but should be cleared if this is not the case - the clearActiveBufferRegion() is
a handy way of doing this.
The number of channels in the buffer could be anything, so the AudioSource
must cope with this in whatever way is appropriate for its function.
*/
AudioBuffer<float>* buffer;
/** The first sample in the buffer from which the callback is expected
to write data. */
int startSample;
/** The number of samples in the buffer which the callback is expected to
fill with data. */
int numSamples;
/** Convenient method to clear the buffer if the source is not producing any data. */
void clearActiveBufferRegion() const
{
if (buffer != nullptr)
buffer->clear (startSample, numSamples);
}
};
//==============================================================================
/**
Base class for objects that can produce a continuous stream of audio.
An AudioSource has two states: 'prepared' and 'unprepared'.
When a source needs to be played, it is first put into a 'prepared' state by a call to
prepareToPlay(), and then repeated calls will be made to its getNextAudioBlock() method to
process the audio data.
Once playback has finished, the releaseResources() method is called to put the stream
back into an 'unprepared' state.
@see AudioFormatReaderSource, ResamplingAudioSource
@tags{Audio}
*/
class JUCE_API AudioSource
{
protected:
//==============================================================================
/** Creates an AudioSource. */
AudioSource() = default;
public:
/** Destructor. */
virtual ~AudioSource() = default;
//==============================================================================
/** Tells the source to prepare for playing.
An AudioSource has two states: prepared and unprepared.
The prepareToPlay() method is guaranteed to be called at least once on an 'unprepared'
source to put it into a 'prepared' state before any calls will be made to getNextAudioBlock().
This callback allows the source to initialise any resources it might need when playing.
Once playback has finished, the releaseResources() method is called to put the stream
back into an 'unprepared' state.
Note that this method could be called more than once in succession without
a matching call to releaseResources(), so make sure your code is robust and
can handle that kind of situation.
@param samplesPerBlockExpected the number of samples that the source
will be expected to supply each time its
getNextAudioBlock() method is called. This
number may vary slightly, because it will be dependent
on audio hardware callbacks, and these aren't
guaranteed to always use a constant block size, so
the source should be able to cope with small variations.
@param sampleRate the sample rate that the output will be used at - this
is needed by sources such as tone generators.
@see releaseResources, getNextAudioBlock
*/
virtual void prepareToPlay (int samplesPerBlockExpected,
double sampleRate) = 0;
/** Allows the source to release anything it no longer needs after playback has stopped.
This will be called when the source is no longer going to have its getNextAudioBlock()
method called, so it should release any spare memory, etc. that it might have
allocated during the prepareToPlay() call.
Note that there's no guarantee that prepareToPlay() will actually have been called before
releaseResources(), and it may be called more than once in succession, so make sure your
code is robust and doesn't make any assumptions about when it will be called.
@see prepareToPlay, getNextAudioBlock
*/
virtual void releaseResources() = 0;
/** Called repeatedly to fetch subsequent blocks of audio data.
After calling the prepareToPlay() method, this callback will be made each
time the audio playback hardware (or whatever other destination the audio
data is going to) needs another block of data.
It will generally be called on a high-priority system thread, or possibly even
an interrupt, so be careful not to do too much work here, as that will cause
audio glitches!
@see AudioSourceChannelInfo, prepareToPlay, releaseResources
*/
virtual void getNextAudioBlock (const AudioSourceChannelInfo& bufferToFill) = 0;
};
} // namespace juce
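A minimal concrete AudioSource, to illustrate the contract described above: only the region given by startSample and numSamples is written, and the source copes with whatever channel count it is handed. NoiseSource is an illustrative name.
#include <juce_audio_basics/juce_audio_basics.h>
class NoiseSource : public juce::AudioSource
{
public:
    void prepareToPlay (int /*samplesPerBlockExpected*/, double /*sampleRate*/) override {}
    void releaseResources() override {}
    void getNextAudioBlock (const juce::AudioSourceChannelInfo& info) override
    {
        // Fill only the active region of the destination buffer, for however
        // many channels the caller provides.
        for (int ch = 0; ch < info.buffer->getNumChannels(); ++ch)
        {
            auto* dest = info.buffer->getWritePointer (ch, info.startSample);
            for (int i = 0; i < info.numSamples; ++i)
                dest[i] = random.nextFloat() * 0.5f - 0.25f;    // quiet white noise
        }
    }
private:
    juce::Random random;
};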

+ 0
- 329
libs/juce7/source/modules/juce_audio_basics/sources/juce_BufferingAudioSource.cpp View File

@@ -1,329 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
BufferingAudioSource::BufferingAudioSource (PositionableAudioSource* s,
TimeSliceThread& thread,
bool deleteSourceWhenDeleted,
int bufferSizeSamples,
int numChannels,
bool prefillBufferOnPrepareToPlay)
: source (s, deleteSourceWhenDeleted),
backgroundThread (thread),
numberOfSamplesToBuffer (jmax (1024, bufferSizeSamples)),
numberOfChannels (numChannels),
prefillBuffer (prefillBufferOnPrepareToPlay)
{
jassert (source != nullptr);
jassert (numberOfSamplesToBuffer > 1024); // not much point using this class if you're
// not using a larger buffer..
}
BufferingAudioSource::~BufferingAudioSource()
{
releaseResources();
}
//==============================================================================
void BufferingAudioSource::prepareToPlay (int samplesPerBlockExpected, double newSampleRate)
{
auto bufferSizeNeeded = jmax (samplesPerBlockExpected * 2, numberOfSamplesToBuffer);
if (newSampleRate != sampleRate
|| bufferSizeNeeded != buffer.getNumSamples()
|| ! isPrepared)
{
backgroundThread.removeTimeSliceClient (this);
isPrepared = true;
sampleRate = newSampleRate;
source->prepareToPlay (samplesPerBlockExpected, newSampleRate);
buffer.setSize (numberOfChannels, bufferSizeNeeded);
buffer.clear();
const ScopedLock sl (bufferRangeLock);
bufferValidStart = 0;
bufferValidEnd = 0;
backgroundThread.addTimeSliceClient (this);
do
{
const ScopedUnlock ul (bufferRangeLock);
backgroundThread.moveToFrontOfQueue (this);
Thread::sleep (5);
}
while (prefillBuffer
&& (bufferValidEnd - bufferValidStart < jmin (((int) newSampleRate) / 4, buffer.getNumSamples() / 2)));
}
}
void BufferingAudioSource::releaseResources()
{
isPrepared = false;
backgroundThread.removeTimeSliceClient (this);
buffer.setSize (numberOfChannels, 0);
// MSVC2017 seems to need this if statement to not generate a warning during linking.
// As source is set in the constructor, there is no way that source could
// ever equal this, but it seems to make MSVC2017 happy.
if (source != this)
source->releaseResources();
}
void BufferingAudioSource::getNextAudioBlock (const AudioSourceChannelInfo& info)
{
const auto bufferRange = getValidBufferRange (info.numSamples);
if (bufferRange.isEmpty())
{
// total cache miss
info.clearActiveBufferRegion();
return;
}
const auto validStart = bufferRange.getStart();
const auto validEnd = bufferRange.getEnd();
const ScopedLock sl (callbackLock);
if (validStart > 0)
info.buffer->clear (info.startSample, validStart); // partial cache miss at start
if (validEnd < info.numSamples)
info.buffer->clear (info.startSample + validEnd,
info.numSamples - validEnd); // partial cache miss at end
if (validStart < validEnd)
{
for (int chan = jmin (numberOfChannels, info.buffer->getNumChannels()); --chan >= 0;)
{
jassert (buffer.getNumSamples() > 0);
const auto startBufferIndex = (int) ((validStart + nextPlayPos) % buffer.getNumSamples());
const auto endBufferIndex = (int) ((validEnd + nextPlayPos) % buffer.getNumSamples());
if (startBufferIndex < endBufferIndex)
{
info.buffer->copyFrom (chan, info.startSample + validStart,
buffer,
chan, startBufferIndex,
validEnd - validStart);
}
else
{
const auto initialSize = buffer.getNumSamples() - startBufferIndex;
info.buffer->copyFrom (chan, info.startSample + validStart,
buffer,
chan, startBufferIndex,
initialSize);
info.buffer->copyFrom (chan, info.startSample + validStart + initialSize,
buffer,
chan, 0,
(validEnd - validStart) - initialSize);
}
}
}
nextPlayPos += info.numSamples;
}
bool BufferingAudioSource::waitForNextAudioBlockReady (const AudioSourceChannelInfo& info, uint32 timeout)
{
if (source == nullptr || source->getTotalLength() <= 0)
return false;
if ((nextPlayPos + info.numSamples < 0)
|| (! isLooping() && nextPlayPos > getTotalLength()))
return true;
const auto startTime = Time::getMillisecondCounter();
auto now = startTime;
auto elapsed = (now >= startTime ? now - startTime
: (std::numeric_limits<uint32>::max() - startTime) + now);
while (elapsed <= timeout)
{
const auto bufferRange = getValidBufferRange (info.numSamples);
const auto validStart = bufferRange.getStart();
const auto validEnd = bufferRange.getEnd();
if (validStart <= 0
&& validStart < validEnd
&& validEnd >= info.numSamples)
{
return true;
}
if (elapsed < timeout
&& ! bufferReadyEvent.wait (static_cast<int> (timeout - elapsed)))
{
return false;
}
now = Time::getMillisecondCounter();
elapsed = (now >= startTime ? now - startTime
: (std::numeric_limits<uint32>::max() - startTime) + now);
}
return false;
}
int64 BufferingAudioSource::getNextReadPosition() const
{
jassert (source->getTotalLength() > 0);
const auto pos = nextPlayPos.load();
return (source->isLooping() && nextPlayPos > 0)
? pos % source->getTotalLength()
: pos;
}
void BufferingAudioSource::setNextReadPosition (int64 newPosition)
{
const ScopedLock sl (bufferRangeLock);
nextPlayPos = newPosition;
backgroundThread.moveToFrontOfQueue (this);
}
Range<int> BufferingAudioSource::getValidBufferRange (int numSamples) const
{
const ScopedLock sl (bufferRangeLock);
const auto pos = nextPlayPos.load();
return { (int) (jlimit (bufferValidStart, bufferValidEnd, pos) - pos),
(int) (jlimit (bufferValidStart, bufferValidEnd, pos + numSamples) - pos) };
}
bool BufferingAudioSource::readNextBufferChunk()
{
int64 newBVS, newBVE, sectionToReadStart, sectionToReadEnd;
{
const ScopedLock sl (bufferRangeLock);
if (wasSourceLooping != isLooping())
{
wasSourceLooping = isLooping();
bufferValidStart = 0;
bufferValidEnd = 0;
}
newBVS = jmax ((int64) 0, nextPlayPos.load());
newBVE = newBVS + buffer.getNumSamples() - 4;
sectionToReadStart = 0;
sectionToReadEnd = 0;
constexpr int maxChunkSize = 2048;
if (newBVS < bufferValidStart || newBVS >= bufferValidEnd)
{
newBVE = jmin (newBVE, newBVS + maxChunkSize);
sectionToReadStart = newBVS;
sectionToReadEnd = newBVE;
bufferValidStart = 0;
bufferValidEnd = 0;
}
else if (std::abs ((int) (newBVS - bufferValidStart)) > 512
|| std::abs ((int) (newBVE - bufferValidEnd)) > 512)
{
newBVE = jmin (newBVE, bufferValidEnd + maxChunkSize);
sectionToReadStart = bufferValidEnd;
sectionToReadEnd = newBVE;
bufferValidStart = newBVS;
bufferValidEnd = jmin (bufferValidEnd, newBVE);
}
}
if (sectionToReadStart == sectionToReadEnd)
return false;
jassert (buffer.getNumSamples() > 0);
const auto bufferIndexStart = (int) (sectionToReadStart % buffer.getNumSamples());
const auto bufferIndexEnd = (int) (sectionToReadEnd % buffer.getNumSamples());
if (bufferIndexStart < bufferIndexEnd)
{
readBufferSection (sectionToReadStart,
(int) (sectionToReadEnd - sectionToReadStart),
bufferIndexStart);
}
else
{
const auto initialSize = buffer.getNumSamples() - bufferIndexStart;
readBufferSection (sectionToReadStart,
initialSize,
bufferIndexStart);
readBufferSection (sectionToReadStart + initialSize,
(int) (sectionToReadEnd - sectionToReadStart) - initialSize,
0);
}
{
const ScopedLock sl2 (bufferRangeLock);
bufferValidStart = newBVS;
bufferValidEnd = newBVE;
}
bufferReadyEvent.signal();
return true;
}
void BufferingAudioSource::readBufferSection (int64 start, int length, int bufferOffset)
{
if (source->getNextReadPosition() != start)
source->setNextReadPosition (start);
AudioSourceChannelInfo info (&buffer, bufferOffset, length);
const ScopedLock sl (callbackLock);
source->getNextAudioBlock (info);
}
int BufferingAudioSource::useTimeSlice()
{
return readNextBufferChunk() ? 1 : 100;
}
} // namespace juce

+ 0
- 124
libs/juce7/source/modules/juce_audio_basics/sources/juce_BufferingAudioSource.h

@@ -1,124 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
An AudioSource which takes another source as input, and buffers it using a thread.
Create this as a wrapper around another source, and it will read ahead on a
background thread to smooth out playback. You can either create one of these
directly, or use it indirectly via an AudioTransportSource.
@see PositionableAudioSource, AudioTransportSource
@tags{Audio}
*/
class JUCE_API BufferingAudioSource : public PositionableAudioSource,
private TimeSliceClient
{
public:
//==============================================================================
/** Creates a BufferingAudioSource.
@param source the input source to read from
@param backgroundThread a background thread that will be used for the
background read-ahead. This object must not be deleted
until after any BufferingAudioSources that are using it
have been deleted!
@param deleteSourceWhenDeleted if true, then the input source object will
be deleted when this object is deleted
@param numberOfSamplesToBuffer the size of buffer to use for reading ahead
@param numberOfChannels the number of channels that will be played
@param prefillBufferOnPrepareToPlay if true, then calling prepareToPlay on this object will
block until the buffer has been filled
*/
BufferingAudioSource (PositionableAudioSource* source,
TimeSliceThread& backgroundThread,
bool deleteSourceWhenDeleted,
int numberOfSamplesToBuffer,
int numberOfChannels = 2,
bool prefillBufferOnPrepareToPlay = true);
/** Destructor.
The input source may be deleted depending on whether the deleteSourceWhenDeleted
flag was set in the constructor.
*/
~BufferingAudioSource() override;
//==============================================================================
/** Implementation of the AudioSource method. */
void prepareToPlay (int samplesPerBlockExpected, double sampleRate) override;
/** Implementation of the AudioSource method. */
void releaseResources() override;
/** Implementation of the AudioSource method. */
void getNextAudioBlock (const AudioSourceChannelInfo&) override;
//==============================================================================
/** Implements the PositionableAudioSource method. */
void setNextReadPosition (int64 newPosition) override;
/** Implements the PositionableAudioSource method. */
int64 getNextReadPosition() const override;
/** Implements the PositionableAudioSource method. */
int64 getTotalLength() const override { return source->getTotalLength(); }
/** Implements the PositionableAudioSource method. */
bool isLooping() const override { return source->isLooping(); }
/** A useful function that blocks until the buffer contains enough data to fill the next audio block.
This is useful for offline rendering.
*/
bool waitForNextAudioBlockReady (const AudioSourceChannelInfo& info, const uint32 timeout);
private:
//==============================================================================
Range<int> getValidBufferRange (int numSamples) const;
bool readNextBufferChunk();
void readBufferSection (int64 start, int length, int bufferOffset);
int useTimeSlice() override;
//==============================================================================
OptionalScopedPointer<PositionableAudioSource> source;
TimeSliceThread& backgroundThread;
int numberOfSamplesToBuffer, numberOfChannels;
AudioBuffer<float> buffer;
CriticalSection callbackLock, bufferRangeLock;
WaitableEvent bufferReadyEvent;
int64 bufferValidStart = 0, bufferValidEnd = 0;
std::atomic<int64> nextPlayPos { 0 };
double sampleRate = 0;
bool wasSourceLooping = false, isPrepared = false;
const bool prefillBuffer;
//==============================================================================
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (BufferingAudioSource)
};
} // namespace juce
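A minimal usage sketch for the class above, assuming the JUCE module headers are available and that readerSource is an existing PositionableAudioSource* (a hypothetical name, e.g. pointing at an audio-file reader source):

// hypothetical setup: readerSource points at some PositionableAudioSource
TimeSliceThread readAheadThread ("audio read-ahead");
readAheadThread.startThread();

// buffer roughly two seconds of stereo audio ahead of the play position
BufferingAudioSource buffered (readerSource,
                               readAheadThread,
                               false,        // caller keeps ownership of readerSource
                               48000 * 2,    // samples to read ahead
                               2);           // stereo

buffered.prepareToPlay (512, 48000.0);
buffered.setNextReadPosition (0);

AudioBuffer<float> block (2, 512);
AudioSourceChannelInfo info (&block, 0, block.getNumSamples());

// for offline rendering, wait until the background thread has filled the buffer
if (buffered.waitForNextAudioBlockReady (info, 500))
    buffered.getNextAudioBlock (info);

buffered.releaseResources();
readAheadThread.stopThread (1000);

Note that the BufferingAudioSource must be destroyed before the TimeSliceThread it was given, as the class documentation above warns.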

+ 0
- 187
libs/juce7/source/modules/juce_audio_basics/sources/juce_ChannelRemappingAudioSource.cpp

@@ -1,187 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
ChannelRemappingAudioSource::ChannelRemappingAudioSource (AudioSource* const source_,
const bool deleteSourceWhenDeleted)
: source (source_, deleteSourceWhenDeleted),
requiredNumberOfChannels (2)
{
remappedInfo.buffer = &buffer;
remappedInfo.startSample = 0;
}
ChannelRemappingAudioSource::~ChannelRemappingAudioSource() {}
//==============================================================================
void ChannelRemappingAudioSource::setNumberOfChannelsToProduce (const int requiredNumberOfChannels_)
{
const ScopedLock sl (lock);
requiredNumberOfChannels = requiredNumberOfChannels_;
}
void ChannelRemappingAudioSource::clearAllMappings()
{
const ScopedLock sl (lock);
remappedInputs.clear();
remappedOutputs.clear();
}
void ChannelRemappingAudioSource::setInputChannelMapping (const int destIndex, const int sourceIndex)
{
const ScopedLock sl (lock);
while (remappedInputs.size() < destIndex)
remappedInputs.add (-1);
remappedInputs.set (destIndex, sourceIndex);
}
void ChannelRemappingAudioSource::setOutputChannelMapping (const int sourceIndex, const int destIndex)
{
const ScopedLock sl (lock);
while (remappedOutputs.size() < sourceIndex)
remappedOutputs.add (-1);
remappedOutputs.set (sourceIndex, destIndex);
}
int ChannelRemappingAudioSource::getRemappedInputChannel (const int inputChannelIndex) const
{
const ScopedLock sl (lock);
if (inputChannelIndex >= 0 && inputChannelIndex < remappedInputs.size())
return remappedInputs.getUnchecked (inputChannelIndex);
return -1;
}
int ChannelRemappingAudioSource::getRemappedOutputChannel (const int outputChannelIndex) const
{
const ScopedLock sl (lock);
if (outputChannelIndex >= 0 && outputChannelIndex < remappedOutputs.size())
return remappedOutputs.getUnchecked (outputChannelIndex);
return -1;
}
//==============================================================================
void ChannelRemappingAudioSource::prepareToPlay (int samplesPerBlockExpected, double sampleRate)
{
source->prepareToPlay (samplesPerBlockExpected, sampleRate);
}
void ChannelRemappingAudioSource::releaseResources()
{
source->releaseResources();
}
void ChannelRemappingAudioSource::getNextAudioBlock (const AudioSourceChannelInfo& bufferToFill)
{
const ScopedLock sl (lock);
buffer.setSize (requiredNumberOfChannels, bufferToFill.numSamples, false, false, true);
const int numChans = bufferToFill.buffer->getNumChannels();
for (int i = 0; i < buffer.getNumChannels(); ++i)
{
const int remappedChan = getRemappedInputChannel (i);
if (remappedChan >= 0 && remappedChan < numChans)
{
buffer.copyFrom (i, 0, *bufferToFill.buffer,
remappedChan,
bufferToFill.startSample,
bufferToFill.numSamples);
}
else
{
buffer.clear (i, 0, bufferToFill.numSamples);
}
}
remappedInfo.numSamples = bufferToFill.numSamples;
source->getNextAudioBlock (remappedInfo);
bufferToFill.clearActiveBufferRegion();
for (int i = 0; i < requiredNumberOfChannels; ++i)
{
const int remappedChan = getRemappedOutputChannel (i);
if (remappedChan >= 0 && remappedChan < numChans)
{
bufferToFill.buffer->addFrom (remappedChan, bufferToFill.startSample,
buffer, i, 0, bufferToFill.numSamples);
}
}
}
//==============================================================================
std::unique_ptr<XmlElement> ChannelRemappingAudioSource::createXml() const
{
auto e = std::make_unique<XmlElement> ("MAPPINGS");
String ins, outs;
const ScopedLock sl (lock);
for (int i = 0; i < remappedInputs.size(); ++i)
ins << remappedInputs.getUnchecked(i) << ' ';
for (int i = 0; i < remappedOutputs.size(); ++i)
outs << remappedOutputs.getUnchecked(i) << ' ';
e->setAttribute ("inputs", ins.trimEnd());
e->setAttribute ("outputs", outs.trimEnd());
return e;
}
void ChannelRemappingAudioSource::restoreFromXml (const XmlElement& e)
{
if (e.hasTagName ("MAPPINGS"))
{
const ScopedLock sl (lock);
clearAllMappings();
StringArray ins, outs;
ins.addTokens (e.getStringAttribute ("inputs"), false);
outs.addTokens (e.getStringAttribute ("outputs"), false);
for (int i = 0; i < ins.size(); ++i)
remappedInputs.add (ins[i].getIntValue());
for (int i = 0; i < outs.size(); ++i)
remappedOutputs.add (outs[i].getIntValue());
}
}
} // namespace juce

+ 0
- 141
libs/juce7/source/modules/juce_audio_basics/sources/juce_ChannelRemappingAudioSource.h

@@ -1,141 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
An AudioSource that takes the audio from another source, and re-maps its
input and output channels to a different arrangement.
You can use this to increase or decrease the number of channels that an
audio source uses, or to re-order those channels.
Call clearAllMappings() to reset the mapping, and then use the
setInputChannelMapping() and setOutputChannelMapping() methods to
create an appropriate mapping; otherwise no channels will be connected and
it'll produce silence.
@see AudioSource
@tags{Audio}
*/
class ChannelRemappingAudioSource : public AudioSource
{
public:
//==============================================================================
/** Creates a remapping source that will pass on audio from the given input.
@param source the input source to use. Make sure that this doesn't
get deleted before the ChannelRemappingAudioSource object is deleted
@param deleteSourceWhenDeleted if true, the input source will be deleted
when this object is deleted, if false, the caller is
responsible for its deletion
*/
ChannelRemappingAudioSource (AudioSource* source,
bool deleteSourceWhenDeleted);
/** Destructor. */
~ChannelRemappingAudioSource() override;
//==============================================================================
/** Specifies a number of channels that this audio source must produce from its
getNextAudioBlock() callback.
*/
void setNumberOfChannelsToProduce (int requiredNumberOfChannels);
/** Clears any mapped channels.
After this, no channels are mapped, so this object will produce silence. Create
some mappings with setInputChannelMapping() and setOutputChannelMapping().
*/
void clearAllMappings();
/** Creates an input channel mapping.
When the getNextAudioBlock() method is called, the data in channel sourceChannelIndex of the incoming
data will be sent to destChannelIndex of our input source.
@param destChannelIndex the index of an input channel in our input audio source (i.e. the
source specified when this object was created).
@param sourceChannelIndex the index of the input channel in the incoming audio data buffer
during our getNextAudioBlock() callback
*/
void setInputChannelMapping (int destChannelIndex,
int sourceChannelIndex);
/** Creates an output channel mapping.
When the getNextAudioBlock() method is called, the data returned in channel sourceChannelIndex by
our input audio source will be copied to channel destChannelIndex of the final buffer.
@param sourceChannelIndex the index of an output channel coming from our input audio source
(i.e. the source specified when this object was created).
@param destChannelIndex the index of the output channel in the incoming audio data buffer
during our getNextAudioBlock() callback
*/
void setOutputChannelMapping (int sourceChannelIndex,
int destChannelIndex);
/** Returns the index of the incoming channel that will be sent to channel inputChannelIndex of
our input audio source.
*/
int getRemappedInputChannel (int inputChannelIndex) const;
/** Returns the output channel to which channel outputChannelIndex of our input audio
source will be sent.
*/
int getRemappedOutputChannel (int outputChannelIndex) const;
//==============================================================================
/** Returns an XML object to encapsulate the state of the mappings.
@see restoreFromXml
*/
std::unique_ptr<XmlElement> createXml() const;
/** Restores the mappings from an XML object created by createXML().
@see createXml
*/
void restoreFromXml (const XmlElement&);
//==============================================================================
void prepareToPlay (int samplesPerBlockExpected, double sampleRate) override;
void releaseResources() override;
void getNextAudioBlock (const AudioSourceChannelInfo&) override;
private:
//==============================================================================
OptionalScopedPointer<AudioSource> source;
Array<int> remappedInputs, remappedOutputs;
int requiredNumberOfChannels;
AudioBuffer<float> buffer;
AudioSourceChannelInfo remappedInfo;
CriticalSection lock;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (ChannelRemappingAudioSource)
};
} // namespace juce
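A minimal sketch of using the remapper above, assuming inputSource is an existing AudioSource* (hypothetical name):

ChannelRemappingAudioSource remap (inputSource, false);   // caller keeps ownership
remap.setNumberOfChannelsToProduce (2);

// feed incoming channel 0 into both channels of the wrapped source...
remap.setInputChannelMapping (0, 0);
remap.setInputChannelMapping (1, 0);

// ...and send the wrapped source's two output channels to channels 2 and 3
remap.setOutputChannelMapping (0, 2);
remap.setOutputChannelMapping (1, 3);

remap.prepareToPlay (512, 44100.0);

// the mapping can be serialised and restored via XML
if (auto xml = remap.createXml())
    remap.restoreFromXml (*xml);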

+ 0
- 80
libs/juce7/source/modules/juce_audio_basics/sources/juce_IIRFilterAudioSource.cpp

@@ -1,80 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
IIRFilterAudioSource::IIRFilterAudioSource (AudioSource* const inputSource,
const bool deleteInputWhenDeleted)
: input (inputSource, deleteInputWhenDeleted)
{
jassert (inputSource != nullptr);
for (int i = 2; --i >= 0;)
iirFilters.add (new IIRFilter());
}
IIRFilterAudioSource::~IIRFilterAudioSource() {}
//==============================================================================
void IIRFilterAudioSource::setCoefficients (const IIRCoefficients& newCoefficients)
{
for (int i = iirFilters.size(); --i >= 0;)
iirFilters.getUnchecked(i)->setCoefficients (newCoefficients);
}
void IIRFilterAudioSource::makeInactive()
{
for (int i = iirFilters.size(); --i >= 0;)
iirFilters.getUnchecked(i)->makeInactive();
}
//==============================================================================
void IIRFilterAudioSource::prepareToPlay (int samplesPerBlockExpected, double sampleRate)
{
input->prepareToPlay (samplesPerBlockExpected, sampleRate);
for (int i = iirFilters.size(); --i >= 0;)
iirFilters.getUnchecked(i)->reset();
}
void IIRFilterAudioSource::releaseResources()
{
input->releaseResources();
}
void IIRFilterAudioSource::getNextAudioBlock (const AudioSourceChannelInfo& bufferToFill)
{
input->getNextAudioBlock (bufferToFill);
const int numChannels = bufferToFill.buffer->getNumChannels();
while (numChannels > iirFilters.size())
iirFilters.add (new IIRFilter (*iirFilters.getUnchecked (0)));
for (int i = 0; i < numChannels; ++i)
iirFilters.getUnchecked(i)
->processSamples (bufferToFill.buffer->getWritePointer (i, bufferToFill.startSample),
bufferToFill.numSamples);
}
} // namespace juce

+ 0
- 68
libs/juce7/source/modules/juce_audio_basics/sources/juce_IIRFilterAudioSource.h

@@ -1,68 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
An AudioSource that performs an IIR filter on another source.
@tags{Audio}
*/
class JUCE_API IIRFilterAudioSource : public AudioSource
{
public:
//==============================================================================
/** Creates an IIRFilterAudioSource for a given input source.
@param inputSource the input source to read from - this must not be null
@param deleteInputWhenDeleted if true, the input source will be deleted when
this object is deleted
*/
IIRFilterAudioSource (AudioSource* inputSource,
bool deleteInputWhenDeleted);
/** Destructor. */
~IIRFilterAudioSource() override;
//==============================================================================
/** Changes the filter to use the same parameters as the one being passed in. */
void setCoefficients (const IIRCoefficients& newCoefficients);
/** Calls IIRFilter::makeInactive() on all the filters being used internally. */
void makeInactive();
//==============================================================================
void prepareToPlay (int samplesPerBlockExpected, double sampleRate) override;
void releaseResources() override;
void getNextAudioBlock (const AudioSourceChannelInfo&) override;
private:
//==============================================================================
OptionalScopedPointer<AudioSource> input;
OwnedArray<IIRFilter> iirFilters;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (IIRFilterAudioSource)
};
} // namespace juce
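A minimal sketch of applying a filter with the class above, assuming inputSource is an existing AudioSource* (hypothetical name); IIRCoefficients::makeLowPass() is used here only as one convenient way to build coefficients:

IIRFilterAudioSource filtered (inputSource, false);   // caller keeps ownership

// a simple low-pass at 1 kHz for a 44.1 kHz stream
filtered.setCoefficients (IIRCoefficients::makeLowPass (44100.0, 1000.0));

filtered.prepareToPlay (512, 44100.0);

AudioBuffer<float> block (2, 512);
AudioSourceChannelInfo info (&block, 0, block.getNumSamples());
filtered.getNextAudioBlock (info);   // pulls from inputSource and filters each channel in place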

+ 0
- 261
libs/juce7/source/modules/juce_audio_basics/sources/juce_MemoryAudioSource.cpp

@@ -1,261 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
MemoryAudioSource::MemoryAudioSource (AudioBuffer<float>& bufferToUse, bool copyMemory, bool shouldLoop)
: isCurrentlyLooping (shouldLoop)
{
if (copyMemory)
buffer.makeCopyOf (bufferToUse);
else
buffer.setDataToReferTo (bufferToUse.getArrayOfWritePointers(),
bufferToUse.getNumChannels(),
bufferToUse.getNumSamples());
}
//==============================================================================
void MemoryAudioSource::prepareToPlay (int /*samplesPerBlockExpected*/, double /*sampleRate*/)
{
position = 0;
}
void MemoryAudioSource::releaseResources() {}
void MemoryAudioSource::getNextAudioBlock (const AudioSourceChannelInfo& bufferToFill)
{
if (buffer.getNumSamples() == 0)
{
bufferToFill.clearActiveBufferRegion();
return;
}
auto& dst = *bufferToFill.buffer;
auto channels = jmin (dst.getNumChannels(), buffer.getNumChannels());
int max = 0, pos = 0;
auto n = buffer.getNumSamples();
auto m = bufferToFill.numSamples;
int i = position;
for (; (i < n || isCurrentlyLooping) && (pos < m); i += max)
{
max = jmin (m - pos, n - (i % n));
int ch = 0;
for (; ch < channels; ++ch)
dst.copyFrom (ch, bufferToFill.startSample + pos, buffer, ch, i % n, max);
for (; ch < dst.getNumChannels(); ++ch)
dst.clear (ch, bufferToFill.startSample + pos, max);
pos += max;
}
if (pos < m)
dst.clear (bufferToFill.startSample + pos, m - pos);
position = i;
}
//==============================================================================
void MemoryAudioSource::setNextReadPosition (int64 newPosition)
{
position = (int) newPosition;
}
int64 MemoryAudioSource::getNextReadPosition() const
{
return position;
}
int64 MemoryAudioSource::getTotalLength() const
{
return buffer.getNumSamples();
}
//==============================================================================
bool MemoryAudioSource::isLooping() const
{
return isCurrentlyLooping;
}
void MemoryAudioSource::setLooping (bool shouldLoop)
{
isCurrentlyLooping = shouldLoop;
}
//==============================================================================
//==============================================================================
#if JUCE_UNIT_TESTS
static bool operator== (const AudioBuffer<float>& a, const AudioBuffer<float>& b)
{
if (a.getNumChannels() != b.getNumChannels())
return false;
for (int channel = 0; channel < a.getNumChannels(); ++channel)
{
auto* aPtr = a.getReadPointer (channel);
auto* bPtr = b.getReadPointer (channel);
if (std::vector<float> (aPtr, aPtr + a.getNumSamples())
!= std::vector<float> (bPtr, bPtr + b.getNumSamples()))
{
return false;
}
}
return true;
}
struct MemoryAudioSourceTests : public UnitTest
{
MemoryAudioSourceTests() : UnitTest ("MemoryAudioSource", UnitTestCategories::audio) {}
void runTest() override
{
constexpr int blockSize = 512;
AudioBuffer<float> bufferToFill { 2, blockSize };
AudioSourceChannelInfo channelInfo { bufferToFill };
beginTest ("A zero-length buffer produces silence, whether or not looping is enabled");
{
for (const bool enableLooping : { false, true })
{
AudioBuffer<float> buffer;
MemoryAudioSource source { buffer, true, false };
source.setLooping (enableLooping);
source.prepareToPlay (blockSize, 44100.0);
for (int i = 0; i < 2; ++i)
{
play (source, channelInfo);
expect (isSilent (bufferToFill));
}
}
}
beginTest ("A short buffer without looping is played once and followed by silence");
{
auto buffer = getShortBuffer();
MemoryAudioSource source { buffer, true, false };
source.setLooping (false);
source.prepareToPlay (blockSize, 44100.0);
play (source, channelInfo);
auto copy = buffer;
copy.setSize (buffer.getNumChannels(), blockSize, true, true, false);
expect (bufferToFill == copy);
play (source, channelInfo);
expect (isSilent (bufferToFill));
}
beginTest ("A short buffer with looping is played multiple times");
{
auto buffer = getShortBuffer();
MemoryAudioSource source { buffer, true, false };
source.setLooping (true);
source.prepareToPlay (blockSize, 44100.0);
play (source, channelInfo);
for (int sample = 0; sample < buffer.getNumSamples(); ++sample)
expect (bufferToFill.getSample (0, sample + buffer.getNumSamples()) == buffer.getSample (0, sample));
expect (! isSilent (bufferToFill));
}
beginTest ("A long buffer without looping is played once");
{
auto buffer = getLongBuffer();
MemoryAudioSource source { buffer, true, false };
source.setLooping (false);
source.prepareToPlay (blockSize, 44100.0);
play (source, channelInfo);
auto copy = buffer;
copy.setSize (buffer.getNumChannels(), blockSize, true, true, false);
expect (bufferToFill == copy);
for (int i = 0; i < 10; ++i)
play (source, channelInfo);
expect (isSilent (bufferToFill));
}
beginTest ("A long buffer with looping is played multiple times");
{
auto buffer = getLongBuffer();
MemoryAudioSource source { buffer, true, false };
source.setLooping (true);
source.prepareToPlay (blockSize, 44100.0);
for (int i = 0; i < 100; ++i)
{
play (source, channelInfo);
expect (bufferToFill.getSample (0, 0) == buffer.getSample (0, (i * blockSize) % buffer.getNumSamples()));
}
}
}
static AudioBuffer<float> getTestBuffer (int length)
{
AudioBuffer<float> buffer { 2, length };
for (int channel = 0; channel < buffer.getNumChannels(); ++channel)
for (int sample = 0; sample < buffer.getNumSamples(); ++sample)
buffer.setSample (channel, sample, jmap ((float) sample, 0.0f, (float) length, -1.0f, 1.0f));
return buffer;
}
static AudioBuffer<float> getShortBuffer() { return getTestBuffer (5); }
static AudioBuffer<float> getLongBuffer() { return getTestBuffer (1000); }
static void play (MemoryAudioSource& source, AudioSourceChannelInfo& info)
{
info.clearActiveBufferRegion();
source.getNextAudioBlock (info);
}
static bool isSilent (const AudioBuffer<float>& b)
{
for (int channel = 0; channel < b.getNumChannels(); ++channel)
if (b.findMinMax (channel, 0, b.getNumSamples()) != Range<float>{})
return false;
return true;
}
};
static MemoryAudioSourceTests memoryAudioSourceTests;
#endif
} // namespace juce

+ 0
- 82
libs/juce7/source/modules/juce_audio_basics/sources/juce_MemoryAudioSource.h

@@ -1,82 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
An AudioSource which takes some float audio data as an input.
@tags{Audio}
*/
class JUCE_API MemoryAudioSource : public PositionableAudioSource
{
public:
//==============================================================================
/** Creates a MemoryAudioSource by providing an audio buffer.
If copyMemory is true then the buffer will be copied into an internal
buffer which will be owned by the MemoryAudioSource. If copyMemory is
false, then you must ensure that the lifetime of the audio buffer is
at least as long as the MemoryAudioSource.
*/
MemoryAudioSource (AudioBuffer<float>& audioBuffer, bool copyMemory, bool shouldLoop = false);
//==============================================================================
/** Implementation of the AudioSource method. */
void prepareToPlay (int samplesPerBlockExpected, double sampleRate) override;
/** Implementation of the AudioSource method. */
void releaseResources() override;
/** Implementation of the AudioSource method. */
void getNextAudioBlock (const AudioSourceChannelInfo& bufferToFill) override;
//==============================================================================
/** Implementation of the PositionableAudioSource method. */
void setNextReadPosition (int64 newPosition) override;
/** Implementation of the PositionableAudioSource method. */
int64 getNextReadPosition() const override;
/** Implementation of the PositionableAudioSource method. */
int64 getTotalLength() const override;
//==============================================================================
/** Implementation of the PositionableAudioSource method. */
bool isLooping() const override;
/** Implementation of the PositionableAudioSource method. */
void setLooping (bool shouldLoop) override;
private:
//==============================================================================
AudioBuffer<float> buffer;
int position = 0;
bool isCurrentlyLooping;
//==============================================================================
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MemoryAudioSource)
};
} // namespace juce
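A minimal sketch of playing an in-memory buffer with the class above; the clip data itself is assumed to come from elsewhere:

AudioBuffer<float> clip (2, 44100);            // one second of stereo audio
// ... fill 'clip' with sample data ...

MemoryAudioSource memorySource (clip, true);   // copies the data, so 'clip' may go out of scope
memorySource.setLooping (true);
memorySource.prepareToPlay (512, 44100.0);

AudioBuffer<float> block (2, 512);
AudioSourceChannelInfo info (&block, 0, block.getNumSamples());
memorySource.getNextAudioBlock (info);         // the first 512 samples of the clip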

+ 0
- 158
libs/juce7/source/modules/juce_audio_basics/sources/juce_MixerAudioSource.cpp

@@ -1,158 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
MixerAudioSource::MixerAudioSource()
: currentSampleRate (0.0), bufferSizeExpected (0)
{
}
MixerAudioSource::~MixerAudioSource()
{
removeAllInputs();
}
//==============================================================================
void MixerAudioSource::addInputSource (AudioSource* input, const bool deleteWhenRemoved)
{
if (input != nullptr && ! inputs.contains (input))
{
double localRate;
int localBufferSize;
{
const ScopedLock sl (lock);
localRate = currentSampleRate;
localBufferSize = bufferSizeExpected;
}
if (localRate > 0.0)
input->prepareToPlay (localBufferSize, localRate);
const ScopedLock sl (lock);
inputsToDelete.setBit (inputs.size(), deleteWhenRemoved);
inputs.add (input);
}
}
void MixerAudioSource::removeInputSource (AudioSource* const input)
{
if (input != nullptr)
{
std::unique_ptr<AudioSource> toDelete;
{
const ScopedLock sl (lock);
const int index = inputs.indexOf (input);
if (index < 0)
return;
if (inputsToDelete [index])
toDelete.reset (input);
inputsToDelete.shiftBits (-1, index);
inputs.remove (index);
}
input->releaseResources();
}
}
void MixerAudioSource::removeAllInputs()
{
OwnedArray<AudioSource> toDelete;
{
const ScopedLock sl (lock);
for (int i = inputs.size(); --i >= 0;)
if (inputsToDelete[i])
toDelete.add (inputs.getUnchecked(i));
inputs.clear();
}
for (int i = toDelete.size(); --i >= 0;)
toDelete.getUnchecked(i)->releaseResources();
}
void MixerAudioSource::prepareToPlay (int samplesPerBlockExpected, double sampleRate)
{
tempBuffer.setSize (2, samplesPerBlockExpected);
const ScopedLock sl (lock);
currentSampleRate = sampleRate;
bufferSizeExpected = samplesPerBlockExpected;
for (int i = inputs.size(); --i >= 0;)
inputs.getUnchecked(i)->prepareToPlay (samplesPerBlockExpected, sampleRate);
}
void MixerAudioSource::releaseResources()
{
const ScopedLock sl (lock);
for (int i = inputs.size(); --i >= 0;)
inputs.getUnchecked(i)->releaseResources();
tempBuffer.setSize (2, 0);
currentSampleRate = 0;
bufferSizeExpected = 0;
}
void MixerAudioSource::getNextAudioBlock (const AudioSourceChannelInfo& info)
{
const ScopedLock sl (lock);
if (inputs.size() > 0)
{
inputs.getUnchecked(0)->getNextAudioBlock (info);
if (inputs.size() > 1)
{
tempBuffer.setSize (jmax (1, info.buffer->getNumChannels()),
info.buffer->getNumSamples());
AudioSourceChannelInfo info2 (&tempBuffer, 0, info.numSamples);
for (int i = 1; i < inputs.size(); ++i)
{
inputs.getUnchecked(i)->getNextAudioBlock (info2);
for (int chan = 0; chan < info.buffer->getNumChannels(); ++chan)
info.buffer->addFrom (chan, info.startSample, tempBuffer, chan, 0, info.numSamples);
}
}
}
else
{
info.clearActiveBufferRegion();
}
}
} // namespace juce

+ 0
- 99
libs/juce7/source/modules/juce_audio_basics/sources/juce_MixerAudioSource.h

@@ -1,99 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
An AudioSource that mixes together the output of a set of other AudioSources.
Input sources can be added and removed while the mixer is running as long as their
prepareToPlay() and releaseResources() methods are called before and after adding
them to the mixer.
@tags{Audio}
*/
class JUCE_API MixerAudioSource : public AudioSource
{
public:
//==============================================================================
/** Creates a MixerAudioSource. */
MixerAudioSource();
/** Destructor. */
~MixerAudioSource() override;
//==============================================================================
/** Adds an input source to the mixer.
If the mixer is running you'll need to make sure that the input source
is ready to play by calling its prepareToPlay() method before adding it.
If the mixer is stopped, then its input sources will be automatically
prepared when the mixer's prepareToPlay() method is called.
@param newInput the source to add to the mixer
@param deleteWhenRemoved if true, then this source will be deleted when
no longer needed by the mixer.
*/
void addInputSource (AudioSource* newInput, bool deleteWhenRemoved);
/** Removes an input source.
If the source was added by calling addInputSource() with the deleteWhenRemoved
flag set, it will be deleted by this method.
*/
void removeInputSource (AudioSource* input);
/** Removes all the input sources.
Any sources which were added by calling addInputSource() with the deleteWhenRemoved
flag set will be deleted by this method.
*/
void removeAllInputs();
//==============================================================================
/** Implementation of the AudioSource method.
This will call prepareToPlay() on all its input sources.
*/
void prepareToPlay (int samplesPerBlockExpected, double sampleRate) override;
/** Implementation of the AudioSource method.
This will call releaseResources() on all its input sources.
*/
void releaseResources() override;
/** Implementation of the AudioSource method. */
void getNextAudioBlock (const AudioSourceChannelInfo&) override;
private:
//==============================================================================
Array<AudioSource*> inputs;
BigInteger inputsToDelete;
CriticalSection lock;
AudioBuffer<float> tempBuffer;
double currentSampleRate;
int bufferSizeExpected;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MixerAudioSource)
};
} // namespace juce
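A minimal sketch of mixing two sources with the class above, assuming toneSource and clipSource are existing AudioSource* pointers (hypothetical names):

MixerAudioSource mixer;
mixer.addInputSource (toneSource, false);   // caller keeps ownership of both inputs
mixer.addInputSource (clipSource, false);

mixer.prepareToPlay (512, 44100.0);         // prepares every current input

AudioBuffer<float> block (2, 512);
AudioSourceChannelInfo info (&block, 0, block.getNumSamples());
mixer.getNextAudioBlock (info);             // sums both inputs into 'block'

mixer.removeInputSource (clipSource);       // inputs can be removed while running
mixer.releaseResources();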

+ 0
- 76
libs/juce7/source/modules/juce_audio_basics/sources/juce_PositionableAudioSource.h

@@ -1,76 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
A type of AudioSource which can be repositioned.
The basic AudioSource just streams continuously with no idea of a current
time or length, so the PositionableAudioSource is used for a finite stream
that has a current read position.
@see AudioSource, AudioTransportSource
@tags{Audio}
*/
class JUCE_API PositionableAudioSource : public AudioSource
{
protected:
//==============================================================================
/** Creates the PositionableAudioSource. */
PositionableAudioSource() = default;
public:
/** Destructor */
~PositionableAudioSource() override = default;
//==============================================================================
/** Tells the stream to move to a new position.
Calling this indicates that the next call to AudioSource::getNextAudioBlock()
should return samples from this position.
Note that this may be called on a different thread to getNextAudioBlock(),
so the subclass should make sure it's synchronised.
*/
virtual void setNextReadPosition (int64 newPosition) = 0;
/** Returns the position from which the next block will be returned.
@see setNextReadPosition
*/
virtual int64 getNextReadPosition() const = 0;
/** Returns the total length of the stream (in samples). */
virtual int64 getTotalLength() const = 0;
/** Returns true if this source is actually playing in a loop. */
virtual bool isLooping() const = 0;
/** Tells the source whether you'd like it to play in a loop. */
virtual void setLooping (bool shouldLoop) { ignoreUnused (shouldLoop); }
};
} // namespace juce
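A minimal sketch of a subclass implementing the interface above, a source that produces a fixed length of silence, just to show which methods need overriding:

// a trivial PositionableAudioSource producing one second of silence at 48 kHz
struct SilenceSource  : public PositionableAudioSource
{
    void prepareToPlay (int, double) override  {}
    void releaseResources() override           {}

    void getNextAudioBlock (const AudioSourceChannelInfo& info) override
    {
        info.clearActiveBufferRegion();        // silence
        position += info.numSamples;
    }

    void setNextReadPosition (int64 newPosition) override  { position = newPosition; }
    int64 getNextReadPosition() const override             { return position; }
    int64 getTotalLength() const override                  { return 48000; }
    bool isLooping() const override                        { return false; }

    int64 position = 0;
};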

+ 0
- 265
libs/juce7/source/modules/juce_audio_basics/sources/juce_ResamplingAudioSource.cpp

@@ -1,265 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
ResamplingAudioSource::ResamplingAudioSource (AudioSource* const inputSource,
const bool deleteInputWhenDeleted,
const int channels)
: input (inputSource, deleteInputWhenDeleted),
numChannels (channels)
{
jassert (input != nullptr);
zeromem (coefficients, sizeof (coefficients));
}
ResamplingAudioSource::~ResamplingAudioSource() {}
void ResamplingAudioSource::setResamplingRatio (const double samplesInPerOutputSample)
{
jassert (samplesInPerOutputSample > 0);
const SpinLock::ScopedLockType sl (ratioLock);
ratio = jmax (0.0, samplesInPerOutputSample);
}
void ResamplingAudioSource::prepareToPlay (int samplesPerBlockExpected, double sampleRate)
{
const SpinLock::ScopedLockType sl (ratioLock);
auto scaledBlockSize = roundToInt (samplesPerBlockExpected * ratio);
input->prepareToPlay (scaledBlockSize, sampleRate * ratio);
buffer.setSize (numChannels, scaledBlockSize + 32);
filterStates.calloc (numChannels);
srcBuffers.calloc (numChannels);
destBuffers.calloc (numChannels);
createLowPass (ratio);
flushBuffers();
}
void ResamplingAudioSource::flushBuffers()
{
const ScopedLock sl (callbackLock);
buffer.clear();
bufferPos = 0;
sampsInBuffer = 0;
subSampleOffset = 0.0;
resetFilters();
}
void ResamplingAudioSource::releaseResources()
{
input->releaseResources();
buffer.setSize (numChannels, 0);
}
void ResamplingAudioSource::getNextAudioBlock (const AudioSourceChannelInfo& info)
{
const ScopedLock sl (callbackLock);
double localRatio;
{
const SpinLock::ScopedLockType ratioSl (ratioLock);
localRatio = ratio;
}
if (lastRatio != localRatio)
{
createLowPass (localRatio);
lastRatio = localRatio;
}
const int sampsNeeded = roundToInt (info.numSamples * localRatio) + 3;
int bufferSize = buffer.getNumSamples();
if (bufferSize < sampsNeeded + 8)
{
bufferPos %= bufferSize;
bufferSize = sampsNeeded + 32;
buffer.setSize (buffer.getNumChannels(), bufferSize, true, true);
}
bufferPos %= bufferSize;
int endOfBufferPos = bufferPos + sampsInBuffer;
const int channelsToProcess = jmin (numChannels, info.buffer->getNumChannels());
while (sampsNeeded > sampsInBuffer)
{
endOfBufferPos %= bufferSize;
int numToDo = jmin (sampsNeeded - sampsInBuffer,
bufferSize - endOfBufferPos);
AudioSourceChannelInfo readInfo (&buffer, endOfBufferPos, numToDo);
input->getNextAudioBlock (readInfo);
if (localRatio > 1.0001)
{
// for down-sampling, pre-apply the filter..
for (int i = channelsToProcess; --i >= 0;)
applyFilter (buffer.getWritePointer (i, endOfBufferPos), numToDo, filterStates[i]);
}
sampsInBuffer += numToDo;
endOfBufferPos += numToDo;
}
for (int channel = 0; channel < channelsToProcess; ++channel)
{
destBuffers[channel] = info.buffer->getWritePointer (channel, info.startSample);
srcBuffers[channel] = buffer.getReadPointer (channel);
}
int nextPos = (bufferPos + 1) % bufferSize;
for (int m = info.numSamples; --m >= 0;)
{
jassert (sampsInBuffer > 0 && nextPos != endOfBufferPos);
const float alpha = (float) subSampleOffset;
for (int channel = 0; channel < channelsToProcess; ++channel)
*destBuffers[channel]++ = srcBuffers[channel][bufferPos]
+ alpha * (srcBuffers[channel][nextPos] - srcBuffers[channel][bufferPos]);
subSampleOffset += localRatio;
while (subSampleOffset >= 1.0)
{
if (++bufferPos >= bufferSize)
bufferPos = 0;
--sampsInBuffer;
nextPos = (bufferPos + 1) % bufferSize;
subSampleOffset -= 1.0;
}
}
if (localRatio < 0.9999)
{
// for up-sampling, apply the filter after transposing..
for (int i = channelsToProcess; --i >= 0;)
applyFilter (info.buffer->getWritePointer (i, info.startSample), info.numSamples, filterStates[i]);
}
else if (localRatio <= 1.0001 && info.numSamples > 0)
{
// if the filter's not currently being applied, keep it stoked with the last couple of samples to avoid discontinuities
for (int i = channelsToProcess; --i >= 0;)
{
const float* const endOfBuffer = info.buffer->getReadPointer (i, info.startSample + info.numSamples - 1);
FilterState& fs = filterStates[i];
if (info.numSamples > 1)
{
fs.y2 = fs.x2 = *(endOfBuffer - 1);
}
else
{
fs.y2 = fs.y1;
fs.x2 = fs.x1;
}
fs.y1 = fs.x1 = *endOfBuffer;
}
}
jassert (sampsInBuffer >= 0);
}
void ResamplingAudioSource::createLowPass (const double frequencyRatio)
{
const double proportionalRate = (frequencyRatio > 1.0) ? 0.5 / frequencyRatio
: 0.5 * frequencyRatio;
const double n = 1.0 / std::tan (MathConstants<double>::pi * jmax (0.001, proportionalRate));
const double nSquared = n * n;
const double c1 = 1.0 / (1.0 + MathConstants<double>::sqrt2 * n + nSquared);
setFilterCoefficients (c1,
c1 * 2.0f,
c1,
1.0,
c1 * 2.0 * (1.0 - nSquared),
c1 * (1.0 - MathConstants<double>::sqrt2 * n + nSquared));
}
void ResamplingAudioSource::setFilterCoefficients (double c1, double c2, double c3, double c4, double c5, double c6)
{
const double a = 1.0 / c4;
c1 *= a;
c2 *= a;
c3 *= a;
c5 *= a;
c6 *= a;
coefficients[0] = c1;
coefficients[1] = c2;
coefficients[2] = c3;
coefficients[3] = c4;
coefficients[4] = c5;
coefficients[5] = c6;
}
void ResamplingAudioSource::resetFilters()
{
if (filterStates != nullptr)
filterStates.clear ((size_t) numChannels);
}
void ResamplingAudioSource::applyFilter (float* samples, int num, FilterState& fs)
{
while (--num >= 0)
{
const double in = *samples;
double out = coefficients[0] * in
+ coefficients[1] * fs.x1
+ coefficients[2] * fs.x2
- coefficients[4] * fs.y1
- coefficients[5] * fs.y2;
#if JUCE_INTEL
if (! (out < -1.0e-8 || out > 1.0e-8))
out = 0;
#endif
fs.x2 = fs.x1;
fs.x1 = in;
fs.y2 = fs.y1;
fs.y1 = out;
*samples++ = (float) out;
}
}
} // namespace juce

+ 0
- 106
libs/juce7/source/modules/juce_audio_basics/sources/juce_ResamplingAudioSource.h

@@ -1,106 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
A type of AudioSource that takes an input source and changes its sample rate.
@see AudioSource, LagrangeInterpolator, CatmullRomInterpolator
@tags{Audio}
*/
class JUCE_API ResamplingAudioSource : public AudioSource
{
public:
//==============================================================================
/** Creates a ResamplingAudioSource for a given input source.
@param inputSource the input source to read from
@param deleteInputWhenDeleted if true, the input source will be deleted when
this object is deleted
@param numChannels the number of channels to process
*/
ResamplingAudioSource (AudioSource* inputSource,
bool deleteInputWhenDeleted,
int numChannels = 2);
/** Destructor. */
~ResamplingAudioSource() override;
/** Changes the resampling ratio.
(This value can be changed at any time, even while the source is running).
@param samplesInPerOutputSample if set to 1.0, the input is passed through; higher
values will speed it up; lower values will slow it
down. The ratio must be greater than 0
*/
void setResamplingRatio (double samplesInPerOutputSample);
/** Returns the current resampling ratio.
This is the value that was set by setResamplingRatio().
*/
double getResamplingRatio() const noexcept { return ratio; }
/** Clears any buffers and filters that the resampler is using. */
void flushBuffers();
//==============================================================================
void prepareToPlay (int samplesPerBlockExpected, double sampleRate) override;
void releaseResources() override;
void getNextAudioBlock (const AudioSourceChannelInfo&) override;
private:
//==============================================================================
OptionalScopedPointer<AudioSource> input;
double ratio = 1.0, lastRatio = 1.0;
AudioBuffer<float> buffer;
int bufferPos = 0, sampsInBuffer = 0;
double subSampleOffset = 0.0;
double coefficients[6];
SpinLock ratioLock;
CriticalSection callbackLock;
const int numChannels;
HeapBlock<float*> destBuffers;
HeapBlock<const float*> srcBuffers;
void setFilterCoefficients (double c1, double c2, double c3, double c4, double c5, double c6);
void createLowPass (double proportionalRate);
struct FilterState
{
double x1, x2, y1, y2;
};
HeapBlock<FilterState> filterStates;
void resetFilters();
void applyFilter (float* samples, int num, FilterState& fs);
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (ResamplingAudioSource)
};
} // namespace juce
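A minimal sketch of resampling a 48 kHz source for 44.1 kHz output with the class above, assuming inputSource is an existing AudioSource* (hypothetical name):

ResamplingAudioSource resampled (inputSource, false, 2);   // caller keeps ownership, stereo
resampled.setResamplingRatio (48000.0 / 44100.0);          // input samples per output sample

resampled.prepareToPlay (512, 44100.0);

AudioBuffer<float> block (2, 512);
AudioSourceChannelInfo info (&block, 0, block.getNumSamples());
resampled.getNextAudioBlock (info);

resampled.setResamplingRatio (2.0);   // the ratio may be changed while running
resampled.flushBuffers();             // e.g. after seeking in the input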

+ 0
- 83
libs/juce7/source/modules/juce_audio_basics/sources/juce_ReverbAudioSource.cpp

@@ -1,83 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
ReverbAudioSource::ReverbAudioSource (AudioSource* const inputSource, const bool deleteInputWhenDeleted)
: input (inputSource, deleteInputWhenDeleted),
bypass (false)
{
jassert (inputSource != nullptr);
}
ReverbAudioSource::~ReverbAudioSource() {}
void ReverbAudioSource::prepareToPlay (int samplesPerBlockExpected, double sampleRate)
{
const ScopedLock sl (lock);
input->prepareToPlay (samplesPerBlockExpected, sampleRate);
reverb.setSampleRate (sampleRate);
}
void ReverbAudioSource::releaseResources() {}
void ReverbAudioSource::getNextAudioBlock (const AudioSourceChannelInfo& bufferToFill)
{
const ScopedLock sl (lock);
input->getNextAudioBlock (bufferToFill);
if (! bypass)
{
float* const firstChannel = bufferToFill.buffer->getWritePointer (0, bufferToFill.startSample);
if (bufferToFill.buffer->getNumChannels() > 1)
{
reverb.processStereo (firstChannel,
bufferToFill.buffer->getWritePointer (1, bufferToFill.startSample),
bufferToFill.numSamples);
}
else
{
reverb.processMono (firstChannel, bufferToFill.numSamples);
}
}
}
void ReverbAudioSource::setParameters (const Reverb::Parameters& newParams)
{
const ScopedLock sl (lock);
reverb.setParameters (newParams);
}
void ReverbAudioSource::setBypassed (bool b) noexcept
{
if (bypass != b)
{
const ScopedLock sl (lock);
bypass = b;
reverb.reset();
}
}
} // namespace juce

+ 0
- 74
libs/juce7/source/modules/juce_audio_basics/sources/juce_ReverbAudioSource.h

@@ -1,74 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
An AudioSource that uses the Reverb class to apply a reverb to another AudioSource.
@see Reverb
@tags{Audio}
*/
class JUCE_API ReverbAudioSource : public AudioSource
{
public:
/** Creates a ReverbAudioSource to process a given input source.
@param inputSource the input source to read from - this must not be null
@param deleteInputWhenDeleted if true, the input source will be deleted when
this object is deleted
*/
ReverbAudioSource (AudioSource* inputSource,
bool deleteInputWhenDeleted);
/** Destructor. */
~ReverbAudioSource() override;
//==============================================================================
/** Returns the parameters from the reverb. */
const Reverb::Parameters& getParameters() const noexcept { return reverb.getParameters(); }
/** Changes the reverb's parameters. */
void setParameters (const Reverb::Parameters& newParams);
void setBypassed (bool isBypassed) noexcept;
bool isBypassed() const noexcept { return bypass; }
//==============================================================================
void prepareToPlay (int samplesPerBlockExpected, double sampleRate) override;
void releaseResources() override;
void getNextAudioBlock (const AudioSourceChannelInfo&) override;
private:
//==============================================================================
CriticalSection lock;
OptionalScopedPointer<AudioSource> input;
Reverb reverb;
std::atomic<bool> bypass;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (ReverbAudioSource)
};
} // namespace juce
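A minimal sketch of adding reverb with the class above, assuming inputSource is an existing AudioSource* (hypothetical name); the parameter values are arbitrary:

ReverbAudioSource wet (inputSource, false);   // caller keeps ownership

Reverb::Parameters params;
params.roomSize = 0.7f;
params.wetLevel = 0.4f;
params.dryLevel = 0.6f;
wet.setParameters (params);

wet.prepareToPlay (512, 44100.0);

AudioBuffer<float> block (2, 512);
AudioSourceChannelInfo info (&block, 0, block.getNumSamples());
wet.getNextAudioBlock (info);   // the input plus its reverb tail

wet.setBypassed (true);         // passes the input straight through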

+ 0
- 78
libs/juce7/source/modules/juce_audio_basics/sources/juce_ToneGeneratorAudioSource.cpp

@@ -1,78 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
ToneGeneratorAudioSource::ToneGeneratorAudioSource()
: frequency (1000.0),
sampleRate (44100.0),
currentPhase (0.0),
phasePerSample (0.0),
amplitude (0.5f)
{
}
ToneGeneratorAudioSource::~ToneGeneratorAudioSource()
{
}
//==============================================================================
void ToneGeneratorAudioSource::setAmplitude (const float newAmplitude)
{
amplitude = newAmplitude;
}
void ToneGeneratorAudioSource::setFrequency (const double newFrequencyHz)
{
frequency = newFrequencyHz;
phasePerSample = 0.0;
}
//==============================================================================
void ToneGeneratorAudioSource::prepareToPlay (int /*samplesPerBlockExpected*/, double rate)
{
currentPhase = 0.0;
phasePerSample = 0.0;
sampleRate = rate;
}
void ToneGeneratorAudioSource::releaseResources()
{
}
void ToneGeneratorAudioSource::getNextAudioBlock (const AudioSourceChannelInfo& info)
{
if (phasePerSample == 0.0)
phasePerSample = MathConstants<double>::twoPi / (sampleRate / frequency);
for (int i = 0; i < info.numSamples; ++i)
{
const float sample = amplitude * (float) std::sin (currentPhase);
currentPhase += phasePerSample;
for (int j = info.buffer->getNumChannels(); --j >= 0;)
info.buffer->setSample (j, info.startSample + i, sample);
}
}
} // namespace juce

+ 0
- 71
libs/juce7/source/modules/juce_audio_basics/sources/juce_ToneGeneratorAudioSource.h

@@ -1,71 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
A simple AudioSource that generates a sine wave.
@tags{Audio}
*/
class JUCE_API ToneGeneratorAudioSource : public AudioSource
{
public:
//==============================================================================
/** Creates a ToneGeneratorAudioSource. */
ToneGeneratorAudioSource();
/** Destructor. */
~ToneGeneratorAudioSource() override;
//==============================================================================
/** Sets the signal's amplitude. */
void setAmplitude (float newAmplitude);
/** Sets the signal's frequency. */
void setFrequency (double newFrequencyHz);
//==============================================================================
/** Implementation of the AudioSource method. */
void prepareToPlay (int samplesPerBlockExpected, double sampleRate) override;
/** Implementation of the AudioSource method. */
void releaseResources() override;
/** Implementation of the AudioSource method. */
void getNextAudioBlock (const AudioSourceChannelInfo&) override;
private:
//==============================================================================
double frequency, sampleRate;
double currentPhase, phasePerSample;
float amplitude;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (ToneGeneratorAudioSource)
};
} // namespace juce
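A minimal sketch of driving the tone generator by hand (in practice it would normally be played through an AudioSourcePlayer; the block size and sample rate here are assumptions):

juce::ToneGeneratorAudioSource tone;
tone.setFrequency (440.0);                 // A4
tone.setAmplitude (0.25f);
tone.prepareToPlay (512, 48000.0);         // expected block size, sample rate

juce::AudioBuffer<float> buffer (2, 512);
juce::AudioSourceChannelInfo info (&buffer, 0, buffer.getNumSamples());
tone.getNextAudioBlock (info);             // writes the same sine wave to every channel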

+ 0
- 585
libs/juce7/source/modules/juce_audio_basics/synthesisers/juce_Synthesiser.cpp

@@ -1,585 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
SynthesiserSound::SynthesiserSound() {}
SynthesiserSound::~SynthesiserSound() {}
//==============================================================================
SynthesiserVoice::SynthesiserVoice() {}
SynthesiserVoice::~SynthesiserVoice() {}
bool SynthesiserVoice::isPlayingChannel (const int midiChannel) const
{
return currentPlayingMidiChannel == midiChannel;
}
void SynthesiserVoice::setCurrentPlaybackSampleRate (const double newRate)
{
currentSampleRate = newRate;
}
bool SynthesiserVoice::isVoiceActive() const
{
return getCurrentlyPlayingNote() >= 0;
}
void SynthesiserVoice::clearCurrentNote()
{
currentlyPlayingNote = -1;
currentlyPlayingSound = nullptr;
currentPlayingMidiChannel = 0;
}
void SynthesiserVoice::aftertouchChanged (int) {}
void SynthesiserVoice::channelPressureChanged (int) {}
bool SynthesiserVoice::wasStartedBefore (const SynthesiserVoice& other) const noexcept
{
return noteOnTime < other.noteOnTime;
}
void SynthesiserVoice::renderNextBlock (AudioBuffer<double>& outputBuffer,
int startSample, int numSamples)
{
AudioBuffer<double> subBuffer (outputBuffer.getArrayOfWritePointers(),
outputBuffer.getNumChannels(),
startSample, numSamples);
tempBuffer.makeCopyOf (subBuffer, true);
renderNextBlock (tempBuffer, 0, numSamples);
subBuffer.makeCopyOf (tempBuffer, true);
}
//==============================================================================
Synthesiser::Synthesiser()
{
for (int i = 0; i < numElementsInArray (lastPitchWheelValues); ++i)
lastPitchWheelValues[i] = 0x2000;
}
Synthesiser::~Synthesiser()
{
}
//==============================================================================
SynthesiserVoice* Synthesiser::getVoice (const int index) const
{
const ScopedLock sl (lock);
return voices [index];
}
void Synthesiser::clearVoices()
{
const ScopedLock sl (lock);
voices.clear();
}
SynthesiserVoice* Synthesiser::addVoice (SynthesiserVoice* const newVoice)
{
const ScopedLock sl (lock);
newVoice->setCurrentPlaybackSampleRate (sampleRate);
return voices.add (newVoice);
}
void Synthesiser::removeVoice (const int index)
{
const ScopedLock sl (lock);
voices.remove (index);
}
void Synthesiser::clearSounds()
{
const ScopedLock sl (lock);
sounds.clear();
}
SynthesiserSound* Synthesiser::addSound (const SynthesiserSound::Ptr& newSound)
{
const ScopedLock sl (lock);
return sounds.add (newSound);
}
void Synthesiser::removeSound (const int index)
{
const ScopedLock sl (lock);
sounds.remove (index);
}
void Synthesiser::setNoteStealingEnabled (const bool shouldSteal)
{
shouldStealNotes = shouldSteal;
}
void Synthesiser::setMinimumRenderingSubdivisionSize (int numSamples, bool shouldBeStrict) noexcept
{
jassert (numSamples > 0); // it wouldn't make much sense for this to be less than 1
minimumSubBlockSize = numSamples;
subBlockSubdivisionIsStrict = shouldBeStrict;
}
//==============================================================================
void Synthesiser::setCurrentPlaybackSampleRate (const double newRate)
{
if (sampleRate != newRate)
{
const ScopedLock sl (lock);
allNotesOff (0, false);
sampleRate = newRate;
for (auto* voice : voices)
voice->setCurrentPlaybackSampleRate (newRate);
}
}
template <typename floatType>
void Synthesiser::processNextBlock (AudioBuffer<floatType>& outputAudio,
const MidiBuffer& midiData,
int startSample,
int numSamples)
{
// must set the sample rate before using this!
jassert (sampleRate != 0);
const int targetChannels = outputAudio.getNumChannels();
auto midiIterator = midiData.findNextSamplePosition (startSample);
bool firstEvent = true;
const ScopedLock sl (lock);
for (; numSamples > 0; ++midiIterator)
{
if (midiIterator == midiData.cend())
{
if (targetChannels > 0)
renderVoices (outputAudio, startSample, numSamples);
return;
}
const auto metadata = *midiIterator;
const int samplesToNextMidiMessage = metadata.samplePosition - startSample;
if (samplesToNextMidiMessage >= numSamples)
{
if (targetChannels > 0)
renderVoices (outputAudio, startSample, numSamples);
handleMidiEvent (metadata.getMessage());
break;
}
if (samplesToNextMidiMessage < ((firstEvent && ! subBlockSubdivisionIsStrict) ? 1 : minimumSubBlockSize))
{
handleMidiEvent (metadata.getMessage());
continue;
}
firstEvent = false;
if (targetChannels > 0)
renderVoices (outputAudio, startSample, samplesToNextMidiMessage);
handleMidiEvent (metadata.getMessage());
startSample += samplesToNextMidiMessage;
numSamples -= samplesToNextMidiMessage;
}
std::for_each (midiIterator,
midiData.cend(),
[&] (const MidiMessageMetadata& meta) { handleMidiEvent (meta.getMessage()); });
}
// explicit template instantiation
template void Synthesiser::processNextBlock<float> (AudioBuffer<float>&, const MidiBuffer&, int, int);
template void Synthesiser::processNextBlock<double> (AudioBuffer<double>&, const MidiBuffer&, int, int);
void Synthesiser::renderNextBlock (AudioBuffer<float>& outputAudio, const MidiBuffer& inputMidi,
int startSample, int numSamples)
{
processNextBlock (outputAudio, inputMidi, startSample, numSamples);
}
void Synthesiser::renderNextBlock (AudioBuffer<double>& outputAudio, const MidiBuffer& inputMidi,
int startSample, int numSamples)
{
processNextBlock (outputAudio, inputMidi, startSample, numSamples);
}
void Synthesiser::renderVoices (AudioBuffer<float>& buffer, int startSample, int numSamples)
{
for (auto* voice : voices)
voice->renderNextBlock (buffer, startSample, numSamples);
}
void Synthesiser::renderVoices (AudioBuffer<double>& buffer, int startSample, int numSamples)
{
for (auto* voice : voices)
voice->renderNextBlock (buffer, startSample, numSamples);
}
void Synthesiser::handleMidiEvent (const MidiMessage& m)
{
const int channel = m.getChannel();
if (m.isNoteOn())
{
noteOn (channel, m.getNoteNumber(), m.getFloatVelocity());
}
else if (m.isNoteOff())
{
noteOff (channel, m.getNoteNumber(), m.getFloatVelocity(), true);
}
else if (m.isAllNotesOff() || m.isAllSoundOff())
{
allNotesOff (channel, true);
}
else if (m.isPitchWheel())
{
const int wheelPos = m.getPitchWheelValue();
lastPitchWheelValues [channel - 1] = wheelPos;
handlePitchWheel (channel, wheelPos);
}
else if (m.isAftertouch())
{
handleAftertouch (channel, m.getNoteNumber(), m.getAfterTouchValue());
}
else if (m.isChannelPressure())
{
handleChannelPressure (channel, m.getChannelPressureValue());
}
else if (m.isController())
{
handleController (channel, m.getControllerNumber(), m.getControllerValue());
}
else if (m.isProgramChange())
{
handleProgramChange (channel, m.getProgramChangeNumber());
}
}
//==============================================================================
void Synthesiser::noteOn (const int midiChannel,
const int midiNoteNumber,
const float velocity)
{
const ScopedLock sl (lock);
for (auto* sound : sounds)
{
if (sound->appliesToNote (midiNoteNumber) && sound->appliesToChannel (midiChannel))
{
// If hitting a note that's still ringing, stop it first (it could be
// still playing because of the sustain or sostenuto pedal).
for (auto* voice : voices)
if (voice->getCurrentlyPlayingNote() == midiNoteNumber && voice->isPlayingChannel (midiChannel))
stopVoice (voice, 1.0f, true);
startVoice (findFreeVoice (sound, midiChannel, midiNoteNumber, shouldStealNotes),
sound, midiChannel, midiNoteNumber, velocity);
}
}
}
void Synthesiser::startVoice (SynthesiserVoice* const voice,
SynthesiserSound* const sound,
const int midiChannel,
const int midiNoteNumber,
const float velocity)
{
if (voice != nullptr && sound != nullptr)
{
if (voice->currentlyPlayingSound != nullptr)
voice->stopNote (0.0f, false);
voice->currentlyPlayingNote = midiNoteNumber;
voice->currentPlayingMidiChannel = midiChannel;
voice->noteOnTime = ++lastNoteOnCounter;
voice->currentlyPlayingSound = sound;
voice->setKeyDown (true);
voice->setSostenutoPedalDown (false);
voice->setSustainPedalDown (sustainPedalsDown[midiChannel]);
voice->startNote (midiNoteNumber, velocity, sound,
lastPitchWheelValues [midiChannel - 1]);
}
}
void Synthesiser::stopVoice (SynthesiserVoice* voice, float velocity, const bool allowTailOff)
{
jassert (voice != nullptr);
voice->stopNote (velocity, allowTailOff);
// the subclass MUST call clearCurrentNote() if it's not tailing off! RTFM for stopNote()!
jassert (allowTailOff || (voice->getCurrentlyPlayingNote() < 0 && voice->getCurrentlyPlayingSound() == nullptr));
}
void Synthesiser::noteOff (const int midiChannel,
const int midiNoteNumber,
const float velocity,
const bool allowTailOff)
{
const ScopedLock sl (lock);
for (auto* voice : voices)
{
if (voice->getCurrentlyPlayingNote() == midiNoteNumber
&& voice->isPlayingChannel (midiChannel))
{
if (auto sound = voice->getCurrentlyPlayingSound())
{
if (sound->appliesToNote (midiNoteNumber)
&& sound->appliesToChannel (midiChannel))
{
jassert (! voice->keyIsDown || voice->isSustainPedalDown() == sustainPedalsDown [midiChannel]);
voice->setKeyDown (false);
if (! (voice->isSustainPedalDown() || voice->isSostenutoPedalDown()))
stopVoice (voice, velocity, allowTailOff);
}
}
}
}
}
void Synthesiser::allNotesOff (const int midiChannel, const bool allowTailOff)
{
const ScopedLock sl (lock);
for (auto* voice : voices)
if (midiChannel <= 0 || voice->isPlayingChannel (midiChannel))
voice->stopNote (1.0f, allowTailOff);
sustainPedalsDown.clear();
}
void Synthesiser::handlePitchWheel (const int midiChannel, const int wheelValue)
{
const ScopedLock sl (lock);
for (auto* voice : voices)
if (midiChannel <= 0 || voice->isPlayingChannel (midiChannel))
voice->pitchWheelMoved (wheelValue);
}
void Synthesiser::handleController (const int midiChannel,
const int controllerNumber,
const int controllerValue)
{
switch (controllerNumber)
{
case 0x40: handleSustainPedal (midiChannel, controllerValue >= 64); break;
case 0x42: handleSostenutoPedal (midiChannel, controllerValue >= 64); break;
case 0x43: handleSoftPedal (midiChannel, controllerValue >= 64); break;
default: break;
}
const ScopedLock sl (lock);
for (auto* voice : voices)
if (midiChannel <= 0 || voice->isPlayingChannel (midiChannel))
voice->controllerMoved (controllerNumber, controllerValue);
}
void Synthesiser::handleAftertouch (int midiChannel, int midiNoteNumber, int aftertouchValue)
{
const ScopedLock sl (lock);
for (auto* voice : voices)
if (voice->getCurrentlyPlayingNote() == midiNoteNumber
&& (midiChannel <= 0 || voice->isPlayingChannel (midiChannel)))
voice->aftertouchChanged (aftertouchValue);
}
void Synthesiser::handleChannelPressure (int midiChannel, int channelPressureValue)
{
const ScopedLock sl (lock);
for (auto* voice : voices)
if (midiChannel <= 0 || voice->isPlayingChannel (midiChannel))
voice->channelPressureChanged (channelPressureValue);
}
void Synthesiser::handleSustainPedal (int midiChannel, bool isDown)
{
jassert (midiChannel > 0 && midiChannel <= 16);
const ScopedLock sl (lock);
if (isDown)
{
sustainPedalsDown.setBit (midiChannel);
for (auto* voice : voices)
if (voice->isPlayingChannel (midiChannel) && voice->isKeyDown())
voice->setSustainPedalDown (true);
}
else
{
for (auto* voice : voices)
{
if (voice->isPlayingChannel (midiChannel))
{
voice->setSustainPedalDown (false);
if (! (voice->isKeyDown() || voice->isSostenutoPedalDown()))
stopVoice (voice, 1.0f, true);
}
}
sustainPedalsDown.clearBit (midiChannel);
}
}
void Synthesiser::handleSostenutoPedal (int midiChannel, bool isDown)
{
jassert (midiChannel > 0 && midiChannel <= 16);
const ScopedLock sl (lock);
for (auto* voice : voices)
{
if (voice->isPlayingChannel (midiChannel))
{
if (isDown)
voice->setSostenutoPedalDown (true);
else if (voice->isSostenutoPedalDown())
stopVoice (voice, 1.0f, true);
}
}
}
void Synthesiser::handleSoftPedal (int midiChannel, bool /*isDown*/)
{
ignoreUnused (midiChannel);
jassert (midiChannel > 0 && midiChannel <= 16);
}
void Synthesiser::handleProgramChange (int midiChannel, int programNumber)
{
ignoreUnused (midiChannel, programNumber);
jassert (midiChannel > 0 && midiChannel <= 16);
}
//==============================================================================
SynthesiserVoice* Synthesiser::findFreeVoice (SynthesiserSound* soundToPlay,
int midiChannel, int midiNoteNumber,
const bool stealIfNoneAvailable) const
{
const ScopedLock sl (lock);
for (auto* voice : voices)
if ((! voice->isVoiceActive()) && voice->canPlaySound (soundToPlay))
return voice;
if (stealIfNoneAvailable)
return findVoiceToSteal (soundToPlay, midiChannel, midiNoteNumber);
return nullptr;
}
SynthesiserVoice* Synthesiser::findVoiceToSteal (SynthesiserSound* soundToPlay,
int /*midiChannel*/, int midiNoteNumber) const
{
// This voice-stealing algorithm applies the following heuristics:
// - Re-use the oldest notes first
// - Protect the lowest & topmost notes, even if sustained, but not if they've been released.
// apparently you are trying to render audio without having any voices...
jassert (! voices.isEmpty());
// These are the voices we want to protect (ie: only steal if unavoidable)
SynthesiserVoice* low = nullptr; // Lowest sounding note, might be sustained, but NOT in release phase
SynthesiserVoice* top = nullptr; // Highest sounding note, might be sustained, but NOT in release phase
// this is a list of voices we can steal, sorted by how long they've been running
Array<SynthesiserVoice*> usableVoices;
usableVoices.ensureStorageAllocated (voices.size());
for (auto* voice : voices)
{
if (voice->canPlaySound (soundToPlay))
{
jassert (voice->isVoiceActive()); // We wouldn't be here otherwise
usableVoices.add (voice);
// NB: Using a functor rather than a lambda here due to scare-stories about
// compilers generating code containing heap allocations..
struct Sorter
{
bool operator() (const SynthesiserVoice* a, const SynthesiserVoice* b) const noexcept { return a->wasStartedBefore (*b); }
};
std::sort (usableVoices.begin(), usableVoices.end(), Sorter());
if (! voice->isPlayingButReleased()) // Don't protect released notes
{
auto note = voice->getCurrentlyPlayingNote();
if (low == nullptr || note < low->getCurrentlyPlayingNote())
low = voice;
if (top == nullptr || note > top->getCurrentlyPlayingNote())
top = voice;
}
}
}
// Eliminate pathological cases (ie: only 1 note playing): we always give precedence to the lowest note(s)
if (top == low)
top = nullptr;
// The oldest note that's playing with the target pitch is ideal..
for (auto* voice : usableVoices)
if (voice->getCurrentlyPlayingNote() == midiNoteNumber)
return voice;
// Oldest voice that has been released (no finger on it and not held by sustain pedal)
for (auto* voice : usableVoices)
if (voice != low && voice != top && voice->isPlayingButReleased())
return voice;
// Oldest voice that doesn't have a finger on it:
for (auto* voice : usableVoices)
if (voice != low && voice != top && ! voice->isKeyDown())
return voice;
// Oldest voice that isn't protected
for (auto* voice : usableVoices)
if (voice != low && voice != top)
return voice;
// We've only got "protected" voices now: lowest note takes priority
jassert (low != nullptr);
// Duophonic synth: give priority to the bass note:
if (top != nullptr)
return top;
return low;
}
} // namespace juce
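As a rough, hedged illustration of how the block-splitting code above is usually driven (the MyProcessor class and its synth member are assumptions, not from this repository): an AudioProcessor forwards its audio buffer and MIDI to the synth once per callback, after telling it the sample rate in prepareToPlay().

// Hypothetical AudioProcessor subclass; `synth` is a juce::Synthesiser member
// that already has voices and sounds added.
void MyProcessor::prepareToPlay (double sampleRate, int)
{
    synth.setCurrentPlaybackSampleRate (sampleRate);   // propagated to every voice
}

void MyProcessor::processBlock (juce::AudioBuffer<float>& buffer, juce::MidiBuffer& midi)
{
    buffer.clear();                                    // renderNextBlock() adds to the buffer's contents
    synth.renderNextBlock (buffer, midi, 0, buffer.getNumSamples());
}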

+ 0
- 637
libs/juce7/source/modules/juce_audio_basics/synthesisers/juce_Synthesiser.h

@@ -1,637 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
Describes one of the sounds that a Synthesiser can play.
A synthesiser can contain one or more sounds, and a sound can choose which
midi notes and channels can trigger it.
The SynthesiserSound is a passive class that just describes what the sound is -
the actual audio rendering for a sound is done by a SynthesiserVoice. This allows
more than one SynthesiserVoice to play the same sound at the same time.
@see Synthesiser, SynthesiserVoice
@tags{Audio}
*/
class JUCE_API SynthesiserSound : public ReferenceCountedObject
{
protected:
//==============================================================================
SynthesiserSound();
public:
/** Destructor. */
~SynthesiserSound() override;
//==============================================================================
/** Returns true if this sound should be played when a given midi note is pressed.
The Synthesiser will use this information when deciding which sounds to trigger
for a given note.
*/
virtual bool appliesToNote (int midiNoteNumber) = 0;
/** Returns true if the sound should be triggered by midi events on a given channel.
The Synthesiser will use this information when deciding which sounds to trigger
for a given note.
*/
virtual bool appliesToChannel (int midiChannel) = 0;
/** The class is reference-counted, so this is a handy pointer class for it. */
using Ptr = ReferenceCountedObjectPtr<SynthesiserSound>;
private:
//==============================================================================
JUCE_LEAK_DETECTOR (SynthesiserSound)
};
//==============================================================================
/**
Represents a voice that a Synthesiser can use to play a SynthesiserSound.
A voice plays a single sound at a time, and a synthesiser holds an array of
voices so that it can play polyphonically.
@see Synthesiser, SynthesiserSound
@tags{Audio}
*/
class JUCE_API SynthesiserVoice
{
public:
//==============================================================================
/** Creates a voice. */
SynthesiserVoice();
/** Destructor. */
virtual ~SynthesiserVoice();
//==============================================================================
/** Returns the midi note that this voice is currently playing.
Returns a value less than 0 if no note is playing.
*/
int getCurrentlyPlayingNote() const noexcept { return currentlyPlayingNote; }
/** Returns the sound that this voice is currently playing.
Returns nullptr if it's not playing.
*/
SynthesiserSound::Ptr getCurrentlyPlayingSound() const noexcept { return currentlyPlayingSound; }
/** Must return true if this voice object is capable of playing the given sound.
If there are different classes of sound, and different classes of voice, a voice can
choose which ones it wants to take on.
A typical implementation of this method may just return true if there's only one type
of voice and sound, or it might check the type of the sound object passed-in and
see if it's one that it understands.
*/
virtual bool canPlaySound (SynthesiserSound*) = 0;
/** Called to start a new note.
This will be called during the rendering callback, so must be fast and thread-safe.
*/
virtual void startNote (int midiNoteNumber,
float velocity,
SynthesiserSound* sound,
int currentPitchWheelPosition) = 0;
/** Called to stop a note.
This will be called during the rendering callback, so must be fast and thread-safe.
The velocity indicates how quickly the note was released - 0 is slowly, 1 is quickly.
If allowTailOff is false or the voice doesn't want to tail-off, then it must stop all
sound immediately, and must call clearCurrentNote() to reset the state of this voice
and allow the synth to reassign it another sound.
If allowTailOff is true and the voice decides to do a tail-off, then it's allowed to
begin fading out its sound, and it can stop playing until it's finished. As soon as it
finishes playing (during the rendering callback), it must make sure that it calls
clearCurrentNote().
*/
virtual void stopNote (float velocity, bool allowTailOff) = 0;
/** Returns true if this voice is currently busy playing a sound.
By default this just checks the getCurrentlyPlayingNote() value, but can
be overridden for more advanced checking.
*/
virtual bool isVoiceActive() const;
/** Called to let the voice know that the pitch wheel has been moved.
This will be called during the rendering callback, so must be fast and thread-safe.
*/
virtual void pitchWheelMoved (int newPitchWheelValue) = 0;
/** Called to let the voice know that a midi controller has been moved.
This will be called during the rendering callback, so must be fast and thread-safe.
*/
virtual void controllerMoved (int controllerNumber, int newControllerValue) = 0;
/** Called to let the voice know that the aftertouch has changed.
This will be called during the rendering callback, so must be fast and thread-safe.
*/
virtual void aftertouchChanged (int newAftertouchValue);
/** Called to let the voice know that the channel pressure has changed.
This will be called during the rendering callback, so must be fast and thread-safe.
*/
virtual void channelPressureChanged (int newChannelPressureValue);
//==============================================================================
/** Renders the next block of data for this voice.
The output audio data must be added to the current contents of the buffer provided.
Only the region of the buffer between startSample and (startSample + numSamples)
should be altered by this method.
If the voice is currently silent, it should just return without doing anything.
If the sound that the voice is playing finishes during the course of this rendered
block, it must call clearCurrentNote(), to tell the synthesiser that it has finished.
The size of the blocks that are rendered can change each time it is called, and may
involve rendering as little as 1 sample at a time. In between rendering callbacks,
the voice's methods will be called to tell it about note and controller events.
*/
virtual void renderNextBlock (AudioBuffer<float>& outputBuffer,
int startSample,
int numSamples) = 0;
/** A double-precision version of renderNextBlock() */
virtual void renderNextBlock (AudioBuffer<double>& outputBuffer,
int startSample,
int numSamples);
/** Changes the voice's reference sample rate.
The rate is set so that subclasses know the output rate and can set their pitch
accordingly.
This method is called by the synth, and subclasses can access the current rate with
the currentSampleRate member.
*/
virtual void setCurrentPlaybackSampleRate (double newRate);
/** Returns true if the voice is currently playing a sound which is mapped to the given
midi channel.
If it's not currently playing, this will return false.
*/
virtual bool isPlayingChannel (int midiChannel) const;
/** Returns the current target sample rate at which rendering is being done.
Subclasses may need to know this so that they can pitch things correctly.
*/
double getSampleRate() const noexcept { return currentSampleRate; }
/** Returns true if the key that triggered this voice is still held down.
Note that the voice may still be playing after the key was released (e.g because the
sostenuto pedal is down).
*/
bool isKeyDown() const noexcept { return keyIsDown; }
/** Allows you to modify the flag indicating that the key that triggered this voice is still held down.
@see isKeyDown
*/
void setKeyDown (bool isNowDown) noexcept { keyIsDown = isNowDown; }
/** Returns true if the sustain pedal is currently active for this voice. */
bool isSustainPedalDown() const noexcept { return sustainPedalDown; }
/** Modifies the sustain pedal flag. */
void setSustainPedalDown (bool isNowDown) noexcept { sustainPedalDown = isNowDown; }
/** Returns true if the sostenuto pedal is currently active for this voice. */
bool isSostenutoPedalDown() const noexcept { return sostenutoPedalDown; }
/** Modifies the sostenuto pedal flag. */
void setSostenutoPedalDown (bool isNowDown) noexcept { sostenutoPedalDown = isNowDown; }
/** Returns true if a voice is sounding in its release phase **/
bool isPlayingButReleased() const noexcept
{
return isVoiceActive() && ! (isKeyDown() || isSostenutoPedalDown() || isSustainPedalDown());
}
/** Returns true if this voice started playing its current note before the other voice did. */
bool wasStartedBefore (const SynthesiserVoice& other) const noexcept;
protected:
/** Resets the state of this voice after a sound has finished playing.
The subclass must call this when it finishes playing a note and becomes available
to play new ones.
It must either call it in the stopNote() method, or if the voice is tailing off,
then it should call it later during the renderNextBlock method, as soon as it
finishes its tail-off.
It can also be called at any time during the render callback if the sound happens
to have finished, e.g. if it's playing a sample and the sample finishes.
*/
void clearCurrentNote();
private:
//==============================================================================
friend class Synthesiser;
double currentSampleRate = 44100.0;
int currentlyPlayingNote = -1, currentPlayingMidiChannel = 0;
uint32 noteOnTime = 0;
SynthesiserSound::Ptr currentlyPlayingSound;
bool keyIsDown = false, sustainPedalDown = false, sostenutoPedalDown = false;
AudioBuffer<float> tempBuffer;
JUCE_LEAK_DETECTOR (SynthesiserVoice)
};
//==============================================================================
/**
Base class for a musical device that can play sounds.
To create a synthesiser, you'll need to create a subclass of SynthesiserSound
to describe each sound available to your synth, and a subclass of SynthesiserVoice
which can play back one of these sounds.
Then you can use the addVoice() and addSound() methods to give the synthesiser a
set of sounds, and a set of voices it can use to play them. If you only give it
one voice it will be monophonic - the more voices it has, the more polyphony it'll
have available.
Then repeatedly call the renderNextBlock() method to produce the audio. Any midi
events that go in will be scanned for note on/off messages, and these are used to
start and stop the voices playing the appropriate sounds.
While it's playing, you can also cause notes to be triggered by calling the noteOn(),
noteOff() and other controller methods.
Before rendering, be sure to call the setCurrentPlaybackSampleRate() to tell it
what the target playback rate is. This value is passed on to the voices so that
they can pitch their output correctly.
@tags{Audio}
*/
class JUCE_API Synthesiser
{
public:
//==============================================================================
/** Creates a new synthesiser.
You'll need to add some sounds and voices before it'll make any sound.
*/
Synthesiser();
/** Destructor. */
virtual ~Synthesiser();
//==============================================================================
/** Deletes all voices. */
void clearVoices();
/** Returns the number of voices that have been added. */
int getNumVoices() const noexcept { return voices.size(); }
/** Returns one of the voices that have been added. */
SynthesiserVoice* getVoice (int index) const;
/** Adds a new voice to the synth.
All the voices should be the same class of object and are treated equally.
The object passed in will be managed by the synthesiser, which will delete
it later on when no longer needed. The caller should not retain a pointer to the
voice.
*/
SynthesiserVoice* addVoice (SynthesiserVoice* newVoice);
/** Deletes one of the voices. */
void removeVoice (int index);
//==============================================================================
/** Deletes all sounds. */
void clearSounds();
/** Returns the number of sounds that have been added to the synth. */
int getNumSounds() const noexcept { return sounds.size(); }
/** Returns one of the sounds. */
SynthesiserSound::Ptr getSound (int index) const noexcept { return sounds[index]; }
/** Adds a new sound to the synthesiser.
The object passed in is reference counted, so will be deleted when the
synthesiser and all voices are no longer using it.
*/
SynthesiserSound* addSound (const SynthesiserSound::Ptr& newSound);
/** Removes and deletes one of the sounds. */
void removeSound (int index);
//==============================================================================
/** If set to true, then the synth will try to take over an existing voice if
it runs out and needs to play another note.
The value of this boolean is passed into findFreeVoice(), so the result will
depend on the implementation of this method.
*/
void setNoteStealingEnabled (bool shouldStealNotes);
/** Returns true if note-stealing is enabled.
@see setNoteStealingEnabled
*/
bool isNoteStealingEnabled() const noexcept { return shouldStealNotes; }
//==============================================================================
/** Triggers a note-on event.
The default method here will find all the sounds that want to be triggered by
this note/channel. For each sound, it'll try to find a free voice, and use the
voice to start playing the sound.
Subclasses might want to override this if they need a more complex algorithm.
This method will be called automatically according to the midi data passed into
renderNextBlock(), but may be called explicitly too.
The midiChannel parameter is the channel, between 1 and 16 inclusive.
*/
virtual void noteOn (int midiChannel,
int midiNoteNumber,
float velocity);
/** Triggers a note-off event.
This will turn off any voices that are playing a sound for the given note/channel.
If allowTailOff is true, the voices will be allowed to fade out the notes gracefully
(if they can do). If this is false, the notes will all be cut off immediately.
This method will be called automatically according to the midi data passed into
renderNextBlock(), but may be called explicitly too.
The midiChannel parameter is the channel, between 1 and 16 inclusive.
*/
virtual void noteOff (int midiChannel,
int midiNoteNumber,
float velocity,
bool allowTailOff);
/** Turns off all notes.
This will turn off any voices that are playing a sound on the given midi channel.
If midiChannel is 0 or less, then all voices will be turned off, regardless of
which channel they're playing. Otherwise it represents a valid midi channel, from
1 to 16 inclusive.
If allowTailOff is true, the voices will be allowed to fade out the notes gracefully
(if they can do). If this is false, the notes will all be cut off immediately.
This method will be called automatically according to the midi data passed into
renderNextBlock(), but may be called explicitly too.
*/
virtual void allNotesOff (int midiChannel,
bool allowTailOff);
/** Sends a pitch-wheel message to any active voices.
This will send a pitch-wheel message to any voices that are playing sounds on
the given midi channel.
This method will be called automatically according to the midi data passed into
renderNextBlock(), but may be called explicitly too.
@param midiChannel the midi channel, from 1 to 16 inclusive
@param wheelValue the wheel position, from 0 to 0x3fff, as returned by MidiMessage::getPitchWheelValue()
*/
virtual void handlePitchWheel (int midiChannel,
int wheelValue);
/** Sends a midi controller message to any active voices.
This will send a midi controller message to any voices that are playing sounds on
the given midi channel.
This method will be called automatically according to the midi data passed into
renderNextBlock(), but may be called explicitly too.
@param midiChannel the midi channel, from 1 to 16 inclusive
@param controllerNumber the midi controller type, as returned by MidiMessage::getControllerNumber()
@param controllerValue the midi controller value, between 0 and 127, as returned by MidiMessage::getControllerValue()
*/
virtual void handleController (int midiChannel,
int controllerNumber,
int controllerValue);
/** Sends an aftertouch message.
This will send an aftertouch message to any voices that are playing sounds on
the given midi channel and note number.
This method will be called automatically according to the midi data passed into
renderNextBlock(), but may be called explicitly too.
@param midiChannel the midi channel, from 1 to 16 inclusive
@param midiNoteNumber the midi note number, 0 to 127
@param aftertouchValue the aftertouch value, between 0 and 127,
as returned by MidiMessage::getAftertouchValue()
*/
virtual void handleAftertouch (int midiChannel, int midiNoteNumber, int aftertouchValue);
/** Sends a channel pressure message.
This will send a channel pressure message to any voices that are playing sounds on
the given midi channel.
This method will be called automatically according to the midi data passed into
renderNextBlock(), but may be called explicitly too.
@param midiChannel the midi channel, from 1 to 16 inclusive
@param channelPressureValue the pressure value, between 0 and 127, as returned
by MidiMessage::getChannelPressureValue()
*/
virtual void handleChannelPressure (int midiChannel, int channelPressureValue);
/** Handles a sustain pedal event. */
virtual void handleSustainPedal (int midiChannel, bool isDown);
/** Handles a sostenuto pedal event. */
virtual void handleSostenutoPedal (int midiChannel, bool isDown);
/** Can be overridden to handle soft pedal events. */
virtual void handleSoftPedal (int midiChannel, bool isDown);
/** Can be overridden to handle an incoming program change message.
The base class implementation of this has no effect, but you may want to make your
own synth react to program changes.
*/
virtual void handleProgramChange (int midiChannel,
int programNumber);
//==============================================================================
/** Tells the synthesiser what the sample rate is for the audio it's being used to render.
This value is propagated to the voices so that they can use it to render the correct
pitches.
*/
virtual void setCurrentPlaybackSampleRate (double sampleRate);
/** Creates the next block of audio output.
This will process the next numSamples of data from all the voices, and add that output
to the audio block supplied, starting from the offset specified. Note that the
data will be added to the current contents of the buffer, so you should clear it
before calling this method if necessary.
The midi events in the inputMidi buffer are parsed for note and controller events,
and these are used to trigger the voices. Note that the startSample offset applies
both to the audio output buffer and the midi input buffer, so any midi events
with timestamps outside the specified region will be ignored.
*/
void renderNextBlock (AudioBuffer<float>& outputAudio,
const MidiBuffer& inputMidi,
int startSample,
int numSamples);
void renderNextBlock (AudioBuffer<double>& outputAudio,
const MidiBuffer& inputMidi,
int startSample,
int numSamples);
/** Returns the current target sample rate at which rendering is being done.
Subclasses may need to know this so that they can pitch things correctly.
*/
double getSampleRate() const noexcept { return sampleRate; }
/** Sets a minimum limit on the size to which audio sub-blocks will be divided when rendering.
When rendering, the audio blocks that are passed into renderNextBlock() will be split up
into smaller blocks that lie between all the incoming midi messages, and it is these smaller
sub-blocks that are rendered with multiple calls to renderVoices().
Obviously in a pathological case where there are midi messages on every sample, then
renderVoices() could be called once per sample and lead to poor performance, so this
setting allows you to set a lower limit on the block size.
The default setting is 32, which means that midi messages are accurate to about < 1ms
accuracy, which is probably fine for most purposes, but you may want to increase or
decrease this value for your synth.
If shouldBeStrict is true, the audio sub-blocks will strictly never be smaller than numSamples.
If shouldBeStrict is false (default), the first audio sub-block in the buffer is allowed
to be smaller, to make sure that the first MIDI event in a buffer will always be sample-accurate
(this can sometimes help to avoid quantisation or phasing issues).
*/
void setMinimumRenderingSubdivisionSize (int numSamples, bool shouldBeStrict = false) noexcept;
protected:
//==============================================================================
/** This is used to control access to the rendering callback and the note trigger methods. */
CriticalSection lock;
OwnedArray<SynthesiserVoice> voices;
ReferenceCountedArray<SynthesiserSound> sounds;
/** The last pitch-wheel values for each midi channel. */
int lastPitchWheelValues [16];
/** Renders the voices for the given range.
By default this just calls renderNextBlock() on each voice, but you may need
to override it to handle custom cases.
*/
virtual void renderVoices (AudioBuffer<float>& outputAudio,
int startSample, int numSamples);
virtual void renderVoices (AudioBuffer<double>& outputAudio,
int startSample, int numSamples);
/** Searches through the voices to find one that's not currently playing, and
which can play the given sound.
Returns nullptr if all voices are busy and stealing isn't enabled.
To implement a custom note-stealing algorithm, you can either override this
method, or (preferably) override findVoiceToSteal().
*/
virtual SynthesiserVoice* findFreeVoice (SynthesiserSound* soundToPlay,
int midiChannel,
int midiNoteNumber,
bool stealIfNoneAvailable) const;
/** Chooses a voice that is most suitable for being re-used.
The default method will attempt to find the oldest voice that isn't the
bottom or top note being played. If that's not suitable for your synth,
you can override this method and do something more cunning instead.
*/
virtual SynthesiserVoice* findVoiceToSteal (SynthesiserSound* soundToPlay,
int midiChannel,
int midiNoteNumber) const;
/** Starts a specified voice playing a particular sound.
You'll probably never need to call this, it's used internally by noteOn(), but
may be needed by subclasses for custom behaviours.
*/
void startVoice (SynthesiserVoice* voice,
SynthesiserSound* sound,
int midiChannel,
int midiNoteNumber,
float velocity);
/** Stops a given voice.
You should never need to call this, it's used internally by noteOff, but is protected
in case it's useful for some custom subclasses. It basically just calls through to
SynthesiserVoice::stopNote(), and has some assertions to sanity-check a few things.
*/
void stopVoice (SynthesiserVoice*, float velocity, bool allowTailOff);
/** Can be overridden to do custom handling of incoming midi events. */
virtual void handleMidiEvent (const MidiMessage&);
private:
//==============================================================================
double sampleRate = 0;
uint32 lastNoteOnCounter = 0;
int minimumSubBlockSize = 32;
bool subBlockSubdivisionIsStrict = false;
bool shouldStealNotes = true;
BigInteger sustainPedalsDown;
template <typename floatType>
void processNextBlock (AudioBuffer<floatType>&, const MidiBuffer&, int startSample, int numSamples);
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (Synthesiser)
};
} // namespace juce
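To make the class documentation above concrete, here is a hedged, minimal sketch of the two subclasses a synth needs; the SineSound/SineVoice names are invented for illustration, and the JUCE audio modules are assumed to be included.

// Sound: plays on every note and channel.
struct SineSound : public juce::SynthesiserSound
{
    bool appliesToNote (int) override    { return true; }
    bool appliesToChannel (int) override { return true; }
};

// Voice: a plain sine oscillator with no tail-off.
struct SineVoice : public juce::SynthesiserVoice
{
    bool canPlaySound (juce::SynthesiserSound* s) override
    {
        return dynamic_cast<SineSound*> (s) != nullptr;
    }

    void startNote (int note, float velocity, juce::SynthesiserSound*, int) override
    {
        level = velocity * 0.2;
        phase = 0.0;
        phaseDelta = juce::MathConstants<double>::twoPi
                       * juce::MidiMessage::getMidiNoteInHertz (note) / getSampleRate();
    }

    void stopNote (float, bool) override
    {
        clearCurrentNote();   // no tail-off in this sketch: free the voice immediately
        phaseDelta = 0.0;
    }

    void pitchWheelMoved (int) override {}
    void controllerMoved (int, int) override {}

    void renderNextBlock (juce::AudioBuffer<float>& out, int startSample, int numSamples) override
    {
        if (phaseDelta == 0.0)
            return;           // voice is silent

        while (--numSamples >= 0)
        {
            auto sample = (float) (std::sin (phase) * level);
            for (int ch = out.getNumChannels(); --ch >= 0;)
                out.addSample (ch, startSample, sample);   // add, don't overwrite
            phase += phaseDelta;
            ++startSample;
        }
    }

    double phase = 0.0, phaseDelta = 0.0, level = 0.0;
};

// Wiring it up, e.g. in an AudioProcessor constructor:
static void setUpSynth (juce::Synthesiser& synth)
{
    for (int i = 0; i < 8; ++i)           // 8 voices -> 8-note polyphony
        synth.addVoice (new SineVoice());
    synth.addSound (new SineSound());     // reference-counted; shared by all voices
}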

+ 0
- 297
libs/juce7/source/modules/juce_audio_basics/utilities/juce_ADSR.h

@@ -1,297 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
A very simple ADSR envelope class.
To use it, call setSampleRate() with the current sample rate and give it some parameters
with setParameters() then call getNextSample() to get the envelope value to be applied
to each audio sample or applyEnvelopeToBuffer() to apply the envelope to a whole buffer.
Do not change the parameters during playback. If you change the parameters before the
release stage has completed then you must call reset() before the next call to
noteOn().
@tags{Audio}
*/
class JUCE_API ADSR
{
public:
//==============================================================================
ADSR()
{
recalculateRates();
}
//==============================================================================
/**
Holds the parameters being used by an ADSR object.
@tags{Audio}
*/
struct JUCE_API Parameters
{
Parameters() = default;
Parameters (float attackTimeSeconds,
float decayTimeSeconds,
float sustainLevel,
float releaseTimeSeconds)
: attack (attackTimeSeconds),
decay (decayTimeSeconds),
sustain (sustainLevel),
release (releaseTimeSeconds)
{
}
float attack = 0.1f, decay = 0.1f, sustain = 1.0f, release = 0.1f;
};
/** Sets the parameters that will be used by an ADSR object.
You must have called setSampleRate() with the correct sample rate before
this otherwise the values may be incorrect!
@see getParameters
*/
void setParameters (const Parameters& newParameters)
{
// need to call setSampleRate() first!
jassert (sampleRate > 0.0);
parameters = newParameters;
recalculateRates();
}
/** Returns the parameters currently being used by an ADSR object.
@see setParameters
*/
const Parameters& getParameters() const noexcept { return parameters; }
/** Returns true if the envelope is in its attack, decay, sustain or release stage. */
bool isActive() const noexcept { return state != State::idle; }
//==============================================================================
/** Sets the sample rate that will be used for the envelope.
This must be called before the getNextSample() or setParameters() methods.
*/
void setSampleRate (double newSampleRate) noexcept
{
jassert (newSampleRate > 0.0);
sampleRate = newSampleRate;
}
//==============================================================================
/** Resets the envelope to an idle state. */
void reset() noexcept
{
envelopeVal = 0.0f;
state = State::idle;
}
/** Starts the attack phase of the envelope. */
void noteOn() noexcept
{
if (attackRate > 0.0f)
{
state = State::attack;
}
else if (decayRate > 0.0f)
{
envelopeVal = 1.0f;
state = State::decay;
}
else
{
envelopeVal = parameters.sustain;
state = State::sustain;
}
}
/** Starts the release phase of the envelope. */
void noteOff() noexcept
{
if (state != State::idle)
{
if (parameters.release > 0.0f)
{
releaseRate = (float) (envelopeVal / (parameters.release * sampleRate));
state = State::release;
}
else
{
reset();
}
}
}
//==============================================================================
/** Returns the next sample value for an ADSR object.
@see applyEnvelopeToBuffer
*/
float getNextSample() noexcept
{
switch (state)
{
case State::idle:
{
return 0.0f;
}
case State::attack:
{
envelopeVal += attackRate;
if (envelopeVal >= 1.0f)
{
envelopeVal = 1.0f;
goToNextState();
}
break;
}
case State::decay:
{
envelopeVal -= decayRate;
if (envelopeVal <= parameters.sustain)
{
envelopeVal = parameters.sustain;
goToNextState();
}
break;
}
case State::sustain:
{
envelopeVal = parameters.sustain;
break;
}
case State::release:
{
envelopeVal -= releaseRate;
if (envelopeVal <= 0.0f)
goToNextState();
break;
}
}
return envelopeVal;
}
/** This method will conveniently apply the next numSamples number of envelope values
to an AudioBuffer.
@see getNextSample
*/
template <typename FloatType>
void applyEnvelopeToBuffer (AudioBuffer<FloatType>& buffer, int startSample, int numSamples)
{
jassert (startSample + numSamples <= buffer.getNumSamples());
if (state == State::idle)
{
buffer.clear (startSample, numSamples);
return;
}
if (state == State::sustain)
{
buffer.applyGain (startSample, numSamples, parameters.sustain);
return;
}
auto numChannels = buffer.getNumChannels();
while (--numSamples >= 0)
{
auto env = getNextSample();
for (int i = 0; i < numChannels; ++i)
buffer.getWritePointer (i)[startSample] *= env;
++startSample;
}
}
private:
//==============================================================================
void recalculateRates() noexcept
{
auto getRate = [] (float distance, float timeInSeconds, double sr)
{
return timeInSeconds > 0.0f ? (float) (distance / (timeInSeconds * sr)) : -1.0f;
};
attackRate = getRate (1.0f, parameters.attack, sampleRate);
decayRate = getRate (1.0f - parameters.sustain, parameters.decay, sampleRate);
releaseRate = getRate (parameters.sustain, parameters.release, sampleRate);
if ((state == State::attack && attackRate <= 0.0f)
|| (state == State::decay && (decayRate <= 0.0f || envelopeVal <= parameters.sustain))
|| (state == State::release && releaseRate <= 0.0f))
{
goToNextState();
}
}
void goToNextState() noexcept
{
if (state == State::attack)
{
state = (decayRate > 0.0f ? State::decay : State::sustain);
return;
}
if (state == State::decay)
{
state = State::sustain;
return;
}
if (state == State::release)
reset();
}
//==============================================================================
enum class State { idle, attack, decay, sustain, release };
State state = State::idle;
Parameters parameters;
double sampleRate = 44100.0;
float envelopeVal = 0.0f, attackRate = 0.0f, decayRate = 0.0f, releaseRate = 0.0f;
};
} // namespace juce
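A short usage sketch following the sequence in the class comment above (the 48 kHz rate and the parameter values are arbitrary assumptions):

juce::ADSR adsr;
adsr.setSampleRate (48000.0);                       // must happen before setParameters()
adsr.setParameters ({ 0.05f, 0.2f, 0.7f, 0.3f });   // attack, decay, sustain level, release

adsr.noteOn();                                      // enters the attack stage

juce::AudioBuffer<float> block (2, 256);
// ... fill `block` with the dry signal for this voice ...
adsr.applyEnvelopeToBuffer (block, 0, block.getNumSamples());

adsr.noteOff();                                     // later: begins the release stage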

+ 0
- 257
libs/juce7/source/modules/juce_audio_basics/utilities/juce_ADSR_test.cpp

@@ -1,257 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
struct ADSRTests : public UnitTest
{
ADSRTests() : UnitTest ("ADSR", UnitTestCategories::audio) {}
void runTest() override
{
constexpr double sampleRate = 44100.0;
const ADSR::Parameters parameters { 0.1f, 0.1f, 0.5f, 0.1f };
ADSR adsr;
adsr.setSampleRate (sampleRate);
adsr.setParameters (parameters);
beginTest ("Idle");
{
adsr.reset();
expect (! adsr.isActive());
expectEquals (adsr.getNextSample(), 0.0f);
}
beginTest ("Attack");
{
adsr.reset();
adsr.noteOn();
expect (adsr.isActive());
auto buffer = getTestBuffer (sampleRate, parameters.attack);
adsr.applyEnvelopeToBuffer (buffer, 0, buffer.getNumSamples());
expect (isIncreasing (buffer));
}
beginTest ("Decay");
{
adsr.reset();
adsr.noteOn();
advanceADSR (adsr, roundToInt (parameters.attack * sampleRate));
auto buffer = getTestBuffer (sampleRate, parameters.decay);
adsr.applyEnvelopeToBuffer (buffer, 0, buffer.getNumSamples());
expect (isDecreasing (buffer));
}
beginTest ("Sustain");
{
adsr.reset();
adsr.noteOn();
advanceADSR (adsr, roundToInt ((parameters.attack + parameters.decay + 0.01) * sampleRate));
auto random = getRandom();
for (int numTests = 0; numTests < 100; ++numTests)
{
const auto sustainLevel = random.nextFloat();
const auto sustainLength = jmax (0.1f, random.nextFloat());
adsr.setParameters ({ parameters.attack, parameters.decay, sustainLevel, parameters.release });
auto buffer = getTestBuffer (sampleRate, sustainLength);
adsr.applyEnvelopeToBuffer (buffer, 0, buffer.getNumSamples());
expect (isSustained (buffer, sustainLevel));
}
}
beginTest ("Release");
{
adsr.reset();
adsr.noteOn();
advanceADSR (adsr, roundToInt ((parameters.attack + parameters.decay) * sampleRate));
adsr.noteOff();
auto buffer = getTestBuffer (sampleRate, parameters.release);
adsr.applyEnvelopeToBuffer (buffer, 0, buffer.getNumSamples());
expect (isDecreasing (buffer));
}
beginTest ("Zero-length attack jumps to decay");
{
adsr.reset();
adsr.setParameters ({ 0.0f, parameters.decay, parameters.sustain, parameters.release });
adsr.noteOn();
auto buffer = getTestBuffer (sampleRate, parameters.decay);
adsr.applyEnvelopeToBuffer (buffer, 0, buffer.getNumSamples());
expect (isDecreasing (buffer));
}
beginTest ("Zero-length decay jumps to sustain");
{
adsr.reset();
adsr.setParameters ({ parameters.attack, 0.0f, parameters.sustain, parameters.release });
adsr.noteOn();
advanceADSR (adsr, roundToInt (parameters.attack * sampleRate));
adsr.getNextSample();
expectEquals (adsr.getNextSample(), parameters.sustain);
auto buffer = getTestBuffer (sampleRate, 1);
adsr.applyEnvelopeToBuffer (buffer, 0, buffer.getNumSamples());
expect (isSustained (buffer, parameters.sustain));
}
beginTest ("Zero-length attack and decay jumps to sustain");
{
adsr.reset();
adsr.setParameters ({ 0.0f, 0.0f, parameters.sustain, parameters.release });
adsr.noteOn();
expectEquals (adsr.getNextSample(), parameters.sustain);
auto buffer = getTestBuffer (sampleRate, 1);
adsr.applyEnvelopeToBuffer (buffer, 0, buffer.getNumSamples());
expect (isSustained (buffer, parameters.sustain));
}
beginTest ("Zero-length attack and decay releases correctly");
{
adsr.reset();
adsr.setParameters ({ 0.0f, 0.0f, parameters.sustain, parameters.release });
adsr.noteOn();
adsr.noteOff();
auto buffer = getTestBuffer (sampleRate, parameters.release);
adsr.applyEnvelopeToBuffer (buffer, 0, buffer.getNumSamples());
expect (isDecreasing (buffer));
}
beginTest ("Zero-length release resets to idle");
{
adsr.reset();
adsr.setParameters ({ parameters.attack, parameters.decay, parameters.sustain, 0.0f });
adsr.noteOn();
advanceADSR (adsr, roundToInt ((parameters.attack + parameters.decay) * sampleRate));
adsr.noteOff();
expect (! adsr.isActive());
}
}
static void advanceADSR (ADSR& adsr, int numSamplesToAdvance)
{
while (--numSamplesToAdvance >= 0)
adsr.getNextSample();
}
static AudioBuffer<float> getTestBuffer (double sampleRate, float lengthInSeconds)
{
AudioBuffer<float> buffer { 2, roundToInt (lengthInSeconds * sampleRate) };
for (int channel = 0; channel < buffer.getNumChannels(); ++channel)
for (int sample = 0; sample < buffer.getNumSamples(); ++sample)
buffer.setSample (channel, sample, 1.0f);
return buffer;
}
static bool isIncreasing (const AudioBuffer<float>& b)
{
jassert (b.getNumChannels() > 0 && b.getNumSamples() > 0);
for (int channel = 0; channel < b.getNumChannels(); ++channel)
{
float previousSample = -1.0f;
for (int sample = 0; sample < b.getNumSamples(); ++sample)
{
const auto currentSample = b.getSample (channel, sample);
if (currentSample <= previousSample)
return false;
previousSample = currentSample;
}
}
return true;
}
static bool isDecreasing (const AudioBuffer<float>& b)
{
jassert (b.getNumChannels() > 0 && b.getNumSamples() > 0);
for (int channel = 0; channel < b.getNumChannels(); ++channel)
{
float previousSample = std::numeric_limits<float>::max();
for (int sample = 0; sample < b.getNumSamples(); ++sample)
{
const auto currentSample = b.getSample (channel, sample);
if (currentSample >= previousSample)
return false;
previousSample = currentSample;
}
}
return true;
}
static bool isSustained (const AudioBuffer<float>& b, float sustainLevel)
{
jassert (b.getNumChannels() > 0 && b.getNumSamples() > 0);
for (int channel = 0; channel < b.getNumChannels(); ++channel)
if (b.findMinMax (channel, 0, b.getNumSamples()) != Range<float> { sustainLevel, sustainLevel })
return false;
return true;
}
};
static ADSRTests adsrTests;
} // namespace juce
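These tests register themselves through the static adsrTests object above; a hedged sketch of running them from application code (assuming the juce_core unit-test framework is built in) would be:

juce::UnitTestRunner runner;
runner.runAllTests();   // picks up ADSRTests along with any other registered UnitTest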

+ 0
- 112
libs/juce7/source/modules/juce_audio_basics/utilities/juce_Decibels.h

@@ -1,112 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
//==============================================================================
/**
This class contains some helpful static methods for dealing with decibel values.
@tags{Audio}
*/
class Decibels
{
public:
//==============================================================================
/** Converts a dBFS value to its equivalent gain level.
A gain of 1.0 = 0 dB, and lower gains map onto negative decibel values. Any
decibel value lower than minusInfinityDb will return a gain of 0.
*/
template <typename Type>
static Type decibelsToGain (Type decibels,
Type minusInfinityDb = Type (defaultMinusInfinitydB))
{
return decibels > minusInfinityDb ? std::pow (Type (10.0), decibels * Type (0.05))
: Type();
}
/** Converts a gain level into a dBFS value.
A gain of 1.0 = 0 dB, and lower gains map onto negative decibel values.
If the gain is 0 (or negative), then the method will return the value
provided as minusInfinityDb.
*/
template <typename Type>
static Type gainToDecibels (Type gain,
Type minusInfinityDb = Type (defaultMinusInfinitydB))
{
return gain > Type() ? jmax (minusInfinityDb, static_cast<Type> (std::log10 (gain)) * Type (20.0))
: minusInfinityDb;
}
//==============================================================================
/** Converts a decibel reading to a string.
By default the returned string will have the 'dB' suffix added, but this can be removed by
setting the shouldIncludeSuffix argument to false. If a customMinusInfinityString argument
is provided this will be returned if the value is lower than minusInfinityDb, otherwise
the return value will be "-INF".
*/
template <typename Type>
static String toString (Type decibels,
int decimalPlaces = 2,
Type minusInfinityDb = Type (defaultMinusInfinitydB),
bool shouldIncludeSuffix = true,
StringRef customMinusInfinityString = {})
{
String s;
s.preallocateBytes (20);
if (decibels <= minusInfinityDb)
{
if (customMinusInfinityString.isEmpty())
s << "-INF";
else
s << customMinusInfinityString;
}
else
{
if (decibels >= Type())
s << '+';
if (decimalPlaces <= 0)
s << roundToInt (decibels);
else
s << String (decibels, decimalPlaces);
}
if (shouldIncludeSuffix)
s << " dB";
return s;
}
private:
//==============================================================================
enum { defaultMinusInfinitydB = -100 };
Decibels() = delete; // This class can't be instantiated, it's just a holder for static methods..
};
} // namespace juce
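A short usage sketch for the helpers above; the underlying mapping is gain = 10^(dB / 20), matching the pow (10, decibels * 0.05) in decibelsToGain(). Values are illustrative.
// Decibels usage sketch (assumes the JUCE headers are included).
float gain   = juce::Decibels::decibelsToGain (-6.0f);    // ~0.501
float dB     = juce::Decibels::gainToDecibels (0.5f);     // ~-6.02
juce::String label = juce::Decibels::toString (dB);       // "-6.02 dB" with the default settings
float silent = juce::Decibels::decibelsToGain (-120.0f);  // below the -100 dB default floor, so 0.0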

+ 0
- 497
libs/juce7/source/modules/juce_audio_basics/utilities/juce_GenericInterpolator.h

@@ -1,497 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
/**
An interpolator base class for resampling streams of floats.
Note that the resamplers are stateful, so when there's a break in the continuity
of the input stream you're feeding it, you should call reset() before feeding
it any new data. And like with any other stateful filter, if you're resampling
multiple channels, make sure each one uses its own interpolator object.
@see LagrangeInterpolator, CatmullRomInterpolator, WindowedSincInterpolator,
LinearInterpolator, ZeroOrderHoldInterpolator
@tags{Audio}
*/
template <class InterpolatorTraits, int memorySize>
class JUCE_API GenericInterpolator
{
public:
GenericInterpolator() noexcept { reset(); }
GenericInterpolator (GenericInterpolator&&) noexcept = default;
GenericInterpolator& operator= (GenericInterpolator&&) noexcept = default;
/** Returns the latency of the interpolation algorithm in isolation.
In the context of resampling the total latency of a process using
the interpolator is the base latency divided by the speed ratio.
*/
static constexpr float getBaseLatency() noexcept
{
return InterpolatorTraits::algorithmicLatency;
}
/** Resets the state of the interpolator.
Call this when there's a break in the continuity of the input data stream.
*/
void reset() noexcept
{
indexBuffer = 0;
subSamplePos = 1.0;
std::fill (std::begin (lastInputSamples), std::end (lastInputSamples), 0.0f);
}
/** Resamples a stream of samples.
@param speedRatio the number of input samples to use for each output sample
@param inputSamples the source data to read from. This must contain at
least (speedRatio * numOutputSamplesToProduce) samples.
@param outputSamples the buffer to write the results into
@param numOutputSamplesToProduce the number of output samples that should be created
@returns the actual number of input samples that were used
*/
int process (double speedRatio,
const float* inputSamples,
float* outputSamples,
int numOutputSamplesToProduce) noexcept
{
return interpolate (speedRatio, inputSamples, outputSamples, numOutputSamplesToProduce);
}
/** Resamples a stream of samples.
@param speedRatio the number of input samples to use for each output sample
@param inputSamples the source data to read from. This must contain at
least (speedRatio * numOutputSamplesToProduce) samples.
@param outputSamples the buffer to write the results into
@param numOutputSamplesToProduce the number of output samples that should be created
@param numInputSamplesAvailable the number of available input samples. If it needs more samples
than available, it either wraps back for wrapAround samples, or
it feeds zeroes
@param wrapAround if the stream exceeds available samples, it wraps back for
wrapAround samples. If wrapAround is set to 0, it will feed zeroes.
@returns the actual number of input samples that were used
*/
int process (double speedRatio,
const float* inputSamples,
float* outputSamples,
int numOutputSamplesToProduce,
int numInputSamplesAvailable,
int wrapAround) noexcept
{
return interpolate (speedRatio, inputSamples, outputSamples,
numOutputSamplesToProduce, numInputSamplesAvailable, wrapAround);
}
/** Resamples a stream of samples, adding the results to the output data
with a gain.
@param speedRatio the number of input samples to use for each output sample
@param inputSamples the source data to read from. This must contain at
least (speedRatio * numOutputSamplesToProduce) samples.
@param outputSamples the buffer to write the results to - the result values will be added
to any pre-existing data in this buffer after being multiplied by
the gain factor
@param numOutputSamplesToProduce the number of output samples that should be created
@param gain a gain factor to multiply the resulting samples by before
adding them to the destination buffer
@returns the actual number of input samples that were used
*/
int processAdding (double speedRatio,
const float* inputSamples,
float* outputSamples,
int numOutputSamplesToProduce,
float gain) noexcept
{
return interpolateAdding (speedRatio, inputSamples, outputSamples, numOutputSamplesToProduce, gain);
}
/** Resamples a stream of samples, adding the results to the output data
with a gain.
@param speedRatio the number of input samples to use for each output sample
@param inputSamples the source data to read from. This must contain at
least (speedRatio * numOutputSamplesToProduce) samples.
@param outputSamples the buffer to write the results to - the result values will be added
to any pre-existing data in this buffer after being multiplied by
the gain factor
@param numOutputSamplesToProduce the number of output samples that should be created
@param numInputSamplesAvailable the number of available input samples. If it needs more samples
than available, it either wraps back for wrapAround samples, or
it feeds zeroes
@param wrapAround if the stream exceeds available samples, it wraps back for
wrapAround samples. If wrapAround is set to 0, it will feed zeroes.
@param gain a gain factor to multiply the resulting samples by before
adding them to the destination buffer
@returns the actual number of input samples that were used
*/
int processAdding (double speedRatio,
const float* inputSamples,
float* outputSamples,
int numOutputSamplesToProduce,
int numInputSamplesAvailable,
int wrapAround,
float gain) noexcept
{
return interpolateAdding (speedRatio, inputSamples, outputSamples,
numOutputSamplesToProduce, numInputSamplesAvailable, wrapAround, gain);
}
private:
//==============================================================================
forcedinline void pushInterpolationSample (float newValue) noexcept
{
lastInputSamples[indexBuffer] = newValue;
if (++indexBuffer == memorySize)
indexBuffer = 0;
}
forcedinline void pushInterpolationSamples (const float* input,
int numOutputSamplesToProduce) noexcept
{
if (numOutputSamplesToProduce >= memorySize)
{
const auto* const offsetInput = input + (numOutputSamplesToProduce - memorySize);
for (int i = 0; i < memorySize; ++i)
pushInterpolationSample (offsetInput[i]);
}
else
{
for (int i = 0; i < numOutputSamplesToProduce; ++i)
pushInterpolationSample (input[i]);
}
}
forcedinline void pushInterpolationSamples (const float* input,
int numOutputSamplesToProduce,
int numInputSamplesAvailable,
int wrapAround) noexcept
{
if (numOutputSamplesToProduce >= memorySize)
{
if (numInputSamplesAvailable >= memorySize)
{
pushInterpolationSamples (input,
numOutputSamplesToProduce);
}
else
{
pushInterpolationSamples (input + ((numOutputSamplesToProduce - numInputSamplesAvailable) - 1),
numInputSamplesAvailable);
if (wrapAround > 0)
{
numOutputSamplesToProduce -= wrapAround;
pushInterpolationSamples (input + ((numOutputSamplesToProduce - (memorySize - numInputSamplesAvailable)) - 1),
memorySize - numInputSamplesAvailable);
}
else
{
for (int i = numInputSamplesAvailable; i < memorySize; ++i)
pushInterpolationSample (0.0f);
}
}
}
else
{
if (numOutputSamplesToProduce > numInputSamplesAvailable)
{
for (int i = 0; i < numInputSamplesAvailable; ++i)
pushInterpolationSample (input[i]);
const auto extraSamples = numOutputSamplesToProduce - numInputSamplesAvailable;
if (wrapAround > 0)
{
const auto* const offsetInput = input + (numInputSamplesAvailable - wrapAround);
for (int i = 0; i < extraSamples; ++i)
pushInterpolationSample (offsetInput[i]);
}
else
{
for (int i = 0; i < extraSamples; ++i)
pushInterpolationSample (0.0f);
}
}
else
{
for (int i = 0; i < numOutputSamplesToProduce; ++i)
pushInterpolationSample (input[i]);
}
}
}
//==============================================================================
int interpolate (double speedRatio,
const float* input,
float* output,
int numOutputSamplesToProduce) noexcept
{
auto pos = subSamplePos;
int numUsed = 0;
while (numOutputSamplesToProduce > 0)
{
while (pos >= 1.0)
{
pushInterpolationSample (input[numUsed++]);
pos -= 1.0;
}
*output++ = InterpolatorTraits::valueAtOffset (lastInputSamples, (float) pos, indexBuffer);
pos += speedRatio;
--numOutputSamplesToProduce;
}
subSamplePos = pos;
return numUsed;
}
int interpolate (double speedRatio,
const float* input, float* output,
int numOutputSamplesToProduce,
int numInputSamplesAvailable,
int wrap) noexcept
{
auto originalIn = input;
auto pos = subSamplePos;
bool exceeded = false;
if (speedRatio < 1.0)
{
for (int i = numOutputSamplesToProduce; --i >= 0;)
{
if (pos >= 1.0)
{
if (exceeded)
{
pushInterpolationSample (0.0f);
}
else
{
pushInterpolationSample (*input++);
if (--numInputSamplesAvailable <= 0)
{
if (wrap > 0)
{
input -= wrap;
numInputSamplesAvailable += wrap;
}
else
{
exceeded = true;
}
}
}
pos -= 1.0;
}
*output++ = InterpolatorTraits::valueAtOffset (lastInputSamples, (float) pos, indexBuffer);
pos += speedRatio;
}
}
else
{
for (int i = numOutputSamplesToProduce; --i >= 0;)
{
while (pos < speedRatio)
{
if (exceeded)
{
pushInterpolationSample (0);
}
else
{
pushInterpolationSample (*input++);
if (--numInputSamplesAvailable <= 0)
{
if (wrap > 0)
{
input -= wrap;
numInputSamplesAvailable += wrap;
}
else
{
exceeded = true;
}
}
}
pos += 1.0;
}
pos -= speedRatio;
*output++ = InterpolatorTraits::valueAtOffset (lastInputSamples, jmax (0.0f, 1.0f - (float) pos), indexBuffer);
}
}
subSamplePos = pos;
if (wrap == 0)
return (int) (input - originalIn);
return ((int) (input - originalIn) + wrap) % wrap;
}
int interpolateAdding (double speedRatio,
const float* input,
float* output,
int numOutputSamplesToProduce,
int numInputSamplesAvailable,
int wrap,
float gain) noexcept
{
auto originalIn = input;
auto pos = subSamplePos;
bool exceeded = false;
if (speedRatio < 1.0)
{
for (int i = numOutputSamplesToProduce; --i >= 0;)
{
if (pos >= 1.0)
{
if (exceeded)
{
pushInterpolationSample (0.0);
}
else
{
pushInterpolationSample (*input++);
if (--numInputSamplesAvailable <= 0)
{
if (wrap > 0)
{
input -= wrap;
numInputSamplesAvailable += wrap;
}
else
{
exceeded = true; // stop consuming input once the available samples are exhausted (matches the non-adding interpolate path)
}
}
}
pos -= 1.0;
}
*output++ += gain * InterpolatorTraits::valueAtOffset (lastInputSamples, (float) pos, indexBuffer);
pos += speedRatio;
}
}
else
{
for (int i = numOutputSamplesToProduce; --i >= 0;)
{
while (pos < speedRatio)
{
if (exceeded)
{
pushInterpolationSample (0.0);
}
else
{
pushInterpolationSample (*input++);
if (--numInputSamplesAvailable <= 0)
{
if (wrap > 0)
{
input -= wrap;
numInputSamplesAvailable += wrap;
}
else
{
exceeded = true;
}
}
}
pos += 1.0;
}
pos -= speedRatio;
*output++ += gain * InterpolatorTraits::valueAtOffset (lastInputSamples, jmax (0.0f, 1.0f - (float) pos), indexBuffer);
}
}
subSamplePos = pos;
if (wrap == 0)
return (int) (input - originalIn);
return ((int) (input - originalIn) + wrap) % wrap;
}
int interpolateAdding (double speedRatio,
const float* input,
float* output,
int numOutputSamplesToProduce,
float gain) noexcept
{
auto pos = subSamplePos;
int numUsed = 0;
while (numOutputSamplesToProduce > 0)
{
while (pos >= 1.0)
{
pushInterpolationSample (input[numUsed++]);
pos -= 1.0;
}
*output++ += gain * InterpolatorTraits::valueAtOffset (lastInputSamples, (float) pos, indexBuffer);
pos += speedRatio;
--numOutputSamplesToProduce;
}
subSamplePos = pos;
return numUsed;
}
//==============================================================================
float lastInputSamples[(size_t) memorySize];
double subSamplePos = 1.0;
int indexBuffer = 0;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (GenericInterpolator)
};
} // namespace juce
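A minimal resampling sketch using one of the concrete interpolators built on this base (LagrangeInterpolator, as listed in the @see above); rates and buffer sizes are illustrative. As the class notes say, each channel needs its own stateful instance and reset() should be called whenever the input stream is discontinuous.
// Resampling sketch (assumes the JUCE headers are included): 48 kHz -> 44.1 kHz, one channel.
juce::LagrangeInterpolator resampler;
resampler.reset();

const double speedRatio = 48000.0 / 44100.0;   // input samples consumed per output sample
float input[1024] = {};                        // must hold at least speedRatio * numOutput samples
float output[900] = {};

// process() returns how many input samples were actually consumed.
const int numUsed = resampler.process (speedRatio, input, output, 900);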

+ 0
- 340
libs/juce7/source/modules/juce_audio_basics/utilities/juce_IIRFilter.cpp

@@ -1,340 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
IIRCoefficients::IIRCoefficients() noexcept
{
zeromem (coefficients, sizeof (coefficients));
}
IIRCoefficients::~IIRCoefficients() noexcept {}
IIRCoefficients::IIRCoefficients (const IIRCoefficients& other) noexcept
{
memcpy (coefficients, other.coefficients, sizeof (coefficients));
}
IIRCoefficients& IIRCoefficients::operator= (const IIRCoefficients& other) noexcept
{
memcpy (coefficients, other.coefficients, sizeof (coefficients));
return *this;
}
IIRCoefficients::IIRCoefficients (double c1, double c2, double c3,
double c4, double c5, double c6) noexcept
{
auto a = 1.0 / c4;
coefficients[0] = (float) (c1 * a);
coefficients[1] = (float) (c2 * a);
coefficients[2] = (float) (c3 * a);
coefficients[3] = (float) (c5 * a);
coefficients[4] = (float) (c6 * a);
}
IIRCoefficients IIRCoefficients::makeLowPass (double sampleRate,
double frequency) noexcept
{
return makeLowPass (sampleRate, frequency, 1.0 / MathConstants<double>::sqrt2);
}
IIRCoefficients IIRCoefficients::makeLowPass (double sampleRate,
double frequency,
double Q) noexcept
{
jassert (sampleRate > 0.0);
jassert (frequency > 0.0 && frequency <= sampleRate * 0.5);
jassert (Q > 0.0);
auto n = 1.0 / std::tan (MathConstants<double>::pi * frequency / sampleRate);
auto nSquared = n * n;
auto c1 = 1.0 / (1.0 + 1.0 / Q * n + nSquared);
return IIRCoefficients (c1,
c1 * 2.0,
c1,
1.0,
c1 * 2.0 * (1.0 - nSquared),
c1 * (1.0 - 1.0 / Q * n + nSquared));
}
IIRCoefficients IIRCoefficients::makeHighPass (double sampleRate,
double frequency) noexcept
{
return makeHighPass (sampleRate, frequency, 1.0 / std::sqrt(2.0));
}
IIRCoefficients IIRCoefficients::makeHighPass (double sampleRate,
double frequency,
double Q) noexcept
{
jassert (sampleRate > 0.0);
jassert (frequency > 0.0 && frequency <= sampleRate * 0.5);
jassert (Q > 0.0);
auto n = std::tan (MathConstants<double>::pi * frequency / sampleRate);
auto nSquared = n * n;
auto c1 = 1.0 / (1.0 + 1.0 / Q * n + nSquared);
return IIRCoefficients (c1,
c1 * -2.0,
c1,
1.0,
c1 * 2.0 * (nSquared - 1.0),
c1 * (1.0 - 1.0 / Q * n + nSquared));
}
IIRCoefficients IIRCoefficients::makeBandPass (double sampleRate,
double frequency) noexcept
{
return makeBandPass (sampleRate, frequency, 1.0 / MathConstants<double>::sqrt2);
}
IIRCoefficients IIRCoefficients::makeBandPass (double sampleRate,
double frequency,
double Q) noexcept
{
jassert (sampleRate > 0.0);
jassert (frequency > 0.0 && frequency <= sampleRate * 0.5);
jassert (Q > 0.0);
auto n = 1.0 / std::tan (MathConstants<double>::pi * frequency / sampleRate);
auto nSquared = n * n;
auto c1 = 1.0 / (1.0 + 1.0 / Q * n + nSquared);
return IIRCoefficients (c1 * n / Q,
0.0,
-c1 * n / Q,
1.0,
c1 * 2.0 * (1.0 - nSquared),
c1 * (1.0 - 1.0 / Q * n + nSquared));
}
IIRCoefficients IIRCoefficients::makeNotchFilter (double sampleRate,
double frequency) noexcept
{
return makeNotchFilter (sampleRate, frequency, 1.0 / MathConstants<double>::sqrt2);
}
IIRCoefficients IIRCoefficients::makeNotchFilter (double sampleRate,
double frequency,
double Q) noexcept
{
jassert (sampleRate > 0.0);
jassert (frequency > 0.0 && frequency <= sampleRate * 0.5);
jassert (Q > 0.0);
auto n = 1.0 / std::tan (MathConstants<double>::pi * frequency / sampleRate);
auto nSquared = n * n;
auto c1 = 1.0 / (1.0 + n / Q + nSquared);
return IIRCoefficients (c1 * (1.0 + nSquared),
2.0 * c1 * (1.0 - nSquared),
c1 * (1.0 + nSquared),
1.0,
c1 * 2.0 * (1.0 - nSquared),
c1 * (1.0 - n / Q + nSquared));
}
IIRCoefficients IIRCoefficients::makeAllPass (double sampleRate,
double frequency) noexcept
{
return makeAllPass (sampleRate, frequency, 1.0 / MathConstants<double>::sqrt2);
}
IIRCoefficients IIRCoefficients::makeAllPass (double sampleRate,
double frequency,
double Q) noexcept
{
jassert (sampleRate > 0.0);
jassert (frequency > 0.0 && frequency <= sampleRate * 0.5);
jassert (Q > 0.0);
auto n = 1.0 / std::tan (MathConstants<double>::pi * frequency / sampleRate);
auto nSquared = n * n;
auto c1 = 1.0 / (1.0 + 1.0 / Q * n + nSquared);
return IIRCoefficients (c1 * (1.0 - n / Q + nSquared),
c1 * 2.0 * (1.0 - nSquared),
1.0,
1.0,
c1 * 2.0 * (1.0 - nSquared),
c1 * (1.0 - n / Q + nSquared));
}
IIRCoefficients IIRCoefficients::makeLowShelf (double sampleRate,
double cutOffFrequency,
double Q,
float gainFactor) noexcept
{
jassert (sampleRate > 0.0);
jassert (cutOffFrequency > 0.0 && cutOffFrequency <= sampleRate * 0.5);
jassert (Q > 0.0);
auto A = jmax (0.0f, std::sqrt (gainFactor));
auto aminus1 = A - 1.0;
auto aplus1 = A + 1.0;
auto omega = (MathConstants<double>::twoPi * jmax (cutOffFrequency, 2.0)) / sampleRate;
auto coso = std::cos (omega);
auto beta = std::sin (omega) * std::sqrt (A) / Q;
auto aminus1TimesCoso = aminus1 * coso;
return IIRCoefficients (A * (aplus1 - aminus1TimesCoso + beta),
A * 2.0 * (aminus1 - aplus1 * coso),
A * (aplus1 - aminus1TimesCoso - beta),
aplus1 + aminus1TimesCoso + beta,
-2.0 * (aminus1 + aplus1 * coso),
aplus1 + aminus1TimesCoso - beta);
}
IIRCoefficients IIRCoefficients::makeHighShelf (double sampleRate,
double cutOffFrequency,
double Q,
float gainFactor) noexcept
{
jassert (sampleRate > 0.0);
jassert (cutOffFrequency > 0.0 && cutOffFrequency <= sampleRate * 0.5);
jassert (Q > 0.0);
auto A = jmax (0.0f, std::sqrt (gainFactor));
auto aminus1 = A - 1.0;
auto aplus1 = A + 1.0;
auto omega = (MathConstants<double>::twoPi * jmax (cutOffFrequency, 2.0)) / sampleRate;
auto coso = std::cos (omega);
auto beta = std::sin (omega) * std::sqrt (A) / Q;
auto aminus1TimesCoso = aminus1 * coso;
return IIRCoefficients (A * (aplus1 + aminus1TimesCoso + beta),
A * -2.0 * (aminus1 + aplus1 * coso),
A * (aplus1 + aminus1TimesCoso - beta),
aplus1 - aminus1TimesCoso + beta,
2.0 * (aminus1 - aplus1 * coso),
aplus1 - aminus1TimesCoso - beta);
}
IIRCoefficients IIRCoefficients::makePeakFilter (double sampleRate,
double frequency,
double Q,
float gainFactor) noexcept
{
jassert (sampleRate > 0.0);
jassert (frequency > 0.0 && frequency <= sampleRate * 0.5);
jassert (Q > 0.0);
auto A = jmax (0.0f, std::sqrt (gainFactor));
auto omega = (MathConstants<double>::twoPi * jmax (frequency, 2.0)) / sampleRate;
auto alpha = 0.5 * std::sin (omega) / Q;
auto c2 = -2.0 * std::cos (omega);
auto alphaTimesA = alpha * A;
auto alphaOverA = alpha / A;
return IIRCoefficients (1.0 + alphaTimesA,
c2,
1.0 - alphaTimesA,
1.0 + alphaOverA,
c2,
1.0 - alphaOverA);
}
//==============================================================================
template <typename Mutex>
IIRFilterBase<Mutex>::IIRFilterBase() noexcept = default;
template <typename Mutex>
IIRFilterBase<Mutex>::IIRFilterBase (const IIRFilterBase& other) noexcept : active (other.active)
{
const typename Mutex::ScopedLockType sl (other.processLock);
coefficients = other.coefficients;
}
//==============================================================================
template <typename Mutex>
void IIRFilterBase<Mutex>::makeInactive() noexcept
{
const typename Mutex::ScopedLockType sl (processLock);
active = false;
}
template <typename Mutex>
void IIRFilterBase<Mutex>::setCoefficients (const IIRCoefficients& newCoefficients) noexcept
{
const typename Mutex::ScopedLockType sl (processLock);
coefficients = newCoefficients;
active = true;
}
//==============================================================================
template <typename Mutex>
void IIRFilterBase<Mutex>::reset() noexcept
{
const typename Mutex::ScopedLockType sl (processLock);
v1 = v2 = 0.0;
}
template <typename Mutex>
float IIRFilterBase<Mutex>::processSingleSampleRaw (float in) noexcept
{
auto out = coefficients.coefficients[0] * in + v1;
JUCE_SNAP_TO_ZERO (out);
v1 = coefficients.coefficients[1] * in - coefficients.coefficients[3] * out + v2;
v2 = coefficients.coefficients[2] * in - coefficients.coefficients[4] * out;
return out;
}
template <typename Mutex>
void IIRFilterBase<Mutex>::processSamples (float* const samples, const int numSamples) noexcept
{
const typename Mutex::ScopedLockType sl (processLock);
if (active)
{
auto c0 = coefficients.coefficients[0];
auto c1 = coefficients.coefficients[1];
auto c2 = coefficients.coefficients[2];
auto c3 = coefficients.coefficients[3];
auto c4 = coefficients.coefficients[4];
auto lv1 = v1, lv2 = v2;
for (int i = 0; i < numSamples; ++i)
{
auto in = samples[i];
auto out = c0 * in + lv1;
samples[i] = out;
lv1 = c1 * in - c3 * out + lv2;
lv2 = c2 * in - c4 * out;
}
JUCE_SNAP_TO_ZERO (lv1); v1 = lv1;
JUCE_SNAP_TO_ZERO (lv2); v2 = lv2;
}
}
template class IIRFilterBase<SpinLock>;
template class IIRFilterBase<DummyCriticalSection>;
} // namespace juce
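One way to read processSingleSampleRaw() above: it is the transposed direct form II realisation of the usual biquad recurrence. The plain direct form I version, shown here only for illustration (the helper name is hypothetical, not part of JUCE), makes the relationship to the coefficient array explicit; after the six-argument constructor divides through by c4 (a0), coefficients[] holds { b0, b1, b2, a1, a2 }.
// Algebraically equivalent direct-form-I recurrence (illustration only).
static float biquadDirectForm1 (const float (&c)[5], float x0,
                                float& x1, float& x2, float& y1, float& y2) noexcept
{
    const float y0 = c[0] * x0 + c[1] * x1 + c[2] * x2 - c[3] * y1 - c[4] * y2;
    x2 = x1;  x1 = x0;   // shift the input history
    y2 = y1;  y1 = y0;   // shift the output history
    return y0;
}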

+ 0
- 254
libs/juce7/source/modules/juce_audio_basics/utilities/juce_IIRFilter.h

@@ -1,254 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
class IIRFilter;
//==============================================================================
/**
A set of coefficients for use in an IIRFilter object.
@see IIRFilter
@tags{Audio}
*/
class JUCE_API IIRCoefficients
{
public:
//==============================================================================
/** Creates a null set of coefficients (which will produce silence). */
IIRCoefficients() noexcept;
/** Directly constructs an object from the raw coefficients.
Most people will want to use the static methods instead of this, but
the constructor is public to allow tinkerers to create their own custom
filters!
*/
IIRCoefficients (double c1, double c2, double c3,
double c4, double c5, double c6) noexcept;
/** Creates a copy of another filter. */
IIRCoefficients (const IIRCoefficients&) noexcept;
/** Creates a copy of another filter. */
IIRCoefficients& operator= (const IIRCoefficients&) noexcept;
/** Destructor. */
~IIRCoefficients() noexcept;
//==============================================================================
/** Returns the coefficients for a low-pass filter. */
static IIRCoefficients makeLowPass (double sampleRate,
double frequency) noexcept;
/** Returns the coefficients for a low-pass filter with variable Q. */
static IIRCoefficients makeLowPass (double sampleRate,
double frequency,
double Q) noexcept;
//==============================================================================
/** Returns the coefficients for a high-pass filter. */
static IIRCoefficients makeHighPass (double sampleRate,
double frequency) noexcept;
/** Returns the coefficients for a high-pass filter with variable Q. */
static IIRCoefficients makeHighPass (double sampleRate,
double frequency,
double Q) noexcept;
//==============================================================================
/** Returns the coefficients for a band-pass filter. */
static IIRCoefficients makeBandPass (double sampleRate, double frequency) noexcept;
/** Returns the coefficients for a band-pass filter with variable Q. */
static IIRCoefficients makeBandPass (double sampleRate,
double frequency,
double Q) noexcept;
//==============================================================================
/** Returns the coefficients for a notch filter. */
static IIRCoefficients makeNotchFilter (double sampleRate, double frequency) noexcept;
/** Returns the coefficients for a notch filter with variable Q. */
static IIRCoefficients makeNotchFilter (double sampleRate,
double frequency,
double Q) noexcept;
//==============================================================================
/** Returns the coefficients for an all-pass filter. */
static IIRCoefficients makeAllPass (double sampleRate, double frequency) noexcept;
/** Returns the coefficients for an all-pass filter with variable Q. */
static IIRCoefficients makeAllPass (double sampleRate,
double frequency,
double Q) noexcept;
//==============================================================================
/** Returns the coefficients for a low-pass shelf filter with variable Q and gain.
The gain is a scale factor that the low frequencies are multiplied by, so values
greater than 1.0 will boost the low frequencies, values less than 1.0 will
attenuate them.
*/
static IIRCoefficients makeLowShelf (double sampleRate,
double cutOffFrequency,
double Q,
float gainFactor) noexcept;
/** Returns the coefficients for a high-pass shelf filter with variable Q and gain.
The gain is a scale factor that the high frequencies are multiplied by, so values
greater than 1.0 will boost the high frequencies, values less than 1.0 will
attenuate them.
*/
static IIRCoefficients makeHighShelf (double sampleRate,
double cutOffFrequency,
double Q,
float gainFactor) noexcept;
/** Returns the coefficients for a peak filter centred around a
given frequency, with a variable Q and gain.
The gain is a scale factor that the centre frequencies are multiplied by, so
values greater than 1.0 will boost the centre frequencies, values less than
1.0 will attenuate them.
*/
static IIRCoefficients makePeakFilter (double sampleRate,
double centreFrequency,
double Q,
float gainFactor) noexcept;
//==============================================================================
/** The raw coefficients.
You should leave these numbers alone unless you really know what you're doing.
*/
float coefficients[5];
};
//==============================================================================
/**
An IIR filter that can perform low, high, or band-pass filtering on an
audio signal.
@see IIRCoefficient, IIRFilterAudioSource
@tags{Audio}
*/
template <typename Mutex>
class JUCE_API IIRFilterBase
{
public:
//==============================================================================
/** Creates a filter.
Initially the filter is inactive, so will have no effect on samples that
you process with it. Use the setCoefficients() method to turn it into the
type of filter needed.
*/
IIRFilterBase() noexcept;
/** Creates a copy of another filter. */
IIRFilterBase (const IIRFilterBase&) noexcept;
//==============================================================================
/** Clears the filter so that any incoming data passes through unchanged. */
void makeInactive() noexcept;
/** Applies a set of coefficients to this filter. */
void setCoefficients (const IIRCoefficients& newCoefficients) noexcept;
/** Returns the coefficients that this filter is using. */
IIRCoefficients getCoefficients() const noexcept { return coefficients; }
//==============================================================================
/** Resets the filter's processing pipeline, ready to start a new stream of data.
Note that this clears the processing state, but the type of filter and
its coefficients aren't changed. To put a filter into an inactive state, use
the makeInactive() method.
*/
void reset() noexcept;
/** Performs the filter operation on the given set of samples. */
void processSamples (float* samples, int numSamples) noexcept;
/** Processes a single sample, without any locking or checking.
Use this if you need fast processing of a single value, but be aware that
this isn't thread-safe in the way that processSamples() is.
*/
float processSingleSampleRaw (float sample) noexcept;
protected:
//==============================================================================
Mutex processLock;
IIRCoefficients coefficients;
float v1 = 0, v2 = 0;
bool active = false;
// The exact meaning of an assignment operator would be ambiguous since the filters are
// stateful. If you want to copy the coefficients, then just use setCoefficients().
IIRFilter& operator= (const IIRFilter&) = delete;
JUCE_LEAK_DETECTOR (IIRFilter)
};
/**
An IIR filter that can perform low, high, or band-pass filtering on an
audio signal, and which attempts to implement basic thread-safety.
This class synchronises calls to some of its member functions, making it
safe (although not necessarily real-time-safe) to reset the filter or
apply new coefficients while the filter is processing on another thread.
In most cases this style of internal locking should not be used, and you
should attempt to provide thread-safety at a higher level in your program.
If you can guarantee that calls to the filter will be synchronised externally,
you could consider switching to SingleThreadedIIRFilter instead.
@see SingleThreadedIIRFilter, IIRCoefficient, IIRFilterAudioSource
@tags{Audio}
*/
class IIRFilter : public IIRFilterBase<SpinLock>
{
public:
using IIRFilterBase::IIRFilterBase;
};
/**
An IIR filter that can perform low, high, or band-pass filtering on an
audio signal, with no thread-safety guarantees.
You should use this class if you need an IIR filter, and don't plan to
call its member functions from multiple threads at once.
@see IIRFilter, IIRCoefficient, IIRFilterAudioSource
@tags{Audio}
*/
class SingleThreadedIIRFilter : public IIRFilterBase<DummyCriticalSection>
{
public:
using IIRFilterBase::IIRFilterBase;
};
} // namespace juce
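A minimal usage sketch for the classes above: a 1 kHz low-pass applied to a stereo buffer, with one filter instance per channel since the filters are stateful. Sample rate, cutoff and block size are illustrative; if calls are synchronised externally, SingleThreadedIIRFilter can be substituted as the header suggests.
// IIRFilter usage sketch (assumes the JUCE headers are included).
juce::IIRFilter leftFilter, rightFilter;
const auto coeffs = juce::IIRCoefficients::makeLowPass (44100.0, 1000.0);
leftFilter.setCoefficients (coeffs);
rightFilter.setCoefficients (coeffs);

juce::AudioBuffer<float> buffer (2, 512);
// ... fill the buffer ...
leftFilter.processSamples  (buffer.getWritePointer (0), buffer.getNumSamples());
rightFilter.processSamples (buffer.getWritePointer (1), buffer.getNumSamples());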

+ 0
- 188
libs/juce7/source/modules/juce_audio_basics/utilities/juce_Interpolators.cpp

@@ -1,188 +0,0 @@
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2022 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
The code included in this file is provided under the terms of the ISC license
http://www.isc.org/downloads/software-support-policy/isc-license. Permission
To use, copy, modify, and/or distribute this software for any purpose with or
without fee is hereby granted provided that the above copyright notice and
this permission notice appear in all copies.
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
#if JUCE_UNIT_TESTS
class InterpolatorTests : public UnitTest
{
public:
InterpolatorTests()
: UnitTest ("InterpolatorTests", UnitTestCategories::audio)
{
}
private:
template <typename InterpolatorType>
void runInterpolatorTests (const String& interpolatorName)
{
auto createGaussian = [] (std::vector<float>& destination, float scale, float centreInSamples, float width)
{
for (size_t i = 0; i < destination.size(); ++i)
{
auto x = (((float) i) - centreInSamples) * width;
destination[i] = std::exp (-(x * x));
}
FloatVectorOperations::multiply (destination.data(), scale, (int) destination.size());
};
auto findGaussianPeak = [] (const std::vector<float>& input) -> float
{
auto max = std::max_element (std::begin (input), std::end (input));
auto maxPrev = max - 1;
jassert (maxPrev >= std::begin (input));
auto maxNext = max + 1;
jassert (maxNext < std::end (input));
auto quadraticMaxLoc = (*maxPrev - *maxNext) / (2.0f * ((*maxNext + *maxPrev) - (2.0f * *max)));
return quadraticMaxLoc + (float) std::distance (std::begin (input), max);
};
auto expectAllElementsWithin = [this] (const std::vector<float>& v1, const std::vector<float>& v2, float tolerance)
{
expectEquals ((int) v1.size(), (int) v2.size());
for (size_t i = 0; i < v1.size(); ++i)
expectWithinAbsoluteError (v1[i], v2[i], tolerance);
};
InterpolatorType interpolator;
constexpr size_t inputSize = 1001;
static_assert (inputSize > 800 + InterpolatorType::getBaseLatency(),
"The test InterpolatorTests input buffer is too small");
std::vector<float> input (inputSize);
constexpr auto inputGaussianMidpoint = (float) (inputSize - 1) / 2.0f;
constexpr auto inputGaussianValueAtEnds = 0.000001f;
const auto inputGaussianWidth = std::sqrt (-std::log (inputGaussianValueAtEnds)) / inputGaussianMidpoint;
createGaussian (input, 1.0f, inputGaussianMidpoint, inputGaussianWidth);
for (auto speedRatio : { 0.4, 0.8263, 1.0, 1.05, 1.2384, 1.6 })
{
const auto expectedGaussianMidpoint = (inputGaussianMidpoint + InterpolatorType::getBaseLatency()) / (float) speedRatio;
const auto expectedGaussianWidth = inputGaussianWidth * (float) speedRatio;
const auto outputBufferSize = (size_t) std::floor ((float) input.size() / speedRatio);
for (int numBlocks : { 1, 5 })
{
const auto inputBlockSize = (float) input.size() / (float) numBlocks;
const auto outputBlockSize = (int) std::floor (inputBlockSize / speedRatio);
std::vector<float> output (outputBufferSize, std::numeric_limits<float>::min());
beginTest (interpolatorName + " process " + String (numBlocks) + " blocks ratio " + String (speedRatio));
interpolator.reset();
{
auto* inputPtr = input.data();
auto* outputPtr = output.data();
for (int i = 0; i < numBlocks; ++i)
{
auto numInputSamplesRead = interpolator.process (speedRatio, inputPtr, outputPtr, outputBlockSize);
inputPtr += numInputSamplesRead;
outputPtr += outputBlockSize;
}
}
expectWithinAbsoluteError (findGaussianPeak (output), expectedGaussianMidpoint, 0.1f);
std::vector<float> expectedOutput (output.size());
createGaussian (expectedOutput, 1.0f, expectedGaussianMidpoint, expectedGaussianWidth);
expectAllElementsWithin (output, expectedOutput, 0.02f);
beginTest (interpolatorName + " process adding " + String (numBlocks) + " blocks ratio " + String (speedRatio));
interpolator.reset();
constexpr float addingGain = 0.7384f;
{
auto* inputPtr = input.data();
auto* outputPtr = output.data();
for (int i = 0; i < numBlocks; ++i)
{
auto numInputSamplesRead = interpolator.processAdding (speedRatio, inputPtr, outputPtr, outputBlockSize, addingGain);
inputPtr += numInputSamplesRead;
outputPtr += outputBlockSize;
}
}
expectWithinAbsoluteError (findGaussianPeak (output), expectedGaussianMidpoint, 0.1f);
std::vector<float> additionalOutput (output.size());
createGaussian (additionalOutput, addingGain, expectedGaussianMidpoint, expectedGaussianWidth);
FloatVectorOperations::add (expectedOutput.data(), additionalOutput.data(), (int) additionalOutput.size());
expectAllElementsWithin (output, expectedOutput, 0.02f);
}
beginTest (interpolatorName + " process wrap 0 ratio " + String (speedRatio));
std::vector<float> doubleLengthOutput (2 * outputBufferSize, std::numeric_limits<float>::min());
interpolator.reset();
interpolator.process (speedRatio, input.data(), doubleLengthOutput.data(), (int) doubleLengthOutput.size(),
(int) input.size(), 0);
std::vector<float> expectedDoubleLengthOutput (doubleLengthOutput.size());
createGaussian (expectedDoubleLengthOutput, 1.0f, expectedGaussianMidpoint, expectedGaussianWidth);
expectAllElementsWithin (doubleLengthOutput, expectedDoubleLengthOutput, 0.02f);
beginTest (interpolatorName + " process wrap double ratio " + String (speedRatio));
interpolator.reset();
interpolator.process (speedRatio, input.data(), doubleLengthOutput.data(), (int) doubleLengthOutput.size(),
(int) input.size(), (int) input.size());
std::vector<float> secondGaussian (doubleLengthOutput.size());
createGaussian (secondGaussian, 1.0f, expectedGaussianMidpoint + (float) outputBufferSize, expectedGaussianWidth);
FloatVectorOperations::add (expectedDoubleLengthOutput.data(), secondGaussian.data(), (int) expectedDoubleLengthOutput.size());
expectAllElementsWithin (doubleLengthOutput, expectedDoubleLengthOutput, 0.02f);
}
}
public:
void runTest() override
{
runInterpolatorTests<WindowedSincInterpolator> ("WindowedSincInterpolator");
runInterpolatorTests<LagrangeInterpolator> ("LagrangeInterpolator");
runInterpolatorTests<CatmullRomInterpolator> ("CatmullRomInterpolator");
runInterpolatorTests<LinearInterpolator> ("LinearInterpolator");
}
};
static InterpolatorTests interpolatorTests;
#endif
} // namespace juce
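The findGaussianPeak lambda in the tests above locates the peak with sub-sample accuracy via a standard three-point parabolic fit. Pulled out on its own (hypothetical helper name, illustration only):
// Fits a parabola through (-1, prev), (0, centre), (1, next), where centre is a local
// maximum, and returns the vertex position relative to the centre sample, in (-0.5, 0.5).
static float parabolicPeakOffset (float prev, float centre, float next) noexcept
{
    return (prev - next) / (2.0f * ((next + prev) - 2.0f * centre));
}
// Adding this offset to the integer index of the maximum sample gives the fractional
// peak location, which is what findGaussianPeak() returns.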

Some files were not shown because too many files changed in this diff
