Browse Source

Update code to use new AudioData interleaving/deinterleaving API

v6.1.6
ed 3 years ago
parent
commit
69d4e0186f
6 changed files with 68 additions and 81 deletions
  1. +18
    -13
      modules/juce_audio_basics/buffers/juce_AudioDataConverters.cpp
  2. +9
    -12
      modules/juce_audio_devices/native/juce_android_Audio.cpp
  3. +17
    -24
      modules/juce_audio_devices/native/juce_android_Oboe.cpp
  4. +18
    -24
      modules/juce_audio_devices/native/juce_android_OpenSL.cpp
  5. +3
    -4
      modules/juce_audio_utils/native/juce_mac_AudioCDBurner.mm
  6. +3
    -4
      modules/juce_audio_utils/native/juce_win32_AudioCDBurner.cpp

+ 18
- 13
modules/juce_audio_basics/buffers/juce_AudioDataConverters.cpp View File

@@ -434,16 +434,21 @@ void AudioDataConverters::convertFormatToFloat (DataFormat sourceFormat, const v
//==============================================================================
void AudioDataConverters::interleaveSamples (const float** source, float* dest, int numSamples, int numChannels)
{
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
AudioData::Float32, AudioData::NativeEndian> (source, numChannels, dest, numChannels, numSamples);
using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
AudioData::interleaveSamples (AudioData::NonInterleavedSource<Format> { source, numChannels },
AudioData::InterleavedDest<Format> { dest, numChannels },
numSamples);
}
void AudioDataConverters::deinterleaveSamples (const float* source, float** dest, int numSamples, int numChannels)
{
AudioData::deinterleaveSamples<AudioData::Float32, AudioData::NativeEndian,
AudioData::Float32, AudioData::NativeEndian> (source, numChannels, dest, numChannels, numSamples);
}
using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
AudioData::deinterleaveSamples (AudioData::InterleavedSource<Format> { source, numChannels },
AudioData::NonInterleavedDest<Format> { dest, numChannels },
numSamples);
}
//==============================================================================
//==============================================================================
@@ -574,6 +579,8 @@ public:
beginTest ("Round-trip conversion: Float32");
Test1 <AudioData::Float32>::test (*this, r);
using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
beginTest ("Interleaving");
{
constexpr auto numChannels = 4;
@@ -586,10 +593,9 @@ public:
for (int i = 0; i < numSamples; ++i)
sourceBuffer.setSample (ch, i, r.nextFloat());
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
AudioData::Float32, AudioData::NativeEndian> (sourceBuffer.getArrayOfReadPointers(), numChannels,
destBuffer.getWritePointer (0), numChannels,
numSamples);
AudioData::interleaveSamples (AudioData::NonInterleavedSource<Format> { sourceBuffer.getArrayOfReadPointers(), numChannels },
AudioData::InterleavedDest<Format> { destBuffer.getWritePointer (0), numChannels },
numSamples);
for (int ch = 0; ch < numChannels; ++ch)
for (int i = 0; i < numSamples; ++i)
@@ -608,10 +614,9 @@ public:
for (int i = 0; i < numSamples; ++i)
sourceBuffer.setSample (0, ch + (i * numChannels), r.nextFloat());
AudioData::deinterleaveSamples<AudioData::Float32, AudioData::NativeEndian,
AudioData::Float32, AudioData::NativeEndian> (sourceBuffer.getReadPointer (0), numChannels,
destBuffer.getArrayOfWritePointers(), numChannels,
numSamples);
AudioData::deinterleaveSamples (AudioData::InterleavedSource<Format> { sourceBuffer.getReadPointer (0), numChannels },
AudioData::NonInterleavedDest<Format> { destBuffer.getArrayOfWritePointers(), numChannels },
numSamples);
for (int ch = 0; ch < numChannels; ++ch)
for (int i = 0; i < numSamples; ++i)


+ 9
- 12
modules/juce_audio_devices/native/juce_android_Audio.cpp View File

@@ -326,6 +326,9 @@ public:
JNIEnv* env = getEnv();
jshortArray audioBuffer = env->NewShortArray (actualBufferSize * jmax (numDeviceOutputChannels, numDeviceInputChannels));
using NativeInt16 = AudioData::Format<AudioData::Int16, AudioData::NativeEndian>;
using NativeFloat32 = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
while (! threadShouldExit())
{
if (inputDevice != nullptr)
@@ -339,12 +342,9 @@ public:
jshort* const src = env->GetShortArrayElements (audioBuffer, nullptr);
AudioData::deinterleaveSamples<AudioData::Int16, AudioData::NativeEndian,
AudioData::Float32, AudioData::NativeEndian> (reinterpret_cast<const uint16*> (src),
numDeviceInputChannels,
inputChannelBuffer.getArrayOfWritePointers(),
inputChannelBuffer.getNumChannels(),
actualBufferSize);
AudioData::deinterleaveSamples (AudioData::InterleavedSource<NativeInt16> { reinterpret_cast<const uint16*> (src), numDeviceInputChannels },
AudioData::NonInterleavedDest<NativeFloat32> { inputChannelBuffer.getArrayOfWritePointers(), inputChannelBuffer.getNumChannels() },
actualBufferSize);
env->ReleaseShortArrayElements (audioBuffer, src, 0);
}
@@ -374,12 +374,9 @@ public:
jshort* const dest = env->GetShortArrayElements (audioBuffer, nullptr);
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
AudioData::Int16, AudioData::NativeEndian> (outputChannelBuffer.getArrayOfReadPointers(),
outputChannelBuffer.getNumChannels(),
reinterpret_cast<uint16*> (dest),
numDeviceOutputChannels,
actualBufferSize);
AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { outputChannelBuffer.getArrayOfReadPointers(), outputChannelBuffer.getNumChannels() },
AudioData::InterleavedDest<NativeInt16> { reinterpret_cast<uint16*> (dest), numDeviceOutputChannels },
actualBufferSize);
env->ReleaseShortArrayElements (audioBuffer, dest, 0);
jint numWritten = env->CallIntMethod (outputDevice, AudioTrack.write, audioBuffer, 0, actualBufferSize * numDeviceOutputChannels);


+ 17
- 24
modules/juce_audio_devices/native/juce_android_Oboe.cpp View File

@@ -44,28 +44,25 @@ struct OboeAudioIODeviceBufferHelpers<int16>
static bool referAudioBufferDirectlyToOboeIfPossible (int16*, AudioBuffer<float>&, int) { return false; }
using NativeInt16 = AudioData::Format<AudioData::Int16, AudioData::NativeEndian>;
using NativeFloat32 = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
static void convertFromOboe (const int16* srcInterleaved, AudioBuffer<float>& audioBuffer, int numSamples)
{
const auto numChannels = audioBuffer.getNumChannels();
AudioData::deinterleaveSamples<AudioData::Int16, AudioData::NativeEndian,
AudioData::Float32, AudioData::NativeEndian> (reinterpret_cast<const uint16*> (srcInterleaved),
numChannels,
audioBuffer.getArrayOfWritePointers(),
numChannels,
numSamples);
AudioData::deinterleaveSamples (AudioData::InterleavedSource<NativeInt16> { reinterpret_cast<const uint16*> (srcInterleaved), numChannels },
AudioData::NonInterleavedDest<NativeFloat32> { audioBuffer.getArrayOfWritePointers(), numChannels },
numSamples);
}
static void convertToOboe (const AudioBuffer<float>& audioBuffer, int16* dstInterleaved, int numSamples)
{
const auto numChannels = audioBuffer.getNumChannels();
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
AudioData::Int16, AudioData::NativeEndian> (audioBuffer.getArrayOfReadPointers(),
numChannels,
reinterpret_cast<uint16*> (dstInterleaved),
numChannels,
numSamples);
AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { audioBuffer.getArrayOfReadPointers(), numChannels },
AudioData::InterleavedDest<NativeInt16> { reinterpret_cast<uint16*> (dstInterleaved), numChannels },
numSamples);
}
};
@@ -87,6 +84,8 @@ struct OboeAudioIODeviceBufferHelpers<float>
return false;
}
using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
static void convertFromOboe (const float* srcInterleaved, AudioBuffer<float>& audioBuffer, int numSamples)
{
auto numChannels = audioBuffer.getNumChannels();
@@ -96,12 +95,9 @@ struct OboeAudioIODeviceBufferHelpers<float>
// No need to convert, we instructed the buffer to point to the src data directly already
jassert (audioBuffer.getWritePointer (0) != srcInterleaved);
AudioData::deinterleaveSamples<AudioData::Float32, AudioData::NativeEndian,
AudioData::Float32, AudioData::NativeEndian> (srcInterleaved,
numChannels,
audioBuffer.getArrayOfWritePointers(),
numChannels,
numSamples);
AudioData::deinterleaveSamples (AudioData::InterleavedSource<Format> { srcInterleaved, numChannels },
AudioData::NonInterleavedDest<Format> { audioBuffer.getArrayOfWritePointers(), numChannels },
numSamples);
}
}
@@ -114,12 +110,9 @@ struct OboeAudioIODeviceBufferHelpers<float>
// No need to convert, we instructed the buffer to point to the src data directly already
jassert (audioBuffer.getReadPointer (0) != dstInterleaved);
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
AudioData::Float32, AudioData::NativeEndian> (audioBuffer.getArrayOfReadPointers(),
numChannels,
dstInterleaved,
numChannels,
numSamples);
AudioData::interleaveSamples (AudioData::NonInterleavedSource<Format> { audioBuffer.getArrayOfReadPointers(), numChannels },
AudioData::InterleavedDest<Format> { dstInterleaved, numChannels },
numSamples);
}
}
};


+ 18
- 24
modules/juce_audio_devices/native/juce_android_OpenSL.cpp View File

@@ -194,28 +194,25 @@ struct BufferHelpers<int16>
static void prepareCallbackBuffer (AudioBuffer<float>&, int16*) {}
using LittleEndianInt16 = AudioData::Format<AudioData::Int16, AudioData::LittleEndian>;
using NativeFloat32 = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
static void convertFromOpenSL (const int16* srcInterleaved, AudioBuffer<float>& audioBuffer)
{
const auto numChannels = audioBuffer.getNumChannels();
AudioData::deinterleaveSamples<AudioData::Int16, AudioData::LittleEndian,
AudioData::Float32, AudioData::NativeEndian> (reinterpret_cast<const uint16*> (srcInterleaved),
numChannels,
audioBuffer.getArrayOfWritePointers(),
numChannels,
audioBuffer.getNumSamples());
AudioData::deinterleaveSamples (AudioData::InterleavedSource<LittleEndianInt16> { reinterpret_cast<const uint16*> (srcInterleaved), numChannels },
AudioData::NonInterleavedDest<NativeFloat32> { audioBuffer.getArrayOfWritePointers(), numChannels },
audioBuffer.getNumSamples());
}
static void convertToOpenSL (const AudioBuffer<float>& audioBuffer, int16* dstInterleaved)
{
const auto numChannels = audioBuffer.getNumChannels();
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
AudioData::Int16, AudioData::LittleEndian> (audioBuffer.getArrayOfReadPointers(),
numChannels,
reinterpret_cast<uint16*> (dstInterleaved),
numChannels,
audioBuffer.getNumSamples());
AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { audioBuffer.getArrayOfReadPointers(), numChannels },
AudioData::InterleavedDest<LittleEndianInt16> { reinterpret_cast<uint16*> (dstInterleaved), numChannels },
audioBuffer.getNumSamples());
}
};
@@ -244,6 +241,9 @@ struct BufferHelpers<float>
audioBuffer.setDataToReferTo (&native, 1, audioBuffer.getNumSamples());
}
using LittleEndianFloat32 = AudioData::Format<AudioData::Float32, AudioData::LittleEndian>;
using NativeFloat32 = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
static void convertFromOpenSL (const float* srcInterleaved, AudioBuffer<float>& audioBuffer)
{
const auto numChannels = audioBuffer.getNumChannels();
@@ -254,12 +254,9 @@ struct BufferHelpers<float>
return;
}
AudioData::deinterleaveSamples<AudioData::Float32, AudioData::LittleEndian,
AudioData::Float32, AudioData::NativeEndian> (srcInterleaved,
numChannels,
audioBuffer.getArrayOfWritePointers(),
numChannels,
audioBuffer.getNumSamples());
AudioData::deinterleaveSamples (AudioData::InterleavedSource<LittleEndianFloat32> { srcInterleaved, numChannels },
AudioData::NonInterleavedDest<NativeFloat32> { audioBuffer.getArrayOfWritePointers(), numChannels },
audioBuffer.getNumSamples());
}
static void convertToOpenSL (const AudioBuffer<float>& audioBuffer, float* dstInterleaved)
@@ -272,12 +269,9 @@ struct BufferHelpers<float>
return;
}
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
AudioData::Float32, AudioData::LittleEndian> (audioBuffer.getArrayOfReadPointers(),
numChannels,
dstInterleaved,
numChannels,
audioBuffer.getNumSamples());
AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { audioBuffer.getArrayOfReadPointers(), numChannels },
AudioData::InterleavedDest<LittleEndianFloat32> { dstInterleaved, numChannels },
audioBuffer.getNumSamples());
}
};


+ 3
- 4
modules/juce_audio_utils/native/juce_mac_AudioCDBurner.mm View File

@@ -132,10 +132,9 @@ private:
source->source->getNextAudioBlock (info);
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
AudioData::Int16, AudioData::LittleEndian> (tempBuffer.getArrayOfReadPointers(), 2,
reinterpret_cast<uint16*> (buffer), 2,
numSamples);
AudioData::interleaveSamples (AudioData::NonInterleavedSource<AudioData::Format<AudioData::Float32, AudioData::NativeEndian>> { tempBuffer.getArrayOfReadPointers(), 2 },
AudioData::InterleavedDest<AudioData::Format<AudioData::Int16, AudioData::LittleEndian>> { reinterpret_cast<uint16*> (buffer), 2 },
numSamples);
source->readPosition += numSamples;
}


+ 3
- 4
modules/juce_audio_utils/native/juce_win32_AudioCDBurner.cpp View File

@@ -388,10 +388,9 @@ bool AudioCDBurner::addAudioTrack (AudioSource* audioSource, int numSamples)
buffer.clear (bytesPerBlock);
AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
AudioData::Int16, AudioData::LittleEndian> (sourceBuffer.getArrayOfReadPointers(), 2,
reinterpret_cast<uint16*> (buffer), 2,
samplesPerBlock);
AudioData::interleaveSamples (AudioData::NonInterleavedSource<AudioData::Format<AudioData::Float32, AudioData::NativeEndian>> { sourceBuffer.getArrayOfReadPointers(), 2 },
AudioData::InterleavedDest<AudioData::Format<AudioData::Int16, AudioData::LittleEndian>> { reinterpret_cast<uint16*> (buffer), 2 },
samplesPerBlock);
hr = pimpl->redbook->AddAudioTrackBlocks (buffer, bytesPerBlock);


Loading…
Cancel
Save