@@ -434,16 +434,21 @@ void AudioDataConverters::convertFormatToFloat (DataFormat sourceFormat, const v
 //==============================================================================
 void AudioDataConverters::interleaveSamples (const float** source, float* dest, int numSamples, int numChannels)
 {
-    AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                 AudioData::Float32, AudioData::NativeEndian> (source, numChannels, dest, numChannels, numSamples);
+    using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
+
+    AudioData::interleaveSamples (AudioData::NonInterleavedSource<Format> { source, numChannels },
+                                  AudioData::InterleavedDest<Format>      { dest, numChannels },
+                                  numSamples);
 }

 void AudioDataConverters::deinterleaveSamples (const float* source, float** dest, int numSamples, int numChannels)
 {
-    AudioData::deinterleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                   AudioData::Float32, AudioData::NativeEndian> (source, numChannels, dest, numChannels, numSamples);
-}
+    using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
+
+    AudioData::deinterleaveSamples (AudioData::InterleavedSource<Format>  { source, numChannels },
+                                    AudioData::NonInterleavedDest<Format> { dest, numChannels },
+                                    numSamples);
+}

 //==============================================================================
 //==============================================================================
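For orientation, here is a minimal usage sketch of the wrapper-based API introduced above (the helper name, explicit juce:: qualification, and buffer dimensions are illustrative assumptions, not part of this patch):

    // Sketch: interleave a float AudioBuffer into a packed float array via the new
    // source/dest wrapper types. packedDest must hold numChannels * numSamples floats.
    static void interleaveToPacked (const juce::AudioBuffer<float>& source, float* packedDest)
    {
        using Format = juce::AudioData::Format<juce::AudioData::Float32, juce::AudioData::NativeEndian>;

        juce::AudioData::interleaveSamples (juce::AudioData::NonInterleavedSource<Format> { source.getArrayOfReadPointers(), source.getNumChannels() },
                                            juce::AudioData::InterleavedDest<Format>      { packedDest, source.getNumChannels() },
                                            source.getNumSamples());
    }

The source wrapper carries the array of per-channel read pointers, the destination wrapper carries the packed pointer, and each wrapper's channel count travels with its pointer instead of being passed as a separate argument.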
@@ -574,6 +579,8 @@ public:
         beginTest ("Round-trip conversion: Float32");
         Test1 <AudioData::Float32>::test (*this, r);

+        using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
+
         beginTest ("Interleaving");
         {
             constexpr auto numChannels = 4;
@@ -586,10 +593,9 @@ public:
                 for (int i = 0; i < numSamples; ++i)
                     sourceBuffer.setSample (ch, i, r.nextFloat());

-            AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                         AudioData::Float32, AudioData::NativeEndian> (sourceBuffer.getArrayOfReadPointers(), numChannels,
-                                                                                       destBuffer.getWritePointer (0), numChannels,
-                                                                                       numSamples);
+            AudioData::interleaveSamples (AudioData::NonInterleavedSource<Format> { sourceBuffer.getArrayOfReadPointers(), numChannels },
+                                          AudioData::InterleavedDest<Format>      { destBuffer.getWritePointer (0), numChannels },
+                                          numSamples);

             for (int ch = 0; ch < numChannels; ++ch)
                 for (int i = 0; i < numSamples; ++i)
@@ -608,10 +614,9 @@ public:
                 for (int i = 0; i < numSamples; ++i)
                     sourceBuffer.setSample (0, ch + (i * numChannels), r.nextFloat());

-            AudioData::deinterleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                           AudioData::Float32, AudioData::NativeEndian> (sourceBuffer.getReadPointer (0), numChannels,
-                                                                                         destBuffer.getArrayOfWritePointers(), numChannels,
-                                                                                         numSamples);
+            AudioData::deinterleaveSamples (AudioData::InterleavedSource<Format>  { sourceBuffer.getReadPointer (0), numChannels },
+                                            AudioData::NonInterleavedDest<Format> { destBuffer.getArrayOfWritePointers(), numChannels },
+                                            numSamples);

             for (int ch = 0; ch < numChannels; ++ch)
                 for (int i = 0; i < numSamples; ++i)
@@ -326,6 +326,9 @@ public:
         JNIEnv* env = getEnv();
         jshortArray audioBuffer = env->NewShortArray (actualBufferSize * jmax (numDeviceOutputChannels, numDeviceInputChannels));

+        using NativeInt16   = AudioData::Format<AudioData::Int16,   AudioData::NativeEndian>;
+        using NativeFloat32 = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
+
         while (! threadShouldExit())
         {
             if (inputDevice != nullptr)
@@ -339,12 +342,9 @@ public:
                     jshort* const src = env->GetShortArrayElements (audioBuffer, nullptr);

-                    AudioData::deinterleaveSamples<AudioData::Int16,   AudioData::NativeEndian,
-                                                   AudioData::Float32, AudioData::NativeEndian> (reinterpret_cast<const uint16*> (src),
-                                                                                                 numDeviceInputChannels,
-                                                                                                 inputChannelBuffer.getArrayOfWritePointers(),
-                                                                                                 inputChannelBuffer.getNumChannels(),
-                                                                                                 actualBufferSize);
+                    AudioData::deinterleaveSamples (AudioData::InterleavedSource<NativeInt16>    { reinterpret_cast<const uint16*> (src), numDeviceInputChannels },
+                                                    AudioData::NonInterleavedDest<NativeFloat32> { inputChannelBuffer.getArrayOfWritePointers(), inputChannelBuffer.getNumChannels() },
+                                                    actualBufferSize);

                     env->ReleaseShortArrayElements (audioBuffer, src, 0);
                 }
@@ -374,12 +374,9 @@ public:
                     jshort* const dest = env->GetShortArrayElements (audioBuffer, nullptr);

-                    AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                                 AudioData::Int16,   AudioData::NativeEndian> (outputChannelBuffer.getArrayOfReadPointers(),
-                                                                                               outputChannelBuffer.getNumChannels(),
-                                                                                               reinterpret_cast<uint16*> (dest),
-                                                                                               numDeviceOutputChannels,
-                                                                                               actualBufferSize);
+                    AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { outputChannelBuffer.getArrayOfReadPointers(), outputChannelBuffer.getNumChannels() },
+                                                  AudioData::InterleavedDest<NativeInt16>        { reinterpret_cast<uint16*> (dest), numDeviceOutputChannels },
+                                                  actualBufferSize);

                     env->ReleaseShortArrayElements (audioBuffer, dest, 0);
                     jint numWritten = env->CallIntMethod (outputDevice, AudioTrack.write, audioBuffer, 0, actualBufferSize * numDeviceOutputChannels);
@@ -44,28 +44,25 @@ struct OboeAudioIODeviceBufferHelpers<int16>
     static bool referAudioBufferDirectlyToOboeIfPossible (int16*, AudioBuffer<float>&, int) { return false; }

+    using NativeInt16   = AudioData::Format<AudioData::Int16,   AudioData::NativeEndian>;
+    using NativeFloat32 = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
+
     static void convertFromOboe (const int16* srcInterleaved, AudioBuffer<float>& audioBuffer, int numSamples)
     {
         const auto numChannels = audioBuffer.getNumChannels();

-        AudioData::deinterleaveSamples<AudioData::Int16,   AudioData::NativeEndian,
-                                       AudioData::Float32, AudioData::NativeEndian> (reinterpret_cast<const uint16*> (srcInterleaved),
-                                                                                     numChannels,
-                                                                                     audioBuffer.getArrayOfWritePointers(),
-                                                                                     numChannels,
-                                                                                     numSamples);
+        AudioData::deinterleaveSamples (AudioData::InterleavedSource<NativeInt16>    { reinterpret_cast<const uint16*> (srcInterleaved), numChannels },
+                                        AudioData::NonInterleavedDest<NativeFloat32> { audioBuffer.getArrayOfWritePointers(), numChannels },
+                                        numSamples);
     }

     static void convertToOboe (const AudioBuffer<float>& audioBuffer, int16* dstInterleaved, int numSamples)
     {
         const auto numChannels = audioBuffer.getNumChannels();

-        AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                     AudioData::Int16,   AudioData::NativeEndian> (audioBuffer.getArrayOfReadPointers(),
-                                                                                   numChannels,
-                                                                                   reinterpret_cast<uint16*> (dstInterleaved),
-                                                                                   numChannels,
-                                                                                   numSamples);
+        AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { audioBuffer.getArrayOfReadPointers(), numChannels },
+                                      AudioData::InterleavedDest<NativeInt16>        { reinterpret_cast<uint16*> (dstInterleaved), numChannels },
+                                      numSamples);
     }
 };
@@ -87,6 +84,8 @@ struct OboeAudioIODeviceBufferHelpers<float>
         return false;
     }

+    using Format = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
+
     static void convertFromOboe (const float* srcInterleaved, AudioBuffer<float>& audioBuffer, int numSamples)
     {
         auto numChannels = audioBuffer.getNumChannels();
@@ -96,12 +95,9 @@ struct OboeAudioIODeviceBufferHelpers<float>
             // No need to convert, we instructed the buffer to point to the src data directly already
             jassert (audioBuffer.getWritePointer (0) != srcInterleaved);

-            AudioData::deinterleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                           AudioData::Float32, AudioData::NativeEndian> (srcInterleaved,
-                                                                                         numChannels,
-                                                                                         audioBuffer.getArrayOfWritePointers(),
-                                                                                         numChannels,
-                                                                                         numSamples);
+            AudioData::deinterleaveSamples (AudioData::InterleavedSource<Format>  { srcInterleaved, numChannels },
+                                            AudioData::NonInterleavedDest<Format> { audioBuffer.getArrayOfWritePointers(), numChannels },
+                                            numSamples);
         }
     }
@@ -114,12 +110,9 @@ struct OboeAudioIODeviceBufferHelpers<float>
             // No need to convert, we instructed the buffer to point to the src data directly already
             jassert (audioBuffer.getReadPointer (0) != dstInterleaved);

-            AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                         AudioData::Float32, AudioData::NativeEndian> (audioBuffer.getArrayOfReadPointers(),
-                                                                                       numChannels,
-                                                                                       dstInterleaved,
-                                                                                       numChannels,
-                                                                                       numSamples);
+            AudioData::interleaveSamples (AudioData::NonInterleavedSource<Format> { audioBuffer.getArrayOfReadPointers(), numChannels },
+                                          AudioData::InterleavedDest<Format>      { dstInterleaved, numChannels },
+                                          numSamples);
         }
     }
 };
@@ -194,28 +194,25 @@ struct BufferHelpers<int16>
     static void prepareCallbackBuffer (AudioBuffer<float>&, int16*) {}

+    using LittleEndianInt16 = AudioData::Format<AudioData::Int16,   AudioData::LittleEndian>;
+    using NativeFloat32     = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
+
     static void convertFromOpenSL (const int16* srcInterleaved, AudioBuffer<float>& audioBuffer)
     {
         const auto numChannels = audioBuffer.getNumChannels();

-        AudioData::deinterleaveSamples<AudioData::Int16,   AudioData::LittleEndian,
-                                       AudioData::Float32, AudioData::NativeEndian> (reinterpret_cast<const uint16*> (srcInterleaved),
-                                                                                     numChannels,
-                                                                                     audioBuffer.getArrayOfWritePointers(),
-                                                                                     numChannels,
-                                                                                     audioBuffer.getNumSamples());
+        AudioData::deinterleaveSamples (AudioData::InterleavedSource<LittleEndianInt16> { reinterpret_cast<const uint16*> (srcInterleaved), numChannels },
+                                        AudioData::NonInterleavedDest<NativeFloat32>    { audioBuffer.getArrayOfWritePointers(), numChannels },
+                                        audioBuffer.getNumSamples());
     }

     static void convertToOpenSL (const AudioBuffer<float>& audioBuffer, int16* dstInterleaved)
     {
         const auto numChannels = audioBuffer.getNumChannels();

-        AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                     AudioData::Int16,   AudioData::LittleEndian> (audioBuffer.getArrayOfReadPointers(),
-                                                                                   numChannels,
-                                                                                   reinterpret_cast<uint16*> (dstInterleaved),
-                                                                                   numChannels,
-                                                                                   audioBuffer.getNumSamples());
+        AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32> { audioBuffer.getArrayOfReadPointers(), numChannels },
+                                      AudioData::InterleavedDest<LittleEndianInt16>  { reinterpret_cast<uint16*> (dstInterleaved), numChannels },
+                                      audioBuffer.getNumSamples());
     }
 };
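A similar sketch for the format-converting case handled by the int16 helpers above, again with an illustrative helper name and juce:: qualification assumed; it mirrors convertFromOpenSL, converting packed little-endian int16 samples to non-interleaved native-endian floats in one pass:

    // Sketch: deinterleave packed little-endian int16 samples into a float AudioBuffer,
    // converting sample format and endianness at the same time.
    static void unpackInt16ToFloat (const juce::int16* srcInterleaved, juce::AudioBuffer<float>& dest)
    {
        using SrcFormat = juce::AudioData::Format<juce::AudioData::Int16,   juce::AudioData::LittleEndian>;
        using DstFormat = juce::AudioData::Format<juce::AudioData::Float32, juce::AudioData::NativeEndian>;

        juce::AudioData::deinterleaveSamples (juce::AudioData::InterleavedSource<SrcFormat>  { reinterpret_cast<const juce::uint16*> (srcInterleaved), dest.getNumChannels() },
                                              juce::AudioData::NonInterleavedDest<DstFormat> { dest.getArrayOfWritePointers(), dest.getNumChannels() },
                                              dest.getNumSamples());
    }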
@@ -244,6 +241,9 @@ struct BufferHelpers<float>
         audioBuffer.setDataToReferTo (&native, 1, audioBuffer.getNumSamples());
     }

+    using LittleEndianFloat32 = AudioData::Format<AudioData::Float32, AudioData::LittleEndian>;
+    using NativeFloat32       = AudioData::Format<AudioData::Float32, AudioData::NativeEndian>;
+
     static void convertFromOpenSL (const float* srcInterleaved, AudioBuffer<float>& audioBuffer)
     {
         const auto numChannels = audioBuffer.getNumChannels();
@@ -254,12 +254,9 @@ struct BufferHelpers<float>
             return;
         }

-        AudioData::deinterleaveSamples<AudioData::Float32, AudioData::LittleEndian,
-                                       AudioData::Float32, AudioData::NativeEndian> (srcInterleaved,
-                                                                                     numChannels,
-                                                                                     audioBuffer.getArrayOfWritePointers(),
-                                                                                     numChannels,
-                                                                                     audioBuffer.getNumSamples());
+        AudioData::deinterleaveSamples (AudioData::InterleavedSource<LittleEndianFloat32> { srcInterleaved, numChannels },
+                                        AudioData::NonInterleavedDest<NativeFloat32>      { audioBuffer.getArrayOfWritePointers(), numChannels },
+                                        audioBuffer.getNumSamples());
     }

     static void convertToOpenSL (const AudioBuffer<float>& audioBuffer, float* dstInterleaved)
@@ -272,12 +269,9 @@ struct BufferHelpers<float>
             return;
         }

-        AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                     AudioData::Float32, AudioData::LittleEndian> (audioBuffer.getArrayOfReadPointers(),
-                                                                                   numChannels,
-                                                                                   dstInterleaved,
-                                                                                   numChannels,
-                                                                                   audioBuffer.getNumSamples());
+        AudioData::interleaveSamples (AudioData::NonInterleavedSource<NativeFloat32>  { audioBuffer.getArrayOfReadPointers(), numChannels },
+                                      AudioData::InterleavedDest<LittleEndianFloat32> { dstInterleaved, numChannels },
+                                      audioBuffer.getNumSamples());
     }
 };
@@ -132,10 +132,9 @@ private:
             source->source->getNextAudioBlock (info);

-            AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                         AudioData::Int16,   AudioData::LittleEndian> (tempBuffer.getArrayOfReadPointers(), 2,
-                                                                                       reinterpret_cast<uint16*> (buffer), 2,
-                                                                                       numSamples);
+            AudioData::interleaveSamples (AudioData::NonInterleavedSource<AudioData::Float32, AudioData::NativeEndian> { tempBuffer.getArrayOfReadPointers(), 2 },
+                                          AudioData::InterleavedDest<AudioData::Int16, AudioData::LittleEndian>        { reinterpret_cast<uint16*> (buffer), 2 },
+                                          numSamples);

             source->readPosition += numSamples;
         }
@@ -388,10 +388,9 @@ bool AudioCDBurner::addAudioTrack (AudioSource* audioSource, int numSamples)
            buffer.clear (bytesPerBlock);

-        AudioData::interleaveSamples<AudioData::Float32, AudioData::NativeEndian,
-                                     AudioData::Int16,   AudioData::LittleEndian> (sourceBuffer.getArrayOfReadPointers(), 2,
-                                                                                   reinterpret_cast<uint16*> (buffer), 2,
-                                                                                   samplesPerBlock);
+        AudioData::interleaveSamples (AudioData::NonInterleavedSource<AudioData::Float32, AudioData::NativeEndian> { sourceBuffer.getArrayOfReadPointers(), 2 },
+                                      AudioData::InterleavedDest<AudioData::Int16, AudioData::LittleEndian>        { reinterpret_cast<uint16*> (buffer), 2 },
+                                      samplesPerBlock);

         hr = pimpl->redbook->AddAudioTrackBlocks (buffer, bytesPerBlock);