Review code formatting

This commit is contained in:
Ray San
2017-12-20 11:37:43 +01:00
parent b63ffcfa0f
commit 1320044e94

View File

@ -227,7 +227,8 @@ void TraceLog(int msgType, const char *text, ...); // Show trace lo
typedef enum { AUDIO_BUFFER_USAGE_STATIC = 0, AUDIO_BUFFER_USAGE_STREAM } AudioBufferUsage; typedef enum { AUDIO_BUFFER_USAGE_STATIC = 0, AUDIO_BUFFER_USAGE_STREAM } AudioBufferUsage;
// Audio buffer structure // Audio buffer structure
typedef struct AudioBuffer { typedef struct AudioBuffer AudioBuffer;
struct AudioBuffer {
mal_dsp dsp; // For format conversion. mal_dsp dsp; // For format conversion.
float volume; float volume;
float pitch; float pitch;
@ -241,7 +242,7 @@ typedef struct AudioBuffer {
AudioBuffer *next; AudioBuffer *next;
AudioBuffer *prev; AudioBuffer *prev;
unsigned char buffer[1]; unsigned char buffer[1];
} AudioBuffer; };
void StopAudioBuffer(AudioBuffer *audioBuffer); void StopAudioBuffer(AudioBuffer *audioBuffer);
@ -256,38 +257,36 @@ static AudioBuffer* lastAudioBuffer = NULL;
// Track an audio buffer: append it to the tail of the global linked list of
// active audio buffers (firstAudioBuffer/lastAudioBuffer).
// All list access is serialized with audioLock to protect against the mixer thread.
static void TrackAudioBuffer(AudioBuffer *audioBuffer)
{
    mal_mutex_lock(&audioLock);
    {
        // Empty list: the new buffer becomes the head; otherwise link it after the current tail
        if (firstAudioBuffer == NULL) firstAudioBuffer = audioBuffer;
        else
        {
            lastAudioBuffer->next = audioBuffer;
            audioBuffer->prev = lastAudioBuffer;
        }

        // The newly tracked buffer is always the new tail
        lastAudioBuffer = audioBuffer;
    }
    mal_mutex_unlock(&audioLock);
}
// Untrack an audio buffer: remove it from the global doubly-linked list of
// active audio buffers, fixing up the head/tail pointers as needed.
// All list access is serialized with audioLock to protect against the mixer thread.
static void UntrackAudioBuffer(AudioBuffer *audioBuffer)
{
    mal_mutex_lock(&audioLock);
    {
        // Unlink from the predecessor (or advance the list head if this was the first node)
        if (audioBuffer->prev == NULL) firstAudioBuffer = audioBuffer->next;
        else audioBuffer->prev->next = audioBuffer->next;

        // Unlink from the successor (or move the list tail back if this was the last node)
        if (audioBuffer->next == NULL) lastAudioBuffer = audioBuffer->prev;
        else audioBuffer->next->prev = audioBuffer->prev;

        // Fully detach the node so it can be re-tracked or freed safely
        audioBuffer->prev = NULL;
        audioBuffer->next = NULL;
    }
    mal_mutex_unlock(&audioLock);
}
@ -303,8 +302,10 @@ static void OnLog_MAL(mal_context* pContext, mal_device* pDevice, const char* me
// framesOut is both an input and an output. It will be initially filled with zeros outside of this function. // framesOut is both an input and an output. It will be initially filled with zeros outside of this function.
static void MixFrames(float* framesOut, const float* framesIn, mal_uint32 frameCount, float localVolume) static void MixFrames(float* framesOut, const float* framesIn, mal_uint32 frameCount, float localVolume)
{ {
for (mal_uint32 iFrame = 0; iFrame < frameCount; ++iFrame) { for (mal_uint32 iFrame = 0; iFrame < frameCount; ++iFrame)
for (mal_uint32 iChannel = 0; iChannel < device.channels; ++iChannel) { {
for (mal_uint32 iChannel = 0; iChannel < device.channels; ++iChannel)
{
float *frameOut = framesOut + (iFrame*device.channels); float *frameOut = framesOut + (iFrame*device.channels);
const float *frameIn = framesIn + (iFrame*device.channels); const float *frameIn = framesIn + (iFrame*device.channels);
@ -328,27 +329,28 @@ static mal_uint32 OnSendAudioDataToDevice(mal_device* pDevice, mal_uint32 frameC
for (AudioBuffer* audioBuffer = firstAudioBuffer; audioBuffer != NULL; audioBuffer = audioBuffer->next) for (AudioBuffer* audioBuffer = firstAudioBuffer; audioBuffer != NULL; audioBuffer = audioBuffer->next)
{ {
// Ignore stopped or paused sounds. // Ignore stopped or paused sounds.
if (!audioBuffer->playing || audioBuffer->paused) { if (!audioBuffer->playing || audioBuffer->paused) continue;
continue;
}
mal_uint32 framesRead = 0; mal_uint32 framesRead = 0;
for (;;) { for (;;)
if (framesRead > frameCount) { {
if (framesRead > frameCount)
{
TraceLog(LOG_DEBUG, "Mixed too many frames from audio buffer"); TraceLog(LOG_DEBUG, "Mixed too many frames from audio buffer");
break; break;
} }
if (framesRead == frameCount) {
break; if (framesRead == frameCount) break;
}
// Just read as much data as we can from the stream. // Just read as much data as we can from the stream.
mal_uint32 framesToRead = (frameCount - framesRead); mal_uint32 framesToRead = (frameCount - framesRead);
while (framesToRead > 0) { while (framesToRead > 0)
{
float tempBuffer[1024]; // 512 frames for stereo. float tempBuffer[1024]; // 512 frames for stereo.
mal_uint32 framesToReadRightNow = framesToRead; mal_uint32 framesToReadRightNow = framesToRead;
if (framesToReadRightNow > sizeof(tempBuffer)/sizeof(tempBuffer[0])/DEVICE_CHANNELS) { if (framesToReadRightNow > sizeof(tempBuffer)/sizeof(tempBuffer[0])/DEVICE_CHANNELS)
{
framesToReadRightNow = sizeof(tempBuffer)/sizeof(tempBuffer[0])/DEVICE_CHANNELS; framesToReadRightNow = sizeof(tempBuffer)/sizeof(tempBuffer[0])/DEVICE_CHANNELS;
} }
@ -357,7 +359,8 @@ static mal_uint32 OnSendAudioDataToDevice(mal_device* pDevice, mal_uint32 frameC
mal_bool32 flushDSP = !audioBuffer->looping; mal_bool32 flushDSP = !audioBuffer->looping;
mal_uint32 framesJustRead = mal_dsp_read_frames_ex(&audioBuffer->dsp, framesToReadRightNow, tempBuffer, flushDSP); mal_uint32 framesJustRead = mal_dsp_read_frames_ex(&audioBuffer->dsp, framesToReadRightNow, tempBuffer, flushDSP);
if (framesJustRead > 0) { if (framesJustRead > 0)
{
float *framesOut = (float *)pFramesOut + (framesRead*device.channels); float *framesOut = (float *)pFramesOut + (framesRead*device.channels);
float *framesIn = tempBuffer; float *framesIn = tempBuffer;
MixFrames(framesOut, framesIn, framesJustRead, audioBuffer->volume); MixFrames(framesOut, framesIn, framesJustRead, audioBuffer->volume);
@ -367,11 +370,15 @@ static mal_uint32 OnSendAudioDataToDevice(mal_device* pDevice, mal_uint32 frameC
} }
// If we weren't able to read all the frames we requested, break. // If we weren't able to read all the frames we requested, break.
if (framesJustRead < framesToReadRightNow) { if (framesJustRead < framesToReadRightNow)
if (!audioBuffer->looping) { {
if (!audioBuffer->looping)
{
StopAudioBuffer(audioBuffer); StopAudioBuffer(audioBuffer);
break; break;
} else { }
else
{
// Should never get here, but just for safety, move the cursor position back to the start and continue the loop. // Should never get here, but just for safety, move the cursor position back to the start and continue the loop.
audioBuffer->frameCursorPos = 0; audioBuffer->frameCursorPos = 0;
continue; continue;
@ -381,12 +388,11 @@ static mal_uint32 OnSendAudioDataToDevice(mal_device* pDevice, mal_uint32 frameC
// If for some reason we weren't able to read every frame we'll need to break from the loop. Not doing this could // If for some reason we weren't able to read every frame we'll need to break from the loop. Not doing this could
// theoretically put us into an infinite loop. // theoretically put us into an infinite loop.
if (framesToRead > 0) { if (framesToRead > 0) break;
break;
}
} }
} }
} }
mal_mutex_unlock(&audioLock); mal_mutex_unlock(&audioLock);
return frameCount; // We always output the same number of frames that were originally requested. return frameCount; // We always output the same number of frames that were originally requested.
@ -488,7 +494,8 @@ void InitAudioDevice(void)
void CloseAudioDevice(void) void CloseAudioDevice(void)
{ {
#if USE_MINI_AL #if USE_MINI_AL
if (!isAudioInitialized) { if (!isAudioInitialized)
{
TraceLog(LOG_WARNING, "Could not close audio device because it is not currently initialized"); TraceLog(LOG_WARNING, "Could not close audio device because it is not currently initialized");
return; return;
} }
@ -555,7 +562,9 @@ static mal_uint32 AudioBuffer_OnDSPRead(mal_dsp* pDSP, mal_uint32 frameCount, vo
mal_uint32 subBufferSizeInFrames = audioBuffer->bufferSizeInFrames/2; mal_uint32 subBufferSizeInFrames = audioBuffer->bufferSizeInFrames/2;
mal_uint32 currentSubBufferIndex = audioBuffer->frameCursorPos/subBufferSizeInFrames; mal_uint32 currentSubBufferIndex = audioBuffer->frameCursorPos/subBufferSizeInFrames;
if (currentSubBufferIndex > 1) {
if (currentSubBufferIndex > 1)
{
TraceLog(LOG_DEBUG, "Frame cursor position moved too far forward in audio stream"); TraceLog(LOG_DEBUG, "Frame cursor position moved too far forward in audio stream");
return 0; return 0;
} }
@ -573,49 +582,47 @@ static mal_uint32 AudioBuffer_OnDSPRead(mal_dsp* pDSP, mal_uint32 frameCount, vo
{ {
// We break from this loop differently depending on the buffer's usage. For static buffers, we simply fill as much data as we can. For // We break from this loop differently depending on the buffer's usage. For static buffers, we simply fill as much data as we can. For
// streaming buffers we only fill the halves of the buffer that are processed. Unprocessed halves must keep their audio data intact. // streaming buffers we only fill the halves of the buffer that are processed. Unprocessed halves must keep their audio data intact.
if (audioBuffer->usage == AUDIO_BUFFER_USAGE_STATIC) { if (audioBuffer->usage == AUDIO_BUFFER_USAGE_STATIC)
if (framesRead >= frameCount) { {
break; if (framesRead >= frameCount) break;
}
} else {
if (isSubBufferProcessed[currentSubBufferIndex]) {
break;
} }
else
{
if (isSubBufferProcessed[currentSubBufferIndex]) break;
} }
mal_uint32 totalFramesRemaining = (frameCount - framesRead); mal_uint32 totalFramesRemaining = (frameCount - framesRead);
if (totalFramesRemaining == 0) { if (totalFramesRemaining == 0) break;
break;
}
mal_uint32 framesRemainingInOutputBuffer; mal_uint32 framesRemainingInOutputBuffer;
if (audioBuffer->usage == AUDIO_BUFFER_USAGE_STATIC) { if (audioBuffer->usage == AUDIO_BUFFER_USAGE_STATIC)
{
framesRemainingInOutputBuffer = audioBuffer->bufferSizeInFrames - audioBuffer->frameCursorPos; framesRemainingInOutputBuffer = audioBuffer->bufferSizeInFrames - audioBuffer->frameCursorPos;
} else { }
else
{
mal_uint32 firstFrameIndexOfThisSubBuffer = subBufferSizeInFrames * currentSubBufferIndex; mal_uint32 firstFrameIndexOfThisSubBuffer = subBufferSizeInFrames * currentSubBufferIndex;
framesRemainingInOutputBuffer = subBufferSizeInFrames - (audioBuffer->frameCursorPos - firstFrameIndexOfThisSubBuffer); framesRemainingInOutputBuffer = subBufferSizeInFrames - (audioBuffer->frameCursorPos - firstFrameIndexOfThisSubBuffer);
} }
mal_uint32 framesToRead = totalFramesRemaining; mal_uint32 framesToRead = totalFramesRemaining;
if (framesToRead > framesRemainingInOutputBuffer) { if (framesToRead > framesRemainingInOutputBuffer) framesToRead = framesRemainingInOutputBuffer;
framesToRead = framesRemainingInOutputBuffer;
}
memcpy((unsigned char *)pFramesOut + (framesRead*frameSizeInBytes), audioBuffer->buffer + (audioBuffer->frameCursorPos*frameSizeInBytes), framesToRead*frameSizeInBytes); memcpy((unsigned char *)pFramesOut + (framesRead*frameSizeInBytes), audioBuffer->buffer + (audioBuffer->frameCursorPos*frameSizeInBytes), framesToRead*frameSizeInBytes);
audioBuffer->frameCursorPos = (audioBuffer->frameCursorPos + framesToRead) % audioBuffer->bufferSizeInFrames; audioBuffer->frameCursorPos = (audioBuffer->frameCursorPos + framesToRead) % audioBuffer->bufferSizeInFrames;
framesRead += framesToRead; framesRead += framesToRead;
// If we've read to the end of the buffer, mark it as processed. // If we've read to the end of the buffer, mark it as processed.
if (framesToRead == framesRemainingInOutputBuffer) { if (framesToRead == framesRemainingInOutputBuffer)
{
audioBuffer->isSubBufferProcessed[currentSubBufferIndex] = true; audioBuffer->isSubBufferProcessed[currentSubBufferIndex] = true;
isSubBufferProcessed[currentSubBufferIndex] = true; isSubBufferProcessed[currentSubBufferIndex] = true;
currentSubBufferIndex = (currentSubBufferIndex + 1) % 2; currentSubBufferIndex = (currentSubBufferIndex + 1) % 2;
// We need to break from this loop if we're not looping. // We need to break from this loop if we're not looping.
if (!audioBuffer->looping) { if (!audioBuffer->looping)
{
StopAudioBuffer(audioBuffer); StopAudioBuffer(audioBuffer);
break; break;
} }
@ -624,15 +631,14 @@ static mal_uint32 AudioBuffer_OnDSPRead(mal_dsp* pDSP, mal_uint32 frameCount, vo
// Zero-fill excess. // Zero-fill excess.
mal_uint32 totalFramesRemaining = (frameCount - framesRead); mal_uint32 totalFramesRemaining = (frameCount - framesRead);
if (totalFramesRemaining > 0) { if (totalFramesRemaining > 0)
{
memset((unsigned char*)pFramesOut + (framesRead*frameSizeInBytes), 0, totalFramesRemaining*frameSizeInBytes); memset((unsigned char*)pFramesOut + (framesRead*frameSizeInBytes), 0, totalFramesRemaining*frameSizeInBytes);
// For static buffers we can fill the remaining frames with silence for safety, but we don't want // For static buffers we can fill the remaining frames with silence for safety, but we don't want
// to report those frames as "read". The reason for this is that the caller uses the return value // to report those frames as "read". The reason for this is that the caller uses the return value
// to know whether or not a non-looping sound has finished playback. // to know whether or not a non-looping sound has finished playback.
if (audioBuffer->usage != AUDIO_BUFFER_USAGE_STATIC) { if (audioBuffer->usage != AUDIO_BUFFER_USAGE_STATIC) framesRead += totalFramesRemaining;
framesRead += totalFramesRemaining;
}
} }
return framesRead; return framesRead;
@ -658,7 +664,8 @@ AudioBuffer* CreateAudioBuffer(mal_format format, mal_uint32 channels, mal_uint3
dspConfig.sampleRateIn = sampleRate; dspConfig.sampleRateIn = sampleRate;
dspConfig.sampleRateOut = DEVICE_SAMPLE_RATE; dspConfig.sampleRateOut = DEVICE_SAMPLE_RATE;
mal_result resultMAL = mal_dsp_init(&dspConfig, AudioBuffer_OnDSPRead, audioBuffer, &audioBuffer->dsp); mal_result resultMAL = mal_dsp_init(&dspConfig, AudioBuffer_OnDSPRead, audioBuffer, &audioBuffer->dsp);
if (resultMAL != MAL_SUCCESS) { if (resultMAL != MAL_SUCCESS)
{
TraceLog(LOG_ERROR, "LoadSoundFromWave() : Failed to create data conversion pipeline"); TraceLog(LOG_ERROR, "LoadSoundFromWave() : Failed to create data conversion pipeline");
free(audioBuffer); free(audioBuffer);
return NULL; return NULL;
@ -734,10 +741,7 @@ void StopAudioBuffer(AudioBuffer* audioBuffer)
} }
// Don't do anything if the audio buffer is already stopped. // Don't do anything if the audio buffer is already stopped.
if (!IsAudioBufferPlaying(audioBuffer)) if (!IsAudioBufferPlaying(audioBuffer)) return;
{
return;
}
audioBuffer->playing = false; audioBuffer->playing = false;
audioBuffer->paused = false; audioBuffer->paused = false;
@ -874,23 +878,13 @@ Sound LoadSoundFromWave(Wave wave)
mal_uint32 frameCountIn = wave.sampleCount; // Is wave->sampleCount actually the frame count? That terminology needs to change, if so. mal_uint32 frameCountIn = wave.sampleCount; // Is wave->sampleCount actually the frame count? That terminology needs to change, if so.
mal_uint32 frameCount = mal_convert_frames(NULL, DEVICE_FORMAT, DEVICE_CHANNELS, DEVICE_SAMPLE_RATE, NULL, formatIn, wave.channels, wave.sampleRate, frameCountIn); mal_uint32 frameCount = mal_convert_frames(NULL, DEVICE_FORMAT, DEVICE_CHANNELS, DEVICE_SAMPLE_RATE, NULL, formatIn, wave.channels, wave.sampleRate, frameCountIn);
if (frameCount == 0) { if (frameCount == 0) TraceLog(LOG_ERROR, "LoadSoundFromWave() : Failed to get frame count for format conversion");
TraceLog(LOG_ERROR, "LoadSoundFromWave() : Failed to get frame count for format conversion");
}
AudioBuffer* audioBuffer = CreateAudioBuffer(DEVICE_FORMAT, DEVICE_CHANNELS, DEVICE_SAMPLE_RATE, frameCount, AUDIO_BUFFER_USAGE_STATIC); AudioBuffer* audioBuffer = CreateAudioBuffer(DEVICE_FORMAT, DEVICE_CHANNELS, DEVICE_SAMPLE_RATE, frameCount, AUDIO_BUFFER_USAGE_STATIC);
if (audioBuffer == NULL) if (audioBuffer == NULL) TraceLog(LOG_ERROR, "LoadSoundFromWave() : Failed to create audio buffer");
{
TraceLog(LOG_ERROR, "LoadSoundFromWave() : Failed to create audio buffer");
}
frameCount = mal_convert_frames(audioBuffer->buffer, audioBuffer->dsp.config.formatIn, audioBuffer->dsp.config.channelsIn, audioBuffer->dsp.config.sampleRateIn, wave.data, formatIn, wave.channels, wave.sampleRate, frameCountIn); frameCount = mal_convert_frames(audioBuffer->buffer, audioBuffer->dsp.config.formatIn, audioBuffer->dsp.config.channelsIn, audioBuffer->dsp.config.sampleRateIn, wave.data, formatIn, wave.channels, wave.sampleRate, frameCountIn);
if (frameCount == 0) if (frameCount == 0) TraceLog(LOG_ERROR, "LoadSoundFromWave() : Format conversion failed");
{
TraceLog(LOG_ERROR, "LoadSoundFromWave() : Format conversion failed");
}
sound.audioBuffer = audioBuffer; sound.audioBuffer = audioBuffer;
#else #else
@ -1121,7 +1115,8 @@ void WaveFormat(Wave *wave, int sampleRate, int sampleSize, int channels)
mal_uint32 frameCountIn = wave->sampleCount; // Is wave->sampleCount actually the frame count? That terminology needs to change, if so. mal_uint32 frameCountIn = wave->sampleCount; // Is wave->sampleCount actually the frame count? That terminology needs to change, if so.
mal_uint32 frameCount = mal_convert_frames(NULL, formatOut, channels, sampleRate, NULL, formatIn, wave->channels, wave->sampleRate, frameCountIn); mal_uint32 frameCount = mal_convert_frames(NULL, formatOut, channels, sampleRate, NULL, formatIn, wave->channels, wave->sampleRate, frameCountIn);
if (frameCount == 0) { if (frameCount == 0)
{
TraceLog(LOG_ERROR, "WaveFormat() : Failed to get frame count for format conversion."); TraceLog(LOG_ERROR, "WaveFormat() : Failed to get frame count for format conversion.");
return; return;
} }
@ -1129,7 +1124,8 @@ void WaveFormat(Wave *wave, int sampleRate, int sampleSize, int channels)
void *data = malloc(frameCount*channels*(sampleSize/8)); void *data = malloc(frameCount*channels*(sampleSize/8));
frameCount = mal_convert_frames(data, formatOut, channels, sampleRate, wave->data, formatIn, wave->channels, wave->sampleRate, frameCountIn); frameCount = mal_convert_frames(data, formatOut, channels, sampleRate, wave->data, formatIn, wave->channels, wave->sampleRate, frameCountIn);
if (frameCount == 0) { if (frameCount == 0)
{
TraceLog(LOG_ERROR, "WaveFormat() : Format conversion failed."); TraceLog(LOG_ERROR, "WaveFormat() : Format conversion failed.");
return; return;
} }
@ -1416,9 +1412,9 @@ void PlayMusicStream(Music music)
// // just make sure to play again on window restore // // just make sure to play again on window restore
// if (IsMusicPlaying(music)) PlayMusicStream(music); // if (IsMusicPlaying(music)) PlayMusicStream(music);
mal_uint32 frameCursorPos = audioBuffer->frameCursorPos; mal_uint32 frameCursorPos = audioBuffer->frameCursorPos;
{
PlayAudioStream(music->stream); // <-- This resets the cursor position. PlayAudioStream(music->stream); // <-- This resets the cursor position.
}
audioBuffer->frameCursorPos = frameCursorPos; audioBuffer->frameCursorPos = frameCursorPos;
#else #else
alSourcePlay(music->stream.source); alSourcePlay(music->stream.source);
@ -1566,10 +1562,7 @@ void UpdateMusicStream(Music music)
} }
else else
{ {
if (music->loopCount == -1) if (music->loopCount == -1) PlayMusicStream(music);
{
PlayMusicStream(music);
}
} }
} }
else else
@ -1756,9 +1749,7 @@ AudioStream InitAudioStream(unsigned int sampleRate, unsigned int sampleSize, un
// The size of a streaming buffer must be at least double the size of a period. // The size of a streaming buffer must be at least double the size of a period.
unsigned int periodSize = device.bufferSizeInFrames / device.periods; unsigned int periodSize = device.bufferSizeInFrames / device.periods;
unsigned int subBufferSize = AUDIO_BUFFER_SIZE; unsigned int subBufferSize = AUDIO_BUFFER_SIZE;
if (subBufferSize < periodSize) { if (subBufferSize < periodSize) subBufferSize = periodSize;
subBufferSize = periodSize;
}
AudioBuffer *audioBuffer = CreateAudioBuffer(formatIn, stream.channels, stream.sampleRate, subBufferSize*2, AUDIO_BUFFER_USAGE_STREAM); AudioBuffer *audioBuffer = CreateAudioBuffer(formatIn, stream.channels, stream.sampleRate, subBufferSize*2, AUDIO_BUFFER_USAGE_STREAM);
if (audioBuffer == NULL) if (audioBuffer == NULL)
@ -1885,16 +1876,15 @@ void UpdateAudioStream(AudioStream stream, const void *data, int samplesCount)
if (subBufferSizeInFrames >= (mal_uint32)samplesCount) if (subBufferSizeInFrames >= (mal_uint32)samplesCount)
{ {
mal_uint32 framesToWrite = subBufferSizeInFrames; mal_uint32 framesToWrite = subBufferSizeInFrames;
if (framesToWrite > (mal_uint32)samplesCount) { if (framesToWrite > (mal_uint32)samplesCount) framesToWrite = (mal_uint32)samplesCount;
framesToWrite = (mal_uint32)samplesCount;
}
mal_uint32 bytesToWrite = framesToWrite*stream.channels*(stream.sampleSize/8); mal_uint32 bytesToWrite = framesToWrite*stream.channels*(stream.sampleSize/8);
memcpy(subBuffer, data, bytesToWrite); memcpy(subBuffer, data, bytesToWrite);
// Any leftover frames should be filled with zeros. // Any leftover frames should be filled with zeros.
mal_uint32 leftoverFrameCount = subBufferSizeInFrames - framesToWrite; mal_uint32 leftoverFrameCount = subBufferSizeInFrames - framesToWrite;
if (leftoverFrameCount > 0) { if (leftoverFrameCount > 0)
{
memset(subBuffer + bytesToWrite, 0, leftoverFrameCount*stream.channels*(stream.sampleSize/8)); memset(subBuffer + bytesToWrite, 0, leftoverFrameCount*stream.channels*(stream.sampleSize/8));
} }