Merge pull request #166 from Bil152/develop

Code refactoring of the music module to make it more user-friendly (issue #144)
Ray
2016-07-30 12:45:31 +02:00
committed by GitHub
3 changed files with 269 additions and 211 deletions


@ -2,7 +2,7 @@
*
* raylib.audio
*
*   Basic functions to manage Audio:
* Manage audio device (init/close)
* Load and Unload audio files
* Play/Stop/Pause/Resume loaded audio
@ -103,8 +103,8 @@
// Types and Structures Definition
//----------------------------------------------------------------------------------
// Used to create custom audio streams that are not bound to a specific file.
// There can be no more than 4 concurrent mix channels in use.
// This is because each active stream is tied to a dedicated mix channel.
typedef struct MixChannel {
unsigned short sampleRate; // default is 48000
@ -112,7 +112,7 @@ typedef struct MixChannel {
unsigned char mixChannel; // 0-3 or mixA-mixD, each mix channel can receive up to one dedicated audio stream
bool floatingPoint; // if false then the short datatype is used instead
bool playing; // false if paused
ALenum alFormat; // OpenAL format specifier
ALuint alSource; // OpenAL source
ALuint alBuffer[MAX_STREAM_BUFFERS]; // OpenAL sample buffer
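For reference, a minimal sketch of the slot scan implied by the four-channel limit described above. The helper name FindFreeMixChannel is hypothetical; it assumes the file-scope mixChannels[MAX_MIX_CHANNELS] array used throughout this module.

// Hypothetical helper (not part of this patch): find the first free mix channel slot
static int FindFreeMixChannel(void)
{
    for (int i = 0; i < MAX_MIX_CHANNELS; i++)
    {
        if (mixChannels[i] == NULL) return i;   // unused slot, available for InitMixChannel()
    }
    return -1;      // all four mix channels are tied to active streams
}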
@ -125,7 +125,7 @@ typedef struct Music {
jar_xm_context_t *xmctx; // XM chiptune context
jar_mod_context_t modctx; // MOD chiptune context
MixChannel *mixc; // Mix channel
unsigned int totalSamplesLeft;
float totalLengthSeconds;
bool loop;
@ -149,7 +149,8 @@ typedef enum {
ERROR_UNABLE_TO_OPEN_RRES_FILE = 2048,
ERROR_INVALID_RRES_FILE = 4096,
ERROR_INVALID_RRES_RESOURCE = 8192,
ERROR_UNINITIALIZED_CHANNELS = 16384,
ERROR_UNINITIALIZED_MUSIC_BUFFER = 32768
} AudioError;
#if defined(AUDIO_STANDALONE)
@ -221,7 +222,7 @@ void CloseAudioDevice(void)
{
for (int index = 0; index < MAX_MUSIC_STREAMS; index++)
{
if (musicStreams[index].mixc) StopMusicStream(index); // Stop music streaming and close current stream
if (musicStreams[index].mixc) StopMusicStreamEx(index); // Stop music streaming and close current stream
}
ALCdevice *device;
@ -240,12 +241,12 @@ void CloseAudioDevice(void)
bool IsAudioDeviceReady(void)
{
ALCcontext *context = alcGetCurrentContext();
if (context == NULL) return false;
else
{
ALCdevice *device = alcGetContextsDevice(context);
if (device == NULL) return false;
else return true;
}
@ -256,13 +257,13 @@ bool IsAudioDeviceReady(void)
//----------------------------------------------------------------------------------
// Init mix channel for streaming
// The mixChannel parameter selects which audio muxing channel to operate on; channels 0-3 are available.
// Each mix channel can only be used one at a time.
static MixChannel *InitMixChannel(unsigned short sampleRate, unsigned char mixChannel, unsigned char channels, bool floatingPoint)
{
if (mixChannel >= MAX_MIX_CHANNELS) return NULL;
if (!IsAudioDeviceReady()) InitAudioDevice();
if (!mixChannels[mixChannel])
{
MixChannel *mixc = (MixChannel *)malloc(sizeof(MixChannel));
@ -271,7 +272,7 @@ static MixChannel *InitMixChannel(unsigned short sampleRate, unsigned char mixCh
mixc->mixChannel = mixChannel;
mixc->floatingPoint = floatingPoint;
mixChannels[mixChannel] = mixc;
// Setup OpenAL format
if (channels == 1)
{
@ -283,17 +284,17 @@ static MixChannel *InitMixChannel(unsigned short sampleRate, unsigned char mixCh
if (floatingPoint) mixc->alFormat = AL_FORMAT_STEREO_FLOAT32;
else mixc->alFormat = AL_FORMAT_STEREO16;
}
// Create an audio source
alGenSources(1, &mixc->alSource);
alSourcef(mixc->alSource, AL_PITCH, 1);
alSourcef(mixc->alSource, AL_GAIN, 1);
alSource3f(mixc->alSource, AL_POSITION, 0, 0, 0);
alSource3f(mixc->alSource, AL_VELOCITY, 0, 0, 0);
// Create Buffer
alGenBuffers(MAX_STREAM_BUFFERS, mixc->alBuffer);
// Fill buffers
for (int i = 0; i < MAX_STREAM_BUFFERS; i++)
{
@ -309,14 +310,14 @@ static MixChannel *InitMixChannel(unsigned short sampleRate, unsigned char mixCh
alBufferData(mixc->alBuffer[i], mixc->alFormat, pcm, MUSIC_BUFFER_SIZE_SHORT*sizeof(short), mixc->sampleRate);
}
}
alSourceQueueBuffers(mixc->alSource, MAX_STREAM_BUFFERS, mixc->alBuffer);
mixc->playing = true;
alSourcePlay(mixc->alSource);
return mixc;
}
return NULL;
}
@ -327,18 +328,18 @@ static void CloseMixChannel(MixChannel *mixc)
{
alSourceStop(mixc->alSource);
mixc->playing = false;
// Flush out all queued buffers
ALuint buffer = 0;
int queued = 0;
alGetSourcei(mixc->alSource, AL_BUFFERS_QUEUED, &queued);
while (queued > 0)
{
alSourceUnqueueBuffers(mixc->alSource, 1, &buffer);
queued--;
}
// Delete source and buffers
alDeleteSources(1, &mixc->alSource);
alDeleteBuffers(MAX_STREAM_BUFFERS, mixc->alBuffer);
@ -354,30 +355,30 @@ static void CloseMixChannel(MixChannel *mixc)
static int BufferMixChannel(MixChannel *mixc, void *data, int numberElements)
{
if (!mixc || (mixChannels[mixc->mixChannel] != mixc)) return 0; // When there are two channels there must be an even number of samples
if (!data || !numberElements)
{
// Pauses audio until data is given
if (mixc->playing)
{
alSourcePause(mixc->alSource);
mixc->playing = false;
}
return 0;
}
else if (!mixc->playing)
{
// Restart audio otherwise
alSourcePlay(mixc->alSource);
mixc->playing = true;
}
ALuint buffer = 0;
alSourceUnqueueBuffers(mixc->alSource, 1, &buffer);
if (!buffer) return 0;
if (mixc->floatingPoint)
{
// Process float buffers
@ -390,9 +391,9 @@ static int BufferMixChannel(MixChannel *mixc, void *data, int numberElements)
short *ptr = (short *)data;
alBufferData(buffer, mixc->alFormat, ptr, numberElements*sizeof(short), mixc->sampleRate);
}
alSourceQueueBuffers(mixc->alSource, 1, &buffer);
return numberElements;
}
@ -439,7 +440,7 @@ int InitRawMixChannel(int sampleRate, int channels, bool floatingPoint)
return -1;
}
}
if (InitMixChannel(sampleRate, mixIndex, channels, floatingPoint)) return mixIndex;
else
{
@ -455,13 +456,13 @@ int InitRawMixChannel(int sampleRate, int channels, bool floatingPoint)
int BufferRawAudioContext(int ctx, void *data, unsigned short numberElements)
{
int numBuffered = 0;
if (ctx >= 0)
{
MixChannel *mixc = mixChannels[ctx];
numBuffered = BufferMixChannel(mixc, data, numberElements);
}
return numBuffered;
}
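A usage sketch of the raw-audio path above (InitRawMixChannel plus BufferRawAudioContext). The FillSamples generator and the unbounded loop are illustrative assumptions, not functions provided by this module.

// Sketch: stream mono 16-bit PCM through one of the four mix channels
short pcm[MUSIC_BUFFER_SIZE_SHORT] = { 0 };
int ctx = InitRawMixChannel(48000, 1, false);           // 48 kHz, mono, short samples
if (ctx >= 0)
{
    for (;;)                                            // per-frame loop (break condition omitted)
    {
        FillSamples(pcm, MUSIC_BUFFER_SIZE_SHORT);      // hypothetical PCM generator
        // BufferRawAudioContext() returns the number of elements queued (0 if no buffer was free)
        BufferRawAudioContext(ctx, pcm, MUSIC_BUFFER_SIZE_SHORT);
    }
}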
@ -486,12 +487,12 @@ Sound LoadSound(char *fileName)
// Audio file loading
// NOTE: Buffer space is allocated inside function, Wave must be freed
if (strcmp(GetExtension(fileName),"wav") == 0) wave = LoadWAV(fileName);
else if (strcmp(GetExtension(fileName),"ogg") == 0) wave = LoadOGG(fileName);
else
{
TraceLog(WARNING, "[%s] Sound extension not recognized, it can't be loaded", fileName);
// TODO: Find a better way to register errors (similar to glGetError())
lastAudioError = ERROR_EXTENSION_NOT_RECOGNIZED;
}
@ -532,7 +533,7 @@ Sound LoadSound(char *fileName)
// Attach sound buffer to source
alSourcei(source, AL_BUFFER, buffer);
TraceLog(INFO, "[SND ID %i][BUFR ID %i] Sound file loaded successfully (SampleRate: %i, BitRate: %i, Channels: %i)", source, buffer, wave.sampleRate, wave.bitsPerSample, wave.channels);
TraceLog(INFO, "[%s] Sound file loaded successfully (SampleRate: %i, BitRate: %i, Channels: %i)", fileName, wave.sampleRate, wave.bitsPerSample, wave.channels);
// Unallocate WAV data
UnloadWave(wave);
@ -606,9 +607,9 @@ Sound LoadSoundFromRES(const char *rresName, int resId)
#if defined(AUDIO_STANDALONE)
TraceLog(WARNING, "Sound loading from rRES resource file not supported on standalone mode");
#else
bool found = false;
char id[4]; // rRES file identifier
unsigned char version; // rRES file version and subversion
char useless; // rRES header reserved data
@ -762,8 +763,8 @@ void UnloadSound(Sound sound)
{
alDeleteSources(1, &sound.source);
alDeleteBuffers(1, &sound.buffer);
TraceLog(INFO, "[SND ID %i][BUFR ID %i] Unloaded sound data from RAM", sound.source, sound.buffer);
TraceLog(INFO, "Unloaded sound data");
}
// Play a sound
@ -827,138 +828,182 @@ void SetSoundPitch(Sound sound, float pitch)
// Module Functions Definition - Music loading and stream playing (.OGG)
//----------------------------------------------------------------------------------
MusicBuffer LoadMusicBufferStream(char *fileName, int index)
{
MusicBuffer buffer = { 0 };
if (index >= MAX_MUSIC_STREAMS)
{
TraceLog(WARNING, "Index %i is out of range (MAX_MUSIC_STREAMS)", index);
return buffer; // error: return empty buffer
}
buffer.fileName = fileName;
buffer.index = index;
if (musicStreams[buffer.index].stream || musicStreams[buffer.index].xmctx) buffer.fileName = NULL; // error: stream slot already in use (rejected later by PlayMusicStream)
return buffer;
}
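A usage sketch of the new handle-based entry point: the caller obtains a MusicBuffer once and passes it to the rest of the music API instead of a raw index. The file path and slot 0 are placeholders.

// Sketch: bind an OGG file to music stream slot 0
MusicBuffer music = LoadMusicBufferStream("resources/ambient.ogg", 0);  // placeholder path
// music.fileName stays NULL on failure, which PlayMusicStream() reports as an error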
// Start music playing (open stream)
// returns 0 on success or error code
int PlayMusicStream(int index, char *fileName)
int PlayMusicStream(MusicBuffer musicBuffer)
{
if (musicBuffer.fileName == NULL)
{
return ERROR_UNINITIALIZED_MUSIC_BUFFER;
}
int mixIndex;
if (musicStreams[index].stream || musicStreams[index].xmctx) return ERROR_UNINITIALIZED_CHANNELS; // error
for (mixIndex = 0; mixIndex < MAX_MIX_CHANNELS; mixIndex++) // find empty mix channel slot
{
if (mixChannels[mixIndex] == NULL) break;
else if (mixIndex == (MAX_MIX_CHANNELS - 1)) return ERROR_OUT_OF_MIX_CHANNELS; // error
}
if (strcmp(GetExtension(fileName), "ogg") == 0)
if (strcmp(GetExtension(musicBuffer.fileName),"ogg") == 0)
{
// Open audio stream
musicStreams[index].stream = stb_vorbis_open_filename(fileName, NULL, NULL);
musicStreams[musicBuffer.index].stream = stb_vorbis_open_filename(musicBuffer.fileName, NULL, NULL);
if (musicStreams[index].stream == NULL)
if (musicStreams[musicBuffer.index].stream == NULL)
{
TraceLog(WARNING, "[%s] OGG audio file could not be opened", fileName);
TraceLog(WARNING, "[%s] OGG audio file could not be opened", musicBuffer.fileName);
return ERROR_LOADING_OGG; // error
}
else
{
// Get file info
stb_vorbis_info info = stb_vorbis_get_info(musicStreams[index].stream);
stb_vorbis_info info = stb_vorbis_get_info(musicStreams[musicBuffer.index].stream);
TraceLog(DEBUG, "[%s] Ogg sample rate: %i", fileName, info.sample_rate);
TraceLog(DEBUG, "[%s] Ogg channels: %i", fileName, info.channels);
TraceLog(DEBUG, "[%s] Temp memory required: %i", fileName, info.temp_memory_required);
TraceLog(INFO, "[%s] Ogg sample rate: %i", musicBuffer.fileName, info.sample_rate);
TraceLog(INFO, "[%s] Ogg channels: %i", musicBuffer.fileName, info.channels);
TraceLog(DEBUG, "[%s] Temp memory required: %i", musicBuffer.fileName, info.temp_memory_required);
musicStreams[musicBuffer.index].loop = true; // We loop by default
musicStreams[musicBuffer.index].enabled = true;
musicStreams[musicBuffer.index].totalSamplesLeft = (unsigned int)stb_vorbis_stream_length_in_samples(musicStreams[musicBuffer.index].stream) * info.channels;
musicStreams[musicBuffer.index].totalLengthSeconds = stb_vorbis_stream_length_in_seconds(musicStreams[musicBuffer.index].stream);
musicStreams[index].loop = true; // We loop by default
musicStreams[index].enabled = true;
musicStreams[index].totalSamplesLeft = (unsigned int)stb_vorbis_stream_length_in_samples(musicStreams[index].stream) * info.channels;
musicStreams[index].totalLengthSeconds = stb_vorbis_stream_length_in_seconds(musicStreams[index].stream);
if (info.channels == 2)
{
musicStreams[index].mixc = InitMixChannel(info.sample_rate, mixIndex, 2, false);
musicStreams[index].mixc->playing = true;
musicStreams[musicBuffer.index].mixc = InitMixChannel(info.sample_rate, mixIndex, 2, false);
musicStreams[musicBuffer.index].mixc->playing = true;
}
else
{
musicStreams[index].mixc = InitMixChannel(info.sample_rate, mixIndex, 1, false);
musicStreams[index].mixc->playing = true;
musicStreams[musicBuffer.index].mixc = InitMixChannel(info.sample_rate, mixIndex, 1, false);
musicStreams[musicBuffer.index].mixc->playing = true;
}
if (!musicStreams[index].mixc) return ERROR_LOADING_OGG; // error
if (!musicStreams[musicBuffer.index].mixc) return ERROR_LOADING_OGG; // error
}
}
else if (strcmp(GetExtension(fileName), "xm") == 0)
else if (strcmp(GetExtension(musicBuffer.fileName),"xm") == 0)
{
// only stereo is supported for xm
if (!jar_xm_create_context_from_file(&musicStreams[index].xmctx, 48000, fileName))
if (!jar_xm_create_context_from_file(&musicStreams[musicBuffer.index].xmctx, 48000, musicBuffer.fileName))
{
musicStreams[index].chipTune = true;
musicStreams[index].loop = true;
jar_xm_set_max_loop_count(musicStreams[index].xmctx, 0); // infinite number of loops
musicStreams[index].totalSamplesLeft = (unsigned int)jar_xm_get_remaining_samples(musicStreams[index].xmctx);
musicStreams[index].totalLengthSeconds = ((float)musicStreams[index].totalSamplesLeft)/48000.0f;
musicStreams[index].enabled = true;
TraceLog(INFO, "[%s] XM number of samples: %i", fileName, musicStreams[index].totalSamplesLeft);
TraceLog(INFO, "[%s] XM track length: %11.6f sec", fileName, musicStreams[index].totalLengthSeconds);
musicStreams[index].mixc = InitMixChannel(48000, mixIndex, 2, true);
if (!musicStreams[index].mixc) return ERROR_XM_CONTEXT_CREATION; // error
musicStreams[index].mixc->playing = true;
musicStreams[musicBuffer.index].chipTune = true;
musicStreams[musicBuffer.index].loop = true;
jar_xm_set_max_loop_count(musicStreams[musicBuffer.index].xmctx, 0); // infinite number of loops
musicStreams[musicBuffer.index].totalSamplesLeft = (unsigned int)jar_xm_get_remaining_samples(musicStreams[musicBuffer.index].xmctx);
musicStreams[musicBuffer.index].totalLengthSeconds = ((float)musicStreams[musicBuffer.index].totalSamplesLeft)/48000.0f;
musicStreams[musicBuffer.index].enabled = true;
TraceLog(INFO, "[%s] XM number of samples: %i", musicBuffer.fileName, musicStreams[musicBuffer.index].totalSamplesLeft);
TraceLog(INFO, "[%s] XM track length: %11.6f sec", musicBuffer.fileName, musicStreams[musicBuffer.index].totalLengthSeconds);
musicStreams[musicBuffer.index].mixc = InitMixChannel(48000, mixIndex, 2, true);
if (!musicStreams[musicBuffer.index].mixc) return ERROR_XM_CONTEXT_CREATION; // error
musicStreams[musicBuffer.index].mixc->playing = true;
}
else
{
TraceLog(WARNING, "[%s] XM file could not be opened", fileName);
TraceLog(WARNING, "[%s] XM file could not be opened", musicBuffer.fileName);
return ERROR_LOADING_XM; // error
}
}
else if (strcmp(GetExtension(fileName), "mod") == 0)
else if (strcmp(GetExtension(musicBuffer.fileName),"mod") == 0)
{
jar_mod_init(&musicStreams[index].modctx);
if (jar_mod_load_file(&musicStreams[index].modctx, fileName))
jar_mod_init(&musicStreams[musicBuffer.index].modctx);
if (jar_mod_load_file(&musicStreams[musicBuffer.index].modctx, musicBuffer.fileName))
{
musicStreams[index].chipTune = true;
musicStreams[index].loop = true;
musicStreams[index].totalSamplesLeft = (unsigned int)jar_mod_max_samples(&musicStreams[index].modctx);
musicStreams[index].totalLengthSeconds = ((float)musicStreams[index].totalSamplesLeft)/48000.0f;
musicStreams[index].enabled = true;
TraceLog(INFO, "[%s] MOD number of samples: %i", fileName, musicStreams[index].totalSamplesLeft);
TraceLog(INFO, "[%s] MOD track length: %11.6f sec", fileName, musicStreams[index].totalLengthSeconds);
musicStreams[index].mixc = InitMixChannel(48000, mixIndex, 2, false);
if (!musicStreams[index].mixc) return ERROR_MOD_CONTEXT_CREATION; // error
musicStreams[index].mixc->playing = true;
musicStreams[musicBuffer.index].chipTune = true;
musicStreams[musicBuffer.index].loop = true;
musicStreams[musicBuffer.index].totalSamplesLeft = (unsigned int)jar_mod_max_samples(&musicStreams[musicBuffer.index].modctx);
musicStreams[musicBuffer.index].totalLengthSeconds = ((float)musicStreams[musicBuffer.index].totalSamplesLeft)/48000.0f;
musicStreams[musicBuffer.index].enabled = true;
TraceLog(INFO, "[%s] MOD number of samples: %i", musicBuffer.fileName, musicStreams[musicBuffer.index].totalSamplesLeft);
TraceLog(INFO, "[%s] MOD track length: %11.6f sec", musicBuffer.fileName, musicStreams[musicBuffer.index].totalLengthSeconds);
musicStreams[musicBuffer.index].mixc = InitMixChannel(48000, mixIndex, 2, false);
if (!musicStreams[musicBuffer.index].mixc) return ERROR_MOD_CONTEXT_CREATION; // error
musicStreams[musicBuffer.index].mixc->playing = true;
}
else
{
TraceLog(WARNING, "[%s] MOD file could not be opened", fileName);
TraceLog(WARNING, "[%s] MOD file could not be opened", musicBuffer.fileName);
return ERROR_LOADING_MOD; // error
}
}
else
{
TraceLog(WARNING, "[%s] Music extension not recognized, it can't be loaded", fileName);
TraceLog(WARNING, "[%s] Music extension not recognized, it can't be loaded", musicBuffer.fileName);
return ERROR_EXTENSION_NOT_RECOGNIZED; // error
}
return 0; // normal return
}
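Since PlayMusicStream() now returns 0 or one of the AudioError flags defined above, a minimal sketch of checking its result; the music handle is assumed to come from LoadMusicBufferStream().

int result = PlayMusicStream(music);
if (result == ERROR_OUT_OF_MIX_CHANNELS) TraceLog(WARNING, "All mix channels are in use");
else if (result != 0) TraceLog(WARNING, "PlayMusicStream() failed with error flag %i", result);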
// Stop music playing for an individual music stream in the musicStreams array (close stream)
void StopMusicStream(int index)
void StopMusicStream(MusicBuffer musicBuffer)
{
if (musicBuffer.index < MAX_MUSIC_STREAMS && musicStreams[musicBuffer.index].mixc)
{
CloseMixChannel(musicStreams[musicBuffer.index].mixc);
if (musicStreams[musicBuffer.index].xmctx)
jar_xm_free_context(musicStreams[musicBuffer.index].xmctx);
else if (musicStreams[musicBuffer.index].modctx.mod_loaded)
jar_mod_unload(&musicStreams[musicBuffer.index].modctx);
else
stb_vorbis_close(musicStreams[musicBuffer.index].stream);
musicStreams[musicBuffer.index].enabled = false;
if (musicStreams[musicBuffer.index].stream || musicStreams[musicBuffer.index].xmctx)
{
musicStreams[musicBuffer.index].stream = NULL;
musicStreams[musicBuffer.index].xmctx = NULL;
}
}
}
void StopMusicStreamEx(int index)
{
if (index < MAX_MUSIC_STREAMS && musicStreams[index].mixc)
{
CloseMixChannel(musicStreams[index].mixc);
if (musicStreams[index].xmctx)
jar_xm_free_context(musicStreams[index].xmctx);
else if (musicStreams[index].modctx.mod_loaded)
jar_mod_unload(&musicStreams[index].modctx);
else
stb_vorbis_close(musicStreams[index].stream);
musicStreams[index].enabled = false;
if (musicStreams[index].stream || musicStreams[index].xmctx)
{
musicStreams[index].stream = NULL;
@ -968,47 +1013,47 @@ void StopMusicStream(int index)
}
// Update (re-fill) music buffers if data already processed
void UpdateMusicStream(int index)
void UpdateMusicStream(MusicBuffer musicBuffer)
{
ALenum state;
bool active = true;
ALint processed = 0;
// Determine if music stream is ready to be written
alGetSourcei(musicStreams[index].mixc->alSource, AL_BUFFERS_PROCESSED, &processed);
if (musicStreams[index].mixc->playing && (index < MAX_MUSIC_STREAMS) && musicStreams[index].enabled && musicStreams[index].mixc && (processed > 0))
alGetSourcei(musicStreams[musicBuffer.index].mixc->alSource, AL_BUFFERS_PROCESSED, &processed);
if (musicStreams[musicBuffer.index].mixc->playing && (musicBuffer.index < MAX_MUSIC_STREAMS) && musicStreams[musicBuffer.index].enabled && musicStreams[musicBuffer.index].mixc && (processed > 0))
{
active = BufferMusicStream(index, processed);
if (!active && musicStreams[index].loop)
active = BufferMusicStream(musicBuffer.index, processed);
if (!active && musicStreams[musicBuffer.index].loop)
{
if (musicStreams[index].chipTune)
if (musicStreams[musicBuffer.index].chipTune)
{
if(musicStreams[index].modctx.mod_loaded) jar_mod_seek_start(&musicStreams[index].modctx);
musicStreams[index].totalSamplesLeft = musicStreams[index].totalLengthSeconds*48000.0f;
if(musicStreams[musicBuffer.index].modctx.mod_loaded) jar_mod_seek_start(&musicStreams[musicBuffer.index].modctx);
musicStreams[musicBuffer.index].totalSamplesLeft = musicStreams[musicBuffer.index].totalLengthSeconds*48000.0f;
}
else
{
stb_vorbis_seek_start(musicStreams[index].stream);
musicStreams[index].totalSamplesLeft = stb_vorbis_stream_length_in_samples(musicStreams[index].stream)*musicStreams[index].mixc->channels;
stb_vorbis_seek_start(musicStreams[musicBuffer.index].stream);
musicStreams[musicBuffer.index].totalSamplesLeft = stb_vorbis_stream_length_in_samples(musicStreams[musicBuffer.index].stream)*musicStreams[musicBuffer.index].mixc->channels;
}
// Determine if music stream is ready to be written
alGetSourcei(musicStreams[index].mixc->alSource, AL_BUFFERS_PROCESSED, &processed);
active = BufferMusicStream(index, processed);
alGetSourcei(musicStreams[musicBuffer.index].mixc->alSource, AL_BUFFERS_PROCESSED, &processed);
active = BufferMusicStream(musicBuffer.index, processed);
}
if (alGetError() != AL_NO_ERROR) TraceLog(WARNING, "Error buffering data...");
alGetSourcei(musicStreams[index].mixc->alSource, AL_SOURCE_STATE, &state);
if (state != AL_PLAYING && active) alSourcePlay(musicStreams[index].mixc->alSource);
alGetSourcei(musicStreams[musicBuffer.index].mixc->alSource, AL_SOURCE_STATE, &state);
if (state != AL_PLAYING && active) alSourcePlay(musicStreams[musicBuffer.index].mixc->alSource);
if (!active) StopMusicStream(musicBuffer);
if (!active) StopMusicStream(index);
}
}
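UpdateMusicStream() is meant to be called once per frame for each playing MusicBuffer; a sketch of that loop follows (the music handle is assumed to come from LoadMusicBufferStream()). Note that a non-looping stream is stopped automatically via StopMusicStream() once it runs out of samples.

for (;;)                            // per-frame loop (break condition omitted)
{
    UpdateMusicStream(music);       // re-fills any processed OpenAL buffers for this stream
    // ... render the frame ...
}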
@ -1016,57 +1061,57 @@ void UpdateMusicStream(int index)
int GetMusicStreamCount(void)
{
int musicCount = 0;
// Count active music streams
for (int musicIndex = 0; musicIndex < MAX_MUSIC_STREAMS; musicIndex++)
{
if(musicStreams[musicIndex].stream != NULL || musicStreams[musicIndex].chipTune) musicCount++;
}
return musicCount;
}
// Pause music playing
void PauseMusicStream(int index)
void PauseMusicStream(MusicBuffer musicBuffer)
{
// Pause music stream if music available!
if (index < MAX_MUSIC_STREAMS && musicStreams[index].mixc && musicStreams[index].enabled)
if (musicBuffer.index < MAX_MUSIC_STREAMS && musicStreams[musicBuffer.index].mixc && musicStreams[musicBuffer.index].enabled)
{
TraceLog(INFO, "Pausing music stream");
alSourcePause(musicStreams[index].mixc->alSource);
musicStreams[index].mixc->playing = false;
alSourcePause(musicStreams[musicBuffer.index].mixc->alSource);
musicStreams[musicBuffer.index].mixc->playing = false;
}
}
// Resume music playing
void ResumeMusicStream(int index)
void ResumeMusicStream(MusicBuffer musicBuffer)
{
// Resume music playing... if music available!
ALenum state;
if (index < MAX_MUSIC_STREAMS && musicStreams[index].mixc)
if (musicBuffer.index < MAX_MUSIC_STREAMS && musicStreams[musicBuffer.index].mixc)
{
alGetSourcei(musicStreams[index].mixc->alSource, AL_SOURCE_STATE, &state);
alGetSourcei(musicStreams[musicBuffer.index].mixc->alSource, AL_SOURCE_STATE, &state);
if (state == AL_PAUSED)
{
TraceLog(INFO, "Resuming music stream");
alSourcePlay(musicStreams[index].mixc->alSource);
musicStreams[index].mixc->playing = true;
alSourcePlay(musicStreams[musicBuffer.index].mixc->alSource);
musicStreams[musicBuffer.index].mixc->playing = true;
}
}
}
// Check if the given music stream is playing
bool IsMusicPlaying(int index)
bool IsMusicPlaying(MusicBuffer musicBuffer)
{
bool playing = false;
ALint state;
if (index < MAX_MUSIC_STREAMS && musicStreams[index].mixc)
if (musicBuffer.index < MAX_MUSIC_STREAMS && musicStreams[musicBuffer.index].mixc)
{
alGetSourcei(musicStreams[index].mixc->alSource, AL_SOURCE_STATE, &state);
alGetSourcei(musicStreams[musicBuffer.index].mixc->alSource, AL_SOURCE_STATE, &state);
if (state == AL_PLAYING) playing = true;
}
@ -1074,57 +1119,57 @@ bool IsMusicPlaying(int index)
}
// Set volume for music
void SetMusicVolume(int index, float volume)
void SetMusicVolume(MusicBuffer musicBuffer, float volume)
{
if (index < MAX_MUSIC_STREAMS && musicStreams[index].mixc)
if (musicBuffer.index < MAX_MUSIC_STREAMS && musicStreams[musicBuffer.index].mixc)
{
alSourcef(musicStreams[index].mixc->alSource, AL_GAIN, volume);
alSourcef(musicStreams[musicBuffer.index].mixc->alSource, AL_GAIN, volume);
}
}
// Set pitch for music
void SetMusicPitch(int index, float pitch)
void SetMusicPitch(MusicBuffer musicBuffer, float pitch)
{
if (index < MAX_MUSIC_STREAMS && musicStreams[index].mixc)
if (musicBuffer.index < MAX_MUSIC_STREAMS && musicStreams[musicBuffer.index].mixc)
{
alSourcef(musicStreams[index].mixc->alSource, AL_PITCH, pitch);
alSourcef(musicStreams[musicBuffer.index].mixc->alSource, AL_PITCH, pitch);
}
}
// Get music time length (in seconds)
float GetMusicTimeLength(int index)
float GetMusicTimeLength(MusicBuffer musicBuffer)
{
float totalSeconds;
if (musicStreams[index].chipTune) totalSeconds = (float)musicStreams[index].totalLengthSeconds;
else totalSeconds = stb_vorbis_stream_length_in_seconds(musicStreams[index].stream);
if (musicStreams[musicBuffer.index].chipTune) totalSeconds = (float)musicStreams[musicBuffer.index].totalLengthSeconds;
else totalSeconds = stb_vorbis_stream_length_in_seconds(musicStreams[musicBuffer.index].stream);
return totalSeconds;
}
// Get current music time played (in seconds)
float GetMusicTimePlayed(int index)
float GetMusicTimePlayed(MusicBuffer musicBuffer)
{
float secondsPlayed = 0.0f;
if (index < MAX_MUSIC_STREAMS && musicStreams[index].mixc)
if (musicBuffer.index < MAX_MUSIC_STREAMS && musicStreams[musicBuffer.index].mixc)
{
if (musicStreams[index].chipTune && musicStreams[index].xmctx)
if (musicStreams[musicBuffer.index].chipTune && musicStreams[musicBuffer.index].xmctx)
{
uint64_t samples;
jar_xm_get_position(musicStreams[index].xmctx, NULL, NULL, NULL, &samples);
secondsPlayed = (float)samples/(48000.0f*musicStreams[index].mixc->channels); // Not sure if this is the correct value
jar_xm_get_position(musicStreams[musicBuffer.index].xmctx, NULL, NULL, NULL, &samples);
secondsPlayed = (float)samples/(48000.0f*musicStreams[musicBuffer.index].mixc->channels); // Not sure if this is the correct value
}
else if(musicStreams[index].chipTune && musicStreams[index].modctx.mod_loaded)
else if(musicStreams[musicBuffer.index].chipTune && musicStreams[musicBuffer.index].modctx.mod_loaded)
{
long numsamp = jar_mod_current_samples(&musicStreams[index].modctx);
long numsamp = jar_mod_current_samples(&musicStreams[musicBuffer.index].modctx);
secondsPlayed = (float)numsamp/(48000.0f);
}
else
{
int totalSamples = stb_vorbis_stream_length_in_samples(musicStreams[index].stream)*musicStreams[index].mixc->channels;
int samplesPlayed = totalSamples - musicStreams[index].totalSamplesLeft;
secondsPlayed = (float)samplesPlayed/(musicStreams[index].mixc->sampleRate*musicStreams[index].mixc->channels);
int totalSamples = stb_vorbis_stream_length_in_samples(musicStreams[musicBuffer.index].stream)*musicStreams[musicBuffer.index].mixc->channels;
int samplesPlayed = totalSamples - musicStreams[musicBuffer.index].totalSamplesLeft;
secondsPlayed = (float)samplesPlayed/(musicStreams[musicBuffer.index].mixc->sampleRate*musicStreams[musicBuffer.index].mixc->channels);
}
}
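The two time queries above combine naturally into a playback-progress readout; a small sketch, again assuming a music handle from LoadMusicBufferStream().

float played = GetMusicTimePlayed(music);
float length = GetMusicTimeLength(music);
if (length > 0.0f) TraceLog(INFO, "Music progress: %.1f / %.1f sec (%.0f%%)", played, length, 100.0f*played/length);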
@ -1140,10 +1185,10 @@ static bool BufferMusicStream(int index, int numBuffers)
{
short pcm[MUSIC_BUFFER_SIZE_SHORT];
float pcmf[MUSIC_BUFFER_SIZE_FLOAT];
int size = 0; // Total size of data streamed in L+R samples for XM floats, individual L or R for OGG shorts
bool active = true; // We can get more data from stream (not finished)
if (musicStreams[index].chipTune) // There is no end of stream for XM files; once the end is reached, zeros are generated for non-looped chiptunes.
{
for (int i = 0; i < numBuffers; i++)
@ -1152,7 +1197,7 @@ static bool BufferMusicStream(int index, int numBuffers)
{
if (musicStreams[index].totalSamplesLeft >= MUSIC_BUFFER_SIZE_SHORT) size = MUSIC_BUFFER_SIZE_SHORT/2;
else size = musicStreams[index].totalSamplesLeft/2;
jar_mod_fillbuffer(&musicStreams[index].modctx, pcm, size, 0 );
BufferMixChannel(musicStreams[index].mixc, pcm, size*2);
}
@ -1160,13 +1205,13 @@ static bool BufferMusicStream(int index, int numBuffers)
{
if (musicStreams[index].totalSamplesLeft >= MUSIC_BUFFER_SIZE_FLOAT) size = MUSIC_BUFFER_SIZE_FLOAT/2;
else size = musicStreams[index].totalSamplesLeft/2;
jar_xm_generate_samples(musicStreams[index].xmctx, pcmf, size); // reads 2*readlen shorts and moves them to buffer+size memory location
BufferMixChannel(musicStreams[index].mixc, pcmf, size*2);
}
musicStreams[index].totalSamplesLeft -= size;
if (musicStreams[index].totalSamplesLeft <= 0)
{
active = false;
@ -1178,13 +1223,13 @@ static bool BufferMusicStream(int index, int numBuffers)
{
if (musicStreams[index].totalSamplesLeft >= MUSIC_BUFFER_SIZE_SHORT) size = MUSIC_BUFFER_SIZE_SHORT;
else size = musicStreams[index].totalSamplesLeft;
for (int i = 0; i < numBuffers; i++)
{
int streamedBytes = stb_vorbis_get_samples_short_interleaved(musicStreams[index].stream, musicStreams[index].mixc->channels, pcm, size);
BufferMixChannel(musicStreams[index].mixc, pcm, streamedBytes * musicStreams[index].mixc->channels);
musicStreams[index].totalSamplesLeft -= streamedBytes * musicStreams[index].mixc->channels;
if (musicStreams[index].totalSamplesLeft <= 0)
{
active = false;
@ -1371,8 +1416,8 @@ static Wave LoadOGG(char *fileName)
static void UnloadWave(Wave wave)
{
free(wave.data);
TraceLog(INFO, "Unloaded wave data from RAM");
TraceLog(INFO, "Unloaded wave data");
}
// Some required functions for audio standalone module version