7 Commits

Author SHA1 Message Date
Ray
19e6352d37 Update shapes_easings_testbed.c 2026-02-20 18:16:49 +01:00
Ray
d03a59ca3e Update core_directory_files.c 2026-02-20 16:36:13 +01:00
Ray
2454b3ed4b REVIEWED: TextReplace() and TextLength(), avoid using strcpy() 2026-02-20 16:27:08 +01:00
Ray
d996bf2bbd Update textures_screen_buffer.c 2026-02-20 16:06:59 +01:00
Ray
f33823cefe Update textures_screen_buffer.c 2026-02-20 15:55:38 +01:00
0aacd330d4 [raudio] Remove usage of ma_data_converter_get_required_input_frame_count() (#5568)
* Audio: Remove use of ma_data_converter_get_required_input_frame_count().

This function is being removed from miniaudio. Making this work with
the current architecture of raylib requires the use of a cache.

This commit implements a generic solution that works across all
AudioBuffer types (static, stream, and callback-based), but the static
case could be optimized to avoid the cache by incorporating the
functionality of ReadAudioBufferFramesInInternalFormat() into
ReadAudioBufferFramesInMixingFormat(). It would be impractical to avoid
the cache with stream and callback-based AudioBuffers, however, so this
commit sticks with the generic solution.

* Audio: Correct usage of miniaudio's dynamic rate adjustment.

This affects pitch shifting. The output rate is being modified with
ma_data_converter_set_rate(), but that value is then used in the
computation of the output rate the next time SetAudioBufferPitch() is
called, which results in a cascade. The correct way to do this is to use
an anchored output rate as the basis for the pitch calculation. In this
case, it's the device's sample rate that acts as the anchor (a standalone
sketch of this follows the changed-files summary below).

* Audio: Optimize memory usage for data conversion.

This reduces the per-AudioBuffer conversion cache from 256 PCM frames
down to 8.
2026-02-20 13:46:41 +01:00
Ray
ce617cd814 Update rlgl.h 2026-02-20 11:46:46 +01:00
6 changed files with 128 additions and 90 deletions
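For reference, a minimal standalone sketch (not raylib code) of the anchored pitch-rate calculation described in the commit message above; deviceSampleRate stands in for AUDIO.System.device.sampleRate and the two helpers are hypothetical, for illustration only.

#include <stdio.h>

// Cascading (incorrect): each call scales the previously scaled rate,
// so repeated pitch changes compound instead of replacing each other
static unsigned int SetPitchCascading(unsigned int currentRateOut, float pitch)
{
    return (unsigned int)((float)currentRateOut/pitch);
}

// Anchored (correct): the device sample rate is the fixed basis,
// so the same pitch always yields the same output rate
static unsigned int SetPitchAnchored(unsigned int deviceSampleRate, float pitch)
{
    return (unsigned int)((float)deviceSampleRate/pitch);
}

int main(void)
{
    unsigned int deviceSampleRate = 48000;
    unsigned int rateOut = deviceSampleRate;

    // Setting pitch = 2.0f twice should leave the rate at 24000 both times
    rateOut = SetPitchCascading(rateOut, 2.0f);    // 24000
    rateOut = SetPitchCascading(rateOut, 2.0f);    // 12000 -> cascade
    printf("cascading: %u\n", rateOut);

    printf("anchored:  %u\n", SetPitchAnchored(deviceSampleRate, 2.0f));    // 24000 every time

    return 0;
}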

View File

@ -81,27 +81,6 @@ int main(void)
GuiListViewEx((Rectangle){ 0, 50, GetScreenWidth(), GetScreenHeight() - 50 },
files.paths, files.count, &listScrollIndex, &listItemActive, &listItemFocused);
/*
for (int i = 0; i < (int)files.count; i++)
{
Color color = Fade(LIGHTGRAY, 0.3f);
if (!IsPathFile(files.paths[i]) && DirectoryExists(files.paths[i]))
{
if (GuiButton((Rectangle){0.0f, 85.0f + 40.0f*(float)i, screenWidth, 40}, ""))
{
TextCopy(directory, files.paths[i]);
UnloadDirectoryFiles(files);
files = LoadDirectoryFiles(directory);
continue;
}
}
DrawRectangle(0, 85 + 40*i, screenWidth, 40, color);
DrawText(GetFileName(files.paths[i]), 120, 100 + 40*i, 10, GRAY);
}
*/
EndDrawing();
//----------------------------------------------------------------------------------
}

View File

@ -17,7 +17,7 @@
#include "raylib.h"
#include "reasings.h" // Required for easing functions
#include "reasings.h" // Required for: easing functions
#define FONT_SIZE 20

View File

@ -52,6 +52,7 @@ int main(void)
float hue = t*t;
float saturation = t;
float value = t;
palette[i] = ColorFromHSV(250.0f + 150.0f*hue, saturation, value);
}
@ -66,11 +67,9 @@ int main(void)
// Grow flameRoot
for (int x = 2; x < flameWidth; x++)
{
unsigned char flame = flameRootBuffer[x];
if (flame == 255) continue;
int flame = (int)flameRootBuffer[x];
flame += GetRandomValue(0, 2);
if (flame > 255) flame = 255;
flameRootBuffer[x] = flame;
flameRootBuffer[x] = (flame > 255)? 255: (unsigned char)flame;
}
// Transfer flameRoot to indexBuffer
@ -83,8 +82,7 @@ int main(void)
// Clear top row, because it can't move any higher
for (int x = 0; x < imageWidth; x++)
{
if (indexBuffer[x] == 0) continue;
indexBuffer[x] = 0;
if (indexBuffer[x] != 0) indexBuffer[x] = 0;
}
// Skip top row, it is already cleared
@ -94,20 +92,24 @@ int main(void)
{
unsigned int i = x + y*imageWidth;
unsigned char colorIndex = indexBuffer[i];
if (colorIndex == 0) continue;
if (colorIndex != 0)
{
// Move pixel a row above
indexBuffer[i] = 0;
int moveX = GetRandomValue(0, 2) - 1;
int newX = x + moveX;
if (newX < 0 || newX >= imageWidth) continue;
if ((newX > 0) && (newX < imageWidth))
{
unsigned int iabove = i - imageWidth + moveX;
int decay = GetRandomValue(0, 3);
colorIndex -= (decay < colorIndex)? decay : colorIndex;
indexBuffer[iabove] = colorIndex;
}
}
}
}
// Update screenImage with palette colors
for (int y = 1; y < imageHeight; y++)
@ -117,6 +119,7 @@ int main(void)
unsigned int i = x + y*imageWidth;
unsigned char colorIndex = indexBuffer[i];
Color col = palette[colorIndex];
ImageDrawPixel(&screenImage, x, y, col);
}
}

View File

@ -295,6 +295,10 @@ typedef struct tagBITMAPINFOHEADER {
#define MAX_AUDIO_BUFFER_POOL_CHANNELS 16 // Audio pool channels
#endif
#ifndef AUDIO_BUFFER_RESIDUAL_CAPACITY
#define AUDIO_BUFFER_RESIDUAL_CAPACITY 8 // In PCM frames. For resampling and pitch shifting.
#endif
//----------------------------------------------------------------------------------
// Types and Structures Definition
//----------------------------------------------------------------------------------
@ -337,6 +341,8 @@ typedef enum {
// Audio buffer struct
struct rAudioBuffer {
ma_data_converter converter; // Audio data converter
unsigned char* converterResidual; // Cached residual input frames for use by the converter
unsigned int converterResidualCount; // The number of valid frames sitting in converterResidual
AudioCallback callback; // Audio buffer callback for buffer filling on audio threads
rAudioProcessor *processor; // Audio processor
@ -586,6 +592,15 @@ AudioBuffer *LoadAudioBuffer(ma_format format, ma_uint32 channels, ma_uint32 sam
return NULL;
}
// A cache for use by the converter is necessary when resampling because
// when generating output frames a different number of input frames will
// be consumed. Any residual input frames need to be kept track of to
// ensure there are no discontinuities. Since raylib supports pitch
// shifting, which is done through resampling, a cache will always be
// required. This will be kept relatively small to avoid too much wastage.
audioBuffer->converterResidualCount = 0;
audioBuffer->converterResidual = (unsigned char*)RL_CALLOC(AUDIO_BUFFER_RESIDUAL_CAPACITY*ma_get_bytes_per_frame(format, channels), 1);
// Init audio buffer values
audioBuffer->volume = 1.0f;
audioBuffer->pitch = 1.0f;
@ -621,6 +636,7 @@ void UnloadAudioBuffer(AudioBuffer *buffer)
{
UntrackAudioBuffer(buffer);
ma_data_converter_uninit(&buffer->converter, NULL);
RL_FREE(buffer->converterResidual);
RL_FREE(buffer->data);
RL_FREE(buffer);
}
@ -705,7 +721,7 @@ void SetAudioBufferPitch(AudioBuffer *buffer, float pitch)
// Note that this changes the duration of the sound:
// - higher pitches will make the sound faster
// - lower pitches make it slower
ma_uint32 outputSampleRate = (ma_uint32)((float)buffer->converter.sampleRateOut/pitch);
ma_uint32 outputSampleRate = (ma_uint32)((float)AUDIO.System.device.sampleRate/pitch);
ma_data_converter_set_rate(&buffer->converter, buffer->converter.sampleRateIn, outputSampleRate);
buffer->pitch = pitch;
@ -2456,38 +2472,78 @@ static ma_uint32 ReadAudioBufferFramesInMixingFormat(AudioBuffer *audioBuffer, f
// NOTE: Continuously converting data from the AudioBuffer's internal format to the mixing format,
// which should be defined by the output format of the data converter.
// This is done until frameCount frames have been output.
The important detail to remember is that no more data than required should ever be read
for the specified number of output frames.
This can be achieved with ma_data_converter_get_required_input_frame_count()
ma_uint32 bpf = ma_get_bytes_per_frame(audioBuffer->converter.formatIn, audioBuffer->converter.channelsIn);
ma_uint8 inputBuffer[4096] = { 0 };
ma_uint32 inputBufferFrameCap = sizeof(inputBuffer)/ma_get_bytes_per_frame(audioBuffer->converter.formatIn, audioBuffer->converter.channelsIn);
ma_uint32 inputBufferFrameCap = sizeof(inputBuffer)/bpf;
ma_uint32 totalOutputFramesProcessed = 0;
while (totalOutputFramesProcessed < frameCount)
{
float *runningFramesOut = framesOut + (totalOutputFramesProcessed*audioBuffer->converter.channelsOut);
ma_uint64 outputFramesToProcessThisIteration = frameCount - totalOutputFramesProcessed;
ma_uint64 inputFramesToProcessThisIteration = 0;
(void)ma_data_converter_get_required_input_frame_count(&audioBuffer->converter, outputFramesToProcessThisIteration, &inputFramesToProcessThisIteration);
if (inputFramesToProcessThisIteration > inputBufferFrameCap)
// Process any residual input frames from the previous read first.
if (audioBuffer->converterResidualCount > 0)
{
inputFramesToProcessThisIteration = inputBufferFrameCap;
ma_uint64 inputFramesProcessedThisIteration = audioBuffer->converterResidualCount;
ma_uint64 outputFramesProcessedThisIteration = outputFramesToProcessThisIteration;
ma_data_converter_process_pcm_frames(&audioBuffer->converter, audioBuffer->converterResidual, &inputFramesProcessedThisIteration, runningFramesOut, &outputFramesProcessedThisIteration);
// Make sure the data in the cache is consumed. This can be optimized to use a cursor instead of a memmove().
memmove(audioBuffer->converterResidual, audioBuffer->converterResidual + inputFramesProcessedThisIteration*bpf, (size_t)(AUDIO_BUFFER_RESIDUAL_CAPACITY - inputFramesProcessedThisIteration) * bpf);
audioBuffer->converterResidualCount -= (ma_uint32)inputFramesProcessedThisIteration; // Safe cast
totalOutputFramesProcessed += (ma_uint32)outputFramesProcessedThisIteration; // Safe cast
}
else
{
// Getting here means there are no residual frames from the previous read. Fresh data can now be
// pulled from the AudioBuffer and processed.
//
// A best guess needs to be made to determine how many input frames to pull from the
// buffer. There are three possible outcomes: 1) exact; 2) underestimated; 3) overestimated.
//
// When the guess is exactly correct or underestimated there is nothing special to handle - it'll be
// handled naturally by the loop.
//
// When the guess is overestimated, that's when it gets more complicated. In this case, any overflow
// needs to be stored in a buffer for later processing by the next read.
ma_uint32 estimatedInputFrameCount = (ma_uint32)(((float)audioBuffer->converter.resampler.sampleRateIn / audioBuffer->converter.resampler.sampleRateOut) * outputFramesToProcessThisIteration);
if (estimatedInputFrameCount == 0)
{
estimatedInputFrameCount = 1; // Make sure at least one input frame is read.
}
float *runningFramesOut = framesOut + (totalOutputFramesProcessed*audioBuffer->converter.channelsOut);
if (estimatedInputFrameCount > inputBufferFrameCap)
{
estimatedInputFrameCount = inputBufferFrameCap;
}
// At this point we can convert the data to our mixing format
ma_uint64 inputFramesProcessedThisIteration = ReadAudioBufferFramesInInternalFormat(audioBuffer, inputBuffer, (ma_uint32)inputFramesToProcessThisIteration);
estimatedInputFrameCount = ReadAudioBufferFramesInInternalFormat(audioBuffer, inputBuffer, estimatedInputFrameCount);
ma_uint64 inputFramesProcessedThisIteration = estimatedInputFrameCount;
ma_uint64 outputFramesProcessedThisIteration = outputFramesToProcessThisIteration;
ma_data_converter_process_pcm_frames(&audioBuffer->converter, inputBuffer, &inputFramesProcessedThisIteration, runningFramesOut, &outputFramesProcessedThisIteration);
totalOutputFramesProcessed += (ma_uint32)outputFramesProcessedThisIteration; // Safe cast
if (estimatedInputFrameCount > inputFramesProcessedThisIteration)
{
// Getting here means the input frame count was overestimated. The residual needs
// to be stored for later use.
ma_uint64 residualFrameCount = estimatedInputFrameCount - inputFramesProcessedThisIteration;
if (inputFramesProcessedThisIteration < inputFramesToProcessThisIteration) break; // Ran out of input data
// A safety check to make sure the capacity of the residual cache is not exceeded.
if (residualFrameCount > AUDIO_BUFFER_RESIDUAL_CAPACITY)
{
residualFrameCount = AUDIO_BUFFER_RESIDUAL_CAPACITY;
}
// This should never be hit, but added here for safety
// Ensures we get out of the loop when neither input nor output frames are processed
if ((inputFramesProcessedThisIteration == 0) && (outputFramesProcessedThisIteration == 0)) break;
memcpy(audioBuffer->converterResidual, inputBuffer + inputFramesProcessedThisIteration*bpf, (size_t)(residualFrameCount * bpf));
audioBuffer->converterResidualCount = residualFrameCount;
}
totalOutputFramesProcessed += (ma_uint32)outputFramesProcessedThisIteration;
}
}
return totalOutputFramesProcessed;
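
A minimal standalone sketch (not part of this diff) of the input-frame estimate used above: roughly outputFrames*sampleRateIn/sampleRateOut, forced to at least one frame and clamped to the staging buffer capacity. EstimateInputFrames() is a hypothetical helper for illustration only.

#include <stdio.h>

static unsigned int EstimateInputFrames(unsigned int outputFrames,
                                        unsigned int sampleRateIn,
                                        unsigned int sampleRateOut,
                                        unsigned int bufferFrameCap)
{
    unsigned int estimate = (unsigned int)(((float)sampleRateIn/sampleRateOut)*outputFrames);
    if (estimate == 0) estimate = 1;                            // Always read at least one input frame
    if (estimate > bufferFrameCap) estimate = bufferFrameCap;   // Never exceed the staging buffer
    return estimate;
}

int main(void)
{
    // Example: 44100 Hz source resampled to a 48000 Hz device, 512 output frames requested
    printf("%u input frames estimated\n", EstimateInputFrames(512, 44100, 48000, 4096));
    return 0;
}

Any input frames pulled but not consumed by the converter are what end up in converterResidual, capped at AUDIO_BUFFER_RESIDUAL_CAPACITY frames.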

View File

@ -3422,7 +3422,7 @@ unsigned int rlLoadTextureDepth(int width, int height, bool useRenderBuffer)
// Possible formats: GL_DEPTH_COMPONENT16, GL_DEPTH_COMPONENT24, GL_DEPTH_COMPONENT32 and GL_DEPTH_COMPONENT32F
unsigned int glInternalFormat = GL_DEPTH_COMPONENT;
#if (defined(GRAPHICS_API_OPENGL_ES2) || defined(GRAPHICS_API_OPENGL_ES3))
#if defined(GRAPHICS_API_OPENGL_ES2)
// WARNING: WebGL platform requires unsized internal format definition (GL_DEPTH_COMPONENT)
// while other platforms using OpenGL ES 2.0 require/support sized internal formats depending on the GPU capabilities
if (!RLGL.ExtSupported.texDepthWebGL || useRenderBuffer)

View File

@ -1497,15 +1497,14 @@ void UnloadTextLines(char **lines, int lineCount)
}
// Get text length in bytes, check for \0 character
// NOTE: Alternative: use strlen(text)
unsigned int TextLength(const char *text)
{
unsigned int length = 0;
if (text != NULL)
{
// NOTE: Alternative: use strlen(text)
while (*text++) length++;
while (text[length] != '\0') length++;
}
return length;
@ -1718,7 +1717,7 @@ char *TextReplace(const char *text, const char *search, const char *replacement)
{
char *result = NULL;
if ((text != NULL) && (search != NULL))
if ((text != NULL) && (search != NULL) && (search[0] != '\0'))
{
if (replacement == NULL) replacement = "";
@ -1732,8 +1731,6 @@ char *TextReplace(const char *text, const char *search, const char *replacement)
textLen = TextLength(text);
searchLen = TextLength(search);
if (searchLen == 0) return NULL; // Empty search causes infinite loop during count
replaceLen = TextLength(replacement);
// Count the number of replacements needed
@ -1742,15 +1739,15 @@ char *TextReplace(const char *text, const char *search, const char *replacement)
// Allocate returning string and point temp to it
int tempLen = textLen + (replaceLen - searchLen)*count + 1;
temp = result = (char *)RL_MALLOC(tempLen);
if (!result) return NULL; // Memory could not be allocated
temp = result = (char *)RL_CALLOC(tempLen, sizeof(char));
if (result != NULL) // Memory was allocated
{
// First time through the loop, all the variables are set correctly from here on,
// - 'temp' points to the end of the result string
// - 'insertPoint' points to the next occurrence of replace in text
// - 'text' points to the remainder of text after "end of replace"
while (count--)
while (count > 0)
{
insertPoint = (char *)strstr(text, search);
lastReplacePos = (int)(insertPoint - text);
@ -1764,12 +1761,15 @@ char *TextReplace(const char *text, const char *search, const char *replacement)
temp += replaceLen;
}
text += lastReplacePos + searchLen; // Move to next "end of replace"
text += (lastReplacePos + searchLen); // Move to next "end of replace"
count--;
}
// Copy remaining text part after replacement to result (pointed by moving temp)
strcpy(temp, text); // OK
//strncpy(temp, text, tempLen - 1); // WRONG
// NOTE: Text pointer internal copy has been updated along the process
strncpy(temp, text, TextLength(text));
}
}
return result;
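
For reference, a minimal usage sketch (not part of this diff) of the reviewed functions, assuming the signatures shown above; the string returned by TextReplace() is heap-allocated and should be freed by the caller, e.g. with MemFree().

#include "raylib.h"
#include <stdio.h>

int main(void)
{
    const char *text = "raylib is awesome, raylib is simple";

    // TextLength() counts bytes up to the terminating '\0'
    printf("length: %u\n", TextLength(text));                // 35

    // TextReplace() returns a newly allocated string; the input is not modified
    char *replaced = TextReplace(text, "raylib", "raygui");
    if (replaced != NULL)
    {
        printf("%s\n", replaced);                            // "raygui is awesome, raygui is simple"
        MemFree(replaced);                                   // Caller owns the returned buffer
    }

    return 0;
}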