Mirror of https://github.com/raysan5/raylib.git

Commit: Update miniaudio.h
File changed: src/external/miniaudio.h (vendored), 508 lines
@@ -1,6 +1,6 @@
 /*
 Audio playback and capture library. Choice of public domain or MIT-0. See license statements at the end of this file.
-miniaudio - v0.10.32 - TBD
+miniaudio - v0.10.32 - 2020-02-23

 David Reid - mackron@gmail.com

@@ -1510,7 +1510,7 @@ extern "C" {

 #define MA_VERSION_MAJOR 0
 #define MA_VERSION_MINOR 10
-#define MA_VERSION_REVISION 31
+#define MA_VERSION_REVISION 32
 #define MA_VERSION_STRING MA_XSTRINGIFY(MA_VERSION_MAJOR) "." MA_XSTRINGIFY(MA_VERSION_MINOR) "." MA_XSTRINGIFY(MA_VERSION_REVISION)

 #if defined(_MSC_VER) && !defined(__clang__)
@@ -1981,7 +1981,8 @@ typedef enum
     ma_thread_priority_default = 0
 } ma_thread_priority;

-typedef unsigned char ma_spinlock;
+/* Spinlocks are 32-bit for compatibility reasons. */
+typedef ma_uint32 ma_spinlock;

 #if defined(MA_WIN32)
 typedef ma_handle ma_thread;
@@ -3588,6 +3589,32 @@ struct ma_context_config
     ma_backend_callbacks custom;
 };

+/* WASAPI specific structure for some commands which must run on a common thread due to bugs in WASAPI. */
+typedef struct
+{
+    int code;
+    ma_event* pEvent;   /* This will be signalled when the event is complete. */
+    union
+    {
+        struct
+        {
+            int _unused;
+        } quit;
+        struct
+        {
+            ma_device_type deviceType;
+            void* pAudioClient;
+            void** ppAudioClientService;
+            ma_result result;   /* The result from creating the audio client service. */
+        } createAudioClient;
+        struct
+        {
+            ma_device* pDevice;
+            ma_device_type deviceType;
+        } releaseAudioClient;
+    } data;
+} ma_context_command__wasapi;
+
 struct ma_context
 {
     ma_backend_callbacks callbacks;
@@ -3609,7 +3636,12 @@ struct ma_context
 #ifdef MA_SUPPORT_WASAPI
         struct
         {
-            int _unused;
+            ma_thread commandThread;
+            ma_mutex commandLock;
+            ma_semaphore commandSem;
+            ma_uint32 commandIndex;
+            ma_uint32 commandCount;
+            ma_context_command__wasapi commands[4];
         } wasapi;
 #endif
 #ifdef MA_SUPPORT_DSOUND
@@ -4072,8 +4104,8 @@ struct ma_device
             ma_performance_profile originalPerformanceProfile;
             ma_uint32 periodSizeInFramesPlayback;
             ma_uint32 periodSizeInFramesCapture;
-            MA_ATOMIC ma_bool8 isStartedCapture;   /* Can be read and written simultaneously across different threads. Must be used atomically. */
-            MA_ATOMIC ma_bool8 isStartedPlayback;  /* Can be read and written simultaneously across different threads. Must be used atomically. */
+            MA_ATOMIC ma_bool32 isStartedCapture;  /* Can be read and written simultaneously across different threads. Must be used atomically, and must be 32-bit. */
+            MA_ATOMIC ma_bool32 isStartedPlayback; /* Can be read and written simultaneously across different threads. Must be used atomically, and must be 32-bit. */
             ma_bool8 noAutoConvertSRC;             /* When set to true, disables the use of AUDCLNT_STREAMFLAGS_AUTOCONVERTPCM. */
             ma_bool8 noDefaultQualitySRC;          /* When set to true, disables the use of AUDCLNT_STREAMFLAGS_SRC_DEFAULT_QUALITY. */
             ma_bool8 noHardwareOffloading;
@@ -4230,7 +4262,7 @@ struct ma_device
             ma_uint32 currentPeriodFramesRemainingCapture;
             ma_uint64 lastProcessedFramePlayback;
             ma_uint64 lastProcessedFrameCapture;
-            MA_ATOMIC ma_bool8 isStarted;   /* Read and written by multiple threads. Must be used atomically. */
+            MA_ATOMIC ma_bool32 isStarted;  /* Read and written by multiple threads. Must be used atomically, and must be 32-bit for compiler compatibility. */
         } null_device;
 #endif
 };
@@ -7863,7 +7895,7 @@ static ma_result ma_result_from_errno(int e)

 MA_API ma_result ma_fopen(FILE** ppFile, const char* pFilePath, const char* pOpenMode)
 {
-#if _MSC_VER && _MSC_VER >= 1400
+#if defined(_MSC_VER) && _MSC_VER >= 1400
     errno_t err;
 #endif

@@ -7875,7 +7907,7 @@ MA_API ma_result ma_fopen(FILE** ppFile, const char* pFilePath, const char* pOpe
         return MA_INVALID_ARGS;
     }

-#if _MSC_VER && _MSC_VER >= 1400
+#if defined(_MSC_VER) && _MSC_VER >= 1400
     err = fopen_s(ppFile, pFilePath, pOpenMode);
     if (err != 0) {
         return ma_result_from_errno(err);
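Note on the `#if defined(_MSC_VER)` changes (here and in the dr_wav/dr_flac/dr_mp3 hunks further down): `#if _MSC_VER && ...` evaluates an undefined `_MSC_VER` to 0, which works but can trigger "undefined preprocessor identifier" style warnings on non-MSVC compilers; testing `defined(_MSC_VER)` first avoids that. A minimal sketch of the pattern, with a hypothetical helper name (not part of miniaudio):

    #include <stdio.h>

    /* Hypothetical helper illustrating the guard; miniaudio's ma_fopen above does the same thing. */
    static FILE* example_fopen(const char* pFilePath, const char* pOpenMode)
    {
    #if defined(_MSC_VER) && _MSC_VER >= 1400
        /* MSVC 2005+ flags plain fopen() as deprecated, so use fopen_s(). */
        FILE* pFile = NULL;
        if (fopen_s(&pFile, pFilePath, pOpenMode) != 0) {
            return NULL;
        }
        return pFile;
    #else
        /* _MSC_VER may be undefined here, so it must be wrapped in defined() before being compared. */
        return fopen(pFilePath, pOpenMode);
    #endif
    }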
@@ -10155,11 +10187,11 @@ static MA_INLINE ma_result ma_spinlock_lock_ex(volatile ma_spinlock* pSpinlock,
     }

     for (;;) {
-        if (c89atomic_flag_test_and_set_explicit(pSpinlock, c89atomic_memory_order_acquire) == 0) {
+        if (c89atomic_exchange_explicit_32(pSpinlock, 1, c89atomic_memory_order_acquire) == 0) {
             break;
         }

-        while (c89atomic_load_explicit_8(pSpinlock, c89atomic_memory_order_relaxed) == 1) {
+        while (c89atomic_load_explicit_32(pSpinlock, c89atomic_memory_order_relaxed) == 1) {
             if (yield) {
                 ma_yield();
             }
@@ -10185,7 +10217,7 @@ MA_API ma_result ma_spinlock_unlock(volatile ma_spinlock* pSpinlock)
         return MA_INVALID_ARGS;
     }

-    c89atomic_flag_clear_explicit(pSpinlock, c89atomic_memory_order_release);
+    c89atomic_store_explicit_32(pSpinlock, 0, c89atomic_memory_order_release);
     return MA_SUCCESS;
 }

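Note on the spinlock hunks: together with the `ma_spinlock` typedef change earlier in the diff (unsigned char to ma_uint32), the lock now uses plain 32-bit exchange/load/store instead of the c89atomic flag API. A stripped-down sketch of the same acquire/release pattern, assuming the c89atomic 32-bit primitives used above are available (the my_* names are hypothetical):

    typedef ma_uint32 my_spinlock;  /* mirrors the new ma_spinlock typedef */

    static void my_spinlock_lock(volatile my_spinlock* pLock)
    {
        for (;;) {
            /* Try to take the lock: 0 -> 1 with acquire ordering. */
            if (c89atomic_exchange_explicit_32(pLock, 1, c89atomic_memory_order_acquire) == 0) {
                break;
            }

            /* Spin on a cheap relaxed load until the lock looks free, then retry the exchange. */
            while (c89atomic_load_explicit_32(pLock, c89atomic_memory_order_relaxed) == 1) {
                ma_yield();
            }
        }
    }

    static void my_spinlock_unlock(volatile my_spinlock* pLock)
    {
        /* Release ordering publishes the critical section's writes to the next locker. */
        c89atomic_store_explicit_32(pLock, 0, c89atomic_memory_order_release);
    }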
@@ -12457,7 +12489,7 @@ static ma_result ma_device_start__null(ma_device* pDevice)

     ma_device_do_operation__null(pDevice, MA_DEVICE_OP_START__NULL);

-    c89atomic_exchange_8(&pDevice->null_device.isStarted, MA_TRUE);
+    c89atomic_exchange_32(&pDevice->null_device.isStarted, MA_TRUE);
     return MA_SUCCESS;
 }

@@ -12467,7 +12499,7 @@ static ma_result ma_device_stop__null(ma_device* pDevice)

     ma_device_do_operation__null(pDevice, MA_DEVICE_OP_SUSPEND__NULL);

-    c89atomic_exchange_8(&pDevice->null_device.isStarted, MA_FALSE);
+    c89atomic_exchange_32(&pDevice->null_device.isStarted, MA_FALSE);
     return MA_SUCCESS;
 }

@@ -12481,7 +12513,7 @@ static ma_result ma_device_write__null(ma_device* pDevice, const void* pPCMFrame
         *pFramesWritten = 0;
     }

-    wasStartedOnEntry = c89atomic_load_8(&pDevice->null_device.isStarted);
+    wasStartedOnEntry = c89atomic_load_32(&pDevice->null_device.isStarted);

     /* Keep going until everything has been read. */
     totalPCMFramesProcessed = 0;
@@ -12507,7 +12539,7 @@ static ma_result ma_device_write__null(ma_device* pDevice, const void* pPCMFrame
         if (pDevice->null_device.currentPeriodFramesRemainingPlayback == 0) {
             pDevice->null_device.currentPeriodFramesRemainingPlayback = 0;

-            if (!c89atomic_load_8(&pDevice->null_device.isStarted) && !wasStartedOnEntry) {
+            if (!c89atomic_load_32(&pDevice->null_device.isStarted) && !wasStartedOnEntry) {
                 result = ma_device_start__null(pDevice);
                 if (result != MA_SUCCESS) {
                     break;
@@ -12527,7 +12559,7 @@ static ma_result ma_device_write__null(ma_device* pDevice, const void* pPCMFrame
             ma_uint64 currentFrame;

             /* Stop waiting if the device has been stopped. */
-            if (!c89atomic_load_8(&pDevice->null_device.isStarted)) {
+            if (!c89atomic_load_32(&pDevice->null_device.isStarted)) {
                 break;
             }

@@ -12598,7 +12630,7 @@ static ma_result ma_device_read__null(ma_device* pDevice, void* pPCMFrames, ma_u
             ma_uint64 currentFrame;

             /* Stop waiting if the device has been stopped. */
-            if (!c89atomic_load_8(&pDevice->null_device.isStarted)) {
+            if (!c89atomic_load_32(&pDevice->null_device.isStarted)) {
                 break;
             }

@@ -13812,6 +13844,189 @@ typedef ma_IUnknown ma_WASAPIDeviceInterface;
 #endif


+#define MA_CONTEXT_COMMAND_QUIT__WASAPI 1
+#define MA_CONTEXT_COMMAND_CREATE_IAUDIOCLIENT__WASAPI 2
+#define MA_CONTEXT_COMMAND_RELEASE_IAUDIOCLIENT__WASAPI 3
+
+static ma_context_command__wasapi ma_context_init_command__wasapi(int code)
+{
+    ma_context_command__wasapi cmd;
+
+    MA_ZERO_OBJECT(&cmd);
+    cmd.code = code;
+
+    return cmd;
+}
+
+static ma_result ma_context_post_command__wasapi(ma_context* pContext, const ma_context_command__wasapi* pCmd)
+{
+    /* For now we are doing everything synchronously, but I might relax this later if the need arises. */
+    ma_result result;
+    ma_bool32 isUsingLocalEvent = MA_FALSE;
+    ma_event localEvent;
+
+    MA_ASSERT(pContext != NULL);
+    MA_ASSERT(pCmd != NULL);
+
+    if (pCmd->pEvent == NULL) {
+        isUsingLocalEvent = MA_TRUE;
+
+        result = ma_event_init(&localEvent);
+        if (result != MA_SUCCESS) {
+            return result;  /* Failed to create the event for this command. */
+        }
+    }
+
+    /* Here is where we add the command to the list. If there's not enough room we'll spin until there is. */
+    ma_mutex_lock(&pContext->wasapi.commandLock);
+    {
+        ma_uint32 index;
+
+        /* Spin until we've got some space available. */
+        while (pContext->wasapi.commandCount == ma_countof(pContext->wasapi.commands)) {
+            ma_yield();
+        }
+
+        /* Space is now available. Can safely add to the list. */
+        index = (pContext->wasapi.commandIndex + pContext->wasapi.commandCount) % ma_countof(pContext->wasapi.commands);
+        pContext->wasapi.commands[index] = *pCmd;
+        pContext->wasapi.commands[index].pEvent = &localEvent;
+        pContext->wasapi.commandCount += 1;
+
+        /* Now that the command has been added, release the semaphore so ma_context_next_command__wasapi() can return. */
+        ma_semaphore_release(&pContext->wasapi.commandSem);
+    }
+    ma_mutex_unlock(&pContext->wasapi.commandLock);
+
+    if (isUsingLocalEvent) {
+        ma_event_wait(&localEvent);
+        ma_event_uninit(&localEvent);
+    }
+
+    return MA_SUCCESS;
+}
+
+static ma_result ma_context_next_command__wasapi(ma_context* pContext, ma_context_command__wasapi* pCmd)
+{
+    ma_result result = MA_SUCCESS;
+
+    MA_ASSERT(pContext != NULL);
+    MA_ASSERT(pCmd != NULL);
+
+    result = ma_semaphore_wait(&pContext->wasapi.commandSem);
+    if (result == MA_SUCCESS) {
+        ma_mutex_lock(&pContext->wasapi.commandLock);
+        {
+            *pCmd = pContext->wasapi.commands[pContext->wasapi.commandIndex];
+            pContext->wasapi.commandIndex = (pContext->wasapi.commandIndex + 1) % ma_countof(pContext->wasapi.commands);
+            pContext->wasapi.commandCount -= 1;
+        }
+        ma_mutex_unlock(&pContext->wasapi.commandLock);
+    }
+
+    return result;
+}
+
+static ma_thread_result MA_THREADCALL ma_context_command_thread__wasapi(void* pUserData)
+{
+    ma_result result;
+    ma_context* pContext = (ma_context*)pUserData;
+    MA_ASSERT(pContext != NULL);
+
+    for (;;) {
+        ma_context_command__wasapi cmd;
+        result = ma_context_next_command__wasapi(pContext, &cmd);
+        if (result != MA_SUCCESS) {
+            break;
+        }
+
+        switch (cmd.code)
+        {
+            case MA_CONTEXT_COMMAND_QUIT__WASAPI:
+            {
+                /* Do nothing. Handled after the switch. */
+            } break;
+
+            case MA_CONTEXT_COMMAND_CREATE_IAUDIOCLIENT__WASAPI:
+            {
+                if (cmd.data.createAudioClient.deviceType == ma_device_type_playback) {
+                    result = ma_result_from_HRESULT(ma_IAudioClient_GetService((ma_IAudioClient*)cmd.data.createAudioClient.pAudioClient, &MA_IID_IAudioRenderClient, cmd.data.createAudioClient.ppAudioClientService));
+                } else {
+                    result = ma_result_from_HRESULT(ma_IAudioClient_GetService((ma_IAudioClient*)cmd.data.createAudioClient.pAudioClient, &MA_IID_IAudioCaptureClient, cmd.data.createAudioClient.ppAudioClientService));
+                }
+            } break;
+
+            case MA_CONTEXT_COMMAND_RELEASE_IAUDIOCLIENT__WASAPI:
+            {
+                if (cmd.data.releaseAudioClient.deviceType == ma_device_type_playback) {
+                    if (cmd.data.releaseAudioClient.pDevice->wasapi.pAudioClientPlayback != NULL) {
+                        ma_IAudioClient_Release((ma_IAudioClient*)cmd.data.releaseAudioClient.pDevice->wasapi.pAudioClientPlayback);
+                        cmd.data.releaseAudioClient.pDevice->wasapi.pAudioClientPlayback = NULL;
+                    }
+                }
+
+                if (cmd.data.releaseAudioClient.deviceType == ma_device_type_capture) {
+                    if (cmd.data.releaseAudioClient.pDevice->wasapi.pAudioClientCapture != NULL) {
+                        ma_IAudioClient_Release((ma_IAudioClient*)cmd.data.releaseAudioClient.pDevice->wasapi.pAudioClientCapture);
+                        cmd.data.releaseAudioClient.pDevice->wasapi.pAudioClientCapture = NULL;
+                    }
+                }
+            } break;
+
+            default:
+            {
+                /* Unknown command. Ignore it, but trigger an assert in debug mode so we're aware of it. */
+                MA_ASSERT(MA_FALSE);
+            } break;
+        }
+
+        if (cmd.pEvent != NULL) {
+            ma_event_signal(cmd.pEvent);
+        }
+
+        if (cmd.code == MA_CONTEXT_COMMAND_QUIT__WASAPI) {
+            break;  /* Received a quit message. Get out of here. */
+        }
+    }
+
+    return (ma_thread_result)0;
+}
+
+static ma_result ma_device_create_IAudioClient_service__wasapi(ma_context* pContext, ma_device_type deviceType, ma_IAudioClient* pAudioClient, void** ppAudioClientService)
+{
+    ma_result result;
+    ma_context_command__wasapi cmd = ma_context_init_command__wasapi(MA_CONTEXT_COMMAND_CREATE_IAUDIOCLIENT__WASAPI);
+    cmd.data.createAudioClient.deviceType = deviceType;
+    cmd.data.createAudioClient.pAudioClient = (void*)pAudioClient;
+    cmd.data.createAudioClient.ppAudioClientService = ppAudioClientService;
+    cmd.data.createAudioClient.result = MA_SUCCESS;
+
+    result = ma_context_post_command__wasapi(pContext, &cmd);  /* This will not return until the command has actually been run. */
+    if (result != MA_SUCCESS) {
+        return result;
+    }
+
+    return cmd.data.createAudioClient.result;
+}
+
+#if 0 /* Not used at the moment, but leaving here for future use. */
+static ma_result ma_device_release_IAudioClient_service__wasapi(ma_device* pDevice, ma_device_type deviceType)
+{
+    ma_result result;
+    ma_context_command__wasapi cmd = ma_context_init_command__wasapi(MA_CONTEXT_COMMAND_RELEASE_IAUDIOCLIENT__WASAPI);
+    cmd.data.releaseAudioClient.pDevice = pDevice;
+    cmd.data.releaseAudioClient.deviceType = deviceType;
+
+    result = ma_context_post_command__wasapi(pDevice->pContext, &cmd);  /* This will not return until the command has actually been run. */
+    if (result != MA_SUCCESS) {
+        return result;
+    }
+
+    return MA_SUCCESS;
+}
+#endif
+
+
 static void ma_add_native_data_format_to_device_info_from_WAVEFORMATEX(const WAVEFORMATEX* pWF, ma_share_mode shareMode, ma_device_info* pInfo)
 {
     MA_ASSERT(pWF != NULL);
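Note on the block above: the new functions implement a small fixed-size command queue. A producer appends a command under `commandLock`, releases `commandSem` to wake the worker, and then blocks on a per-command `ma_event` until the worker has executed it, which is what lets GetService()/Release() always run on the one thread WASAPI requires. A stripped-down sketch of that post-and-wait shape, assuming miniaudio's ma_mutex/ma_semaphore/ma_event primitives; the my_* names are hypothetical and capacity/overflow handling is omitted:

    #define MY_CMD_CAPACITY 4

    typedef struct
    {
        int       code;
        ma_event* pEvent;           /* Signalled by the worker once the command has run. */
    } my_command;

    typedef struct
    {
        ma_mutex     lock;
        ma_semaphore sem;           /* Counts queued commands. */
        ma_uint32    index;         /* Index of the oldest queued command. */
        ma_uint32    count;
        my_command   commands[MY_CMD_CAPACITY];
    } my_command_queue;

    /* Producer side: enqueue, wake the worker, then wait for completion. */
    static ma_result my_command_queue_post(my_command_queue* pQueue, const my_command* pCmd)
    {
        ma_mutex_lock(&pQueue->lock);
        {
            pQueue->commands[(pQueue->index + pQueue->count) % MY_CMD_CAPACITY] = *pCmd;
            pQueue->count += 1;
            ma_semaphore_release(&pQueue->sem);
        }
        ma_mutex_unlock(&pQueue->lock);

        if (pCmd->pEvent != NULL) {
            ma_event_wait(pCmd->pEvent);    /* Block until the worker signals this command as done. */
        }

        return MA_SUCCESS;
    }

    /* Worker side: sleep on the semaphore, then pop the oldest command. */
    static ma_result my_command_queue_next(my_command_queue* pQueue, my_command* pCmd)
    {
        ma_result result = ma_semaphore_wait(&pQueue->sem);
        if (result != MA_SUCCESS) {
            return result;
        }

        ma_mutex_lock(&pQueue->lock);
        {
            *pCmd = pQueue->commands[pQueue->index];
            pQueue->index  = (pQueue->index + 1) % MY_CMD_CAPACITY;
            pQueue->count -= 1;
        }
        ma_mutex_unlock(&pQueue->lock);

        return result;
    }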
@@ -13832,9 +14047,6 @@ static ma_result ma_context_get_device_info_from_IAudioClient__wasapi(ma_context
 {
     HRESULT hr;
     WAVEFORMATEX* pWF = NULL;
-#ifdef MA_WIN32_DESKTOP
-    ma_IPropertyStore *pProperties;
-#endif

     MA_ASSERT(pAudioClient != NULL);
     MA_ASSERT(pInfo != NULL);
@@ -13848,11 +14060,14 @@ static ma_result ma_context_get_device_info_from_IAudioClient__wasapi(ma_context
     }

     /*
-    Exlcusive Mode. We repeatedly call IsFormatSupported() here. This is not currently support on
-    UWP. Failure to retrieve the exclusive mode format is not considered an error, so from here
-    on out, MA_SUCCESS is guaranteed to be returned.
+    Exlcusive Mode. We repeatedly call IsFormatSupported() here. This is not currently supported on
+    UWP. Failure to retrieve the exclusive mode format is not considered an error, so from here on
+    out, MA_SUCCESS is guaranteed to be returned.
     */
 #ifdef MA_WIN32_DESKTOP
+    {
+        ma_IPropertyStore *pProperties;
+
         /*
         The first thing to do is get the format from PKEY_AudioEngine_DeviceFormat. This should give us a channel count we assume is
         correct which will simplify our searching.
@@ -13943,7 +14158,8 @@ static ma_result ma_context_get_device_info_from_IAudioClient__wasapi(ma_context
         } else {
             ma_context_post_error(pContext, NULL, MA_LOG_LEVEL_WARNING, "[WASAPI] Failed to open property store for device info retrieval.", ma_result_from_HRESULT(hr));
         }
-#endif
+    }
+#endif

     return MA_SUCCESS;
 }
@@ -14831,14 +15047,16 @@ static ma_result ma_device_init_internal__wasapi(ma_context* pContext, ma_device

     pData->usingAudioClient3 = wasInitializedUsingIAudioClient3;

+
     if (deviceType == ma_device_type_playback) {
-        hr = ma_IAudioClient_GetService((ma_IAudioClient*)pData->pAudioClient, &MA_IID_IAudioRenderClient, (void**)&pData->pRenderClient);
+        result = ma_device_create_IAudioClient_service__wasapi(pContext, deviceType, (ma_IAudioClient*)pData->pAudioClient, (void**)&pData->pRenderClient);
     } else {
-        hr = ma_IAudioClient_GetService((ma_IAudioClient*)pData->pAudioClient, &MA_IID_IAudioCaptureClient, (void**)&pData->pCaptureClient);
+        result = ma_device_create_IAudioClient_service__wasapi(pContext, deviceType, (ma_IAudioClient*)pData->pAudioClient, (void**)&pData->pCaptureClient);
     }

-    if (FAILED(hr)) {
-        errorMsg = "[WASAPI] Failed to get audio client service.", result = ma_result_from_HRESULT(hr);
+    /*if (FAILED(hr)) {*/
+    if (result != MA_SUCCESS) {
+        errorMsg = "[WASAPI] Failed to get audio client service.";
         goto done;
     }

@@ -14927,6 +15145,42 @@ static ma_result ma_device_reinit__wasapi(ma_device* pDevice, ma_device_type dev
         return MA_INVALID_ARGS;
     }

+
+    /*
+    Before reinitializing the device we need to free the previous audio clients.
+
+    There's a known memory leak here. We will be calling this from the routing change callback that
+    is fired by WASAPI. If we attempt to release the IAudioClient we will deadlock. In my opinion
+    this is a bug. I'm not sure what I need to do to handle this cleanly, but I think we'll probably
+    need some system where we post an event, but delay the execution of it until the callback has
+    returned. I'm not sure how to do this reliably, however. I have set up some infrastructure for
+    a command thread which might be useful for this.
+    */
+    if (deviceType == ma_device_type_capture || deviceType == ma_device_type_loopback) {
+        if (pDevice->wasapi.pCaptureClient) {
+            ma_IAudioCaptureClient_Release((ma_IAudioCaptureClient*)pDevice->wasapi.pCaptureClient);
+            pDevice->wasapi.pCaptureClient = NULL;
+        }
+
+        if (pDevice->wasapi.pAudioClientCapture) {
+            /*ma_device_release_IAudioClient_service__wasapi(pDevice, ma_device_type_capture);*/
+            pDevice->wasapi.pAudioClientCapture = NULL;
+        }
+    }
+
+    if (deviceType == ma_device_type_playback) {
+        if (pDevice->wasapi.pRenderClient) {
+            ma_IAudioRenderClient_Release((ma_IAudioRenderClient*)pDevice->wasapi.pRenderClient);
+            pDevice->wasapi.pRenderClient = NULL;
+        }
+
+        if (pDevice->wasapi.pAudioClientPlayback) {
+            /*ma_device_release_IAudioClient_service__wasapi(pDevice, ma_device_type_playback);*/
+            pDevice->wasapi.pAudioClientPlayback = NULL;
+        }
+    }
+
+
     if (deviceType == ma_device_type_playback) {
         data.formatIn = pDevice->playback.format;
         data.channelsIn = pDevice->playback.channels;
@@ -14954,16 +15208,6 @@ static ma_result ma_device_reinit__wasapi(ma_device* pDevice, ma_device_type dev

     /* At this point we have some new objects ready to go. We need to uninitialize the previous ones and then set the new ones. */
     if (deviceType == ma_device_type_capture || deviceType == ma_device_type_loopback) {
-        if (pDevice->wasapi.pCaptureClient) {
-            ma_IAudioCaptureClient_Release((ma_IAudioCaptureClient*)pDevice->wasapi.pCaptureClient);
-            pDevice->wasapi.pCaptureClient = NULL;
-        }
-
-        if (pDevice->wasapi.pAudioClientCapture) {
-            /*ma_IAudioClient_Release((ma_IAudioClient*)pDevice->wasapi.pAudioClientCapture);*/
-            pDevice->wasapi.pAudioClientCapture = NULL;
-        }
-
         pDevice->wasapi.pAudioClientCapture = data.pAudioClient;
         pDevice->wasapi.pCaptureClient = data.pCaptureClient;

@@ -14985,16 +15229,6 @@ static ma_result ma_device_reinit__wasapi(ma_device* pDevice, ma_device_type dev
     }

     if (deviceType == ma_device_type_playback) {
-        if (pDevice->wasapi.pRenderClient) {
-            ma_IAudioRenderClient_Release((ma_IAudioRenderClient*)pDevice->wasapi.pRenderClient);
-            pDevice->wasapi.pRenderClient = NULL;
-        }
-
-        if (pDevice->wasapi.pAudioClientPlayback) {
-            /*ma_IAudioClient_Release((ma_IAudioClient*)pDevice->wasapi.pAudioClientPlayback);*/
-            pDevice->wasapi.pAudioClientPlayback = NULL;
-        }
-
         pDevice->wasapi.pAudioClientPlayback = data.pAudioClient;
         pDevice->wasapi.pRenderClient = data.pRenderClient;

@@ -15229,8 +15463,8 @@ static ma_result ma_device_init__wasapi(ma_device* pDevice, const ma_device_conf
     }
 #endif

-    c89atomic_exchange_8(&pDevice->wasapi.isStartedCapture, MA_FALSE);
-    c89atomic_exchange_8(&pDevice->wasapi.isStartedPlayback, MA_FALSE);
+    c89atomic_exchange_32(&pDevice->wasapi.isStartedCapture, MA_FALSE);
+    c89atomic_exchange_32(&pDevice->wasapi.isStartedPlayback, MA_FALSE);

     return MA_SUCCESS;
 }
@@ -15328,7 +15562,7 @@ static ma_result ma_device_start__wasapi(ma_device* pDevice)
             return ma_post_error(pDevice, MA_LOG_LEVEL_ERROR, "[WASAPI] Failed to start internal capture device.", ma_result_from_HRESULT(hr));
         }

-        c89atomic_exchange_8(&pDevice->wasapi.isStartedCapture, MA_TRUE);
+        c89atomic_exchange_32(&pDevice->wasapi.isStartedCapture, MA_TRUE);
     }

     if (pDevice->type == ma_device_type_playback || pDevice->type == ma_device_type_duplex) {
@@ -15357,7 +15591,7 @@ static ma_result ma_device_stop__wasapi(ma_device* pDevice)
             return ma_post_error(pDevice, MA_LOG_LEVEL_ERROR, "[WASAPI] Failed to reset internal capture device.", ma_result_from_HRESULT(hr));
         }

-        c89atomic_exchange_8(&pDevice->wasapi.isStartedCapture, MA_FALSE);
+        c89atomic_exchange_32(&pDevice->wasapi.isStartedCapture, MA_FALSE);
     }

     if (pDevice->type == ma_device_type_playback || pDevice->type == ma_device_type_duplex) {
@@ -15365,7 +15599,7 @@ static ma_result ma_device_stop__wasapi(ma_device* pDevice)
         The buffer needs to be drained before stopping the device. Not doing this will result in the last few frames not getting output to
         the speakers. This is a problem for very short sounds because it'll result in a significant portion of it not getting played.
         */
-        if (c89atomic_load_8(&pDevice->wasapi.isStartedPlayback)) {
+        if (c89atomic_load_32(&pDevice->wasapi.isStartedPlayback)) {
             /* We need to make sure we put a timeout here or else we'll risk getting stuck in a deadlock in some cases. */
             DWORD waitTime = pDevice->wasapi.actualPeriodSizeInFramesPlayback / pDevice->playback.internalSampleRate;

@@ -15410,7 +15644,7 @@ static ma_result ma_device_stop__wasapi(ma_device* pDevice)
             return ma_post_error(pDevice, MA_LOG_LEVEL_ERROR, "[WASAPI] Failed to reset internal playback device.", ma_result_from_HRESULT(hr));
         }

-        c89atomic_exchange_8(&pDevice->wasapi.isStartedPlayback, MA_FALSE);
+        c89atomic_exchange_32(&pDevice->wasapi.isStartedPlayback, MA_FALSE);
     }

     return MA_SUCCESS;
@@ -15732,7 +15966,7 @@ static ma_result ma_device_data_loop__wasapi(ma_device* pDevice)
                     mappedDeviceBufferSizeInFramesPlayback = 0;
                 }

-                if (!c89atomic_load_8(&pDevice->wasapi.isStartedPlayback)) {
+                if (!c89atomic_load_32(&pDevice->wasapi.isStartedPlayback)) {
                     ma_uint32 startThreshold = pDevice->playback.internalPeriodSizeInFrames * 1;

                     /* Prevent a deadlock. If we don't clamp against the actual buffer size we'll never end up starting the playback device which will result in a deadlock. */
@@ -15748,7 +15982,7 @@ static ma_result ma_device_data_loop__wasapi(ma_device* pDevice)
                             return ma_post_error(pDevice, MA_LOG_LEVEL_ERROR, "[WASAPI] Failed to start internal playback device.", ma_result_from_HRESULT(hr));
                         }

-                        c89atomic_exchange_8(&pDevice->wasapi.isStartedPlayback, MA_TRUE);
+                        c89atomic_exchange_32(&pDevice->wasapi.isStartedPlayback, MA_TRUE);
                     }
                 }

@@ -15897,7 +16131,7 @@ static ma_result ma_device_data_loop__wasapi(ma_device* pDevice)
                     framesWrittenToPlaybackDevice += framesAvailablePlayback;
                 }

-                if (!c89atomic_load_8(&pDevice->wasapi.isStartedPlayback)) {
+                if (!c89atomic_load_32(&pDevice->wasapi.isStartedPlayback)) {
                     hr = ma_IAudioClient_Start((ma_IAudioClient*)pDevice->wasapi.pAudioClientPlayback);
                     if (FAILED(hr)) {
                         ma_post_error(pDevice, MA_LOG_LEVEL_ERROR, "[WASAPI] Failed to start internal playback device.", ma_result_from_HRESULT(hr));
@@ -15905,7 +16139,7 @@ static ma_result ma_device_data_loop__wasapi(ma_device* pDevice)
                         break;
                     }

-                    c89atomic_exchange_8(&pDevice->wasapi.isStartedPlayback, MA_TRUE);
+                    c89atomic_exchange_32(&pDevice->wasapi.isStartedPlayback, MA_TRUE);
                 }

                 /* Make sure we don't wait on the event before we've started the device or we may end up deadlocking. */
@@ -15952,11 +16186,21 @@ static ma_result ma_device_data_loop_wakeup__wasapi(ma_device* pDevice)
     return MA_SUCCESS;
 }


 static ma_result ma_context_uninit__wasapi(ma_context* pContext)
 {
     MA_ASSERT(pContext != NULL);
     MA_ASSERT(pContext->backend == ma_backend_wasapi);
-    (void)pContext;
+
+    if (pContext->wasapi.commandThread != NULL) {
+        ma_context_command__wasapi cmd = ma_context_init_command__wasapi(MA_CONTEXT_COMMAND_QUIT__WASAPI);
+        ma_context_post_command__wasapi(pContext, &cmd);
+        ma_thread_wait(&pContext->wasapi.commandThread);
+
+        /* Only after the thread has been terminated can we uninitialize the sync objects for the command thread. */
+        ma_semaphore_uninit(&pContext->wasapi.commandSem);
+        ma_mutex_uninit(&pContext->wasapi.commandLock);
+    }

     return MA_SUCCESS;
 }
@@ -16013,6 +16257,52 @@ static ma_result ma_context_init__wasapi(ma_context* pContext, const ma_context_
         return result;
     }

+    /*
+    Annoyingly, WASAPI does not allow you to release an IAudioClient object from a different thread
+    than the one that retrieved it with GetService(). This can result in a deadlock in two
+    situations:
+
+        1) When calling ma_device_uninit() from a different thread to ma_device_init(); and
+        2) When uninitializing and reinitializing the internal IAudioClient object in response to
+           automatic stream routing.
+
+    We could define ma_device_uninit() such that it must be called on the same thread as
+    ma_device_init(). We could also just not release the IAudioClient when performing automatic
+    stream routing to avoid the deadlock. Neither of these are acceptable solutions in my view so
+    we're going to have to work around this with a worker thread. This is not ideal, but I can't
+    think of a better way to do this.
+
+    More information about this can be found here:
+
+        https://docs.microsoft.com/en-us/windows/win32/api/audioclient/nn-audioclient-iaudiorenderclient
+
+    Note this section:
+
+        When releasing an IAudioRenderClient interface instance, the client must call the interface's
+        Release method from the same thread as the call to IAudioClient::GetService that created the
+        object.
+    */
+    {
+        result = ma_mutex_init(&pContext->wasapi.commandLock);
+        if (result != MA_SUCCESS) {
+            return result;
+        }
+
+        result = ma_semaphore_init(0, &pContext->wasapi.commandSem);
+        if (result != MA_SUCCESS) {
+            ma_mutex_uninit(&pContext->wasapi.commandLock);
+            return result;
+        }
+
+        result = ma_thread_create(&pContext->wasapi.commandThread, ma_thread_priority_normal, 0, ma_context_command_thread__wasapi, pContext);
+        if (result != MA_SUCCESS) {
+            ma_semaphore_uninit(&pContext->wasapi.commandSem);
+            ma_mutex_uninit(&pContext->wasapi.commandLock);
+            return result;
+        }
+    }
+
+
     pCallbacks->onContextInit = ma_context_init__wasapi;
     pCallbacks->onContextUninit = ma_context_uninit__wasapi;
     pCallbacks->onContextEnumerateDevices = ma_context_enumerate_devices__wasapi;
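Note on the init/uninit hunks: the command thread's synchronisation objects are created before the thread that uses them (mutex, then semaphore, then thread, unwinding in reverse on failure), and uninitialisation posts MA_CONTEXT_COMMAND_QUIT__WASAPI, joins the thread with ma_thread_wait(), and only then destroys the semaphore and mutex. A condensed sketch of that ordering, assuming the same miniaudio primitives (the my_* names are hypothetical):

    /* Startup: sync objects first, worker thread last. */
    static ma_result my_worker_start(ma_context* pContext)
    {
        ma_result result = ma_mutex_init(&pContext->wasapi.commandLock);
        if (result != MA_SUCCESS) {
            return result;
        }

        result = ma_semaphore_init(0, &pContext->wasapi.commandSem);
        if (result != MA_SUCCESS) {
            ma_mutex_uninit(&pContext->wasapi.commandLock);
            return result;
        }

        result = ma_thread_create(&pContext->wasapi.commandThread, ma_thread_priority_normal, 0, ma_context_command_thread__wasapi, pContext);
        if (result != MA_SUCCESS) {
            ma_semaphore_uninit(&pContext->wasapi.commandSem);
            ma_mutex_uninit(&pContext->wasapi.commandLock);
        }
        return result;
    }

    /* Shutdown: ask the worker to quit, join it, then tear down the sync objects it was using. */
    static void my_worker_stop(ma_context* pContext)
    {
        ma_context_command__wasapi cmd = ma_context_init_command__wasapi(MA_CONTEXT_COMMAND_QUIT__WASAPI);
        ma_context_post_command__wasapi(pContext, &cmd);
        ma_thread_wait(&pContext->wasapi.commandThread);

        ma_semaphore_uninit(&pContext->wasapi.commandSem);
        ma_mutex_uninit(&pContext->wasapi.commandLock);
    }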
@@ -16026,7 +16316,7 @@ static ma_result ma_context_init__wasapi(ma_context* pContext, const ma_context_
     pCallbacks->onDeviceDataLoop = ma_device_data_loop__wasapi;
     pCallbacks->onDeviceDataLoopWakeup = ma_device_data_loop_wakeup__wasapi;

-    return result;
+    return MA_SUCCESS;
 }
 #endif

@@ -25652,7 +25942,7 @@ static ma_result ma_device__untrack__coreaudio(ma_device* pDevice)

 -(void)remove_handler
 {
-    [[NSNotificationCenter defaultCenter] removeObserver:self name:@"AVAudioSessionRouteChangeNotification" object:nil];
+    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVAudioSessionRouteChangeNotification object:nil];
 }

 -(void)handle_route_change:(NSNotification*)pNotification
@@ -31270,7 +31560,7 @@ static ma_result ma_device_init_by_type__webaudio(ma_device* pDevice, const ma_d
     /* We're going to calculate some stuff in C just to simplify the JS code. */
     channels = (pDescriptor->channels > 0) ? pDescriptor->channels : MA_DEFAULT_CHANNELS;
     sampleRate = (pDescriptor->sampleRate > 0) ? pDescriptor->sampleRate : MA_DEFAULT_SAMPLE_RATE;
-    periodSizeInFrames = ma_calculate_period_size_in_frames_from_descriptor__webaudio(pDescriptor, pDescriptor->sampleRate, pConfig->performanceProfile);
+    periodSizeInFrames = ma_calculate_period_size_in_frames_from_descriptor__webaudio(pDescriptor, sampleRate, pConfig->performanceProfile);


     /* We create the device on the JavaScript side and reference it using an index. We use this to make it possible to reference the device between JavaScript and C. */
@@ -31290,6 +31580,7 @@ static ma_result ma_device_init_by_type__webaudio(ma_device* pDevice, const ma_d
         /* The AudioContext must be created in a suspended state. */
         device.webaudio = new (window.AudioContext || window.webkitAudioContext)({sampleRate:sampleRate});
         device.webaudio.suspend();
+        device.state = 1; /* MA_STATE_STOPPED */

         /*
         We need an intermediary buffer which we use for JavaScript and C interop. This buffer stores interleaved f32 PCM data. Because it's passed between
@@ -31497,13 +31788,17 @@ static ma_result ma_device_start__webaudio(ma_device* pDevice)

     if (pDevice->type == ma_device_type_capture || pDevice->type == ma_device_type_duplex) {
         EM_ASM({
-            miniaudio.get_device_by_index($0).webaudio.resume();
+            var device = miniaudio.get_device_by_index($0);
+            device.webaudio.resume();
+            device.state = 2; /* MA_STATE_STARTED */
         }, pDevice->webaudio.indexCapture);
     }

     if (pDevice->type == ma_device_type_playback || pDevice->type == ma_device_type_duplex) {
         EM_ASM({
-            miniaudio.get_device_by_index($0).webaudio.resume();
+            var device = miniaudio.get_device_by_index($0);
+            device.webaudio.resume();
+            device.state = 2; /* MA_STATE_STARTED */
         }, pDevice->webaudio.indexPlayback);
     }

@@ -31526,13 +31821,17 @@ static ma_result ma_device_stop__webaudio(ma_device* pDevice)

     if (pDevice->type == ma_device_type_capture || pDevice->type == ma_device_type_duplex) {
         EM_ASM({
-            miniaudio.get_device_by_index($0).webaudio.suspend();
+            var device = miniaudio.get_device_by_index($0);
+            device.webaudio.suspend();
+            device.state = 1; /* MA_STATE_STOPPED */
         }, pDevice->webaudio.indexCapture);
     }

     if (pDevice->type == ma_device_type_playback || pDevice->type == ma_device_type_duplex) {
         EM_ASM({
-            miniaudio.get_device_by_index($0).webaudio.suspend();
+            var device = miniaudio.get_device_by_index($0);
+            device.webaudio.suspend();
+            device.state = 1; /* MA_STATE_STOPPED */
         }, pDevice->webaudio.indexPlayback);
     }

@@ -31612,6 +31911,26 @@ static ma_result ma_context_init__webaudio(ma_context* pContext, const ma_contex
             miniaudio.get_device_by_index = function(deviceIndex) {
                 return miniaudio.devices[deviceIndex];
             };
+
+            miniaudio.unlock_event_types = (function(){
+                return ['touchstart', 'touchend', 'click'];
+            })();
+
+            miniaudio.unlock = function() {
+                for(var i = 0; i < miniaudio.devices.length; ++i) {
+                    var device = miniaudio.devices[i];
+                    if (device != null && device.webaudio != null && device.state === 2 /* MA_STATE_STARTED */) {
+                        device.webaudio.resume();
+                    }
+                }
+                miniaudio.unlock_event_types.map(function(event_type) {
+                    document.removeEventListener(event_type, miniaudio.unlock, true);
+                });
+            };
+
+            miniaudio.unlock_event_types.map(function(event_type) {
+                document.addEventListener(event_type, miniaudio.unlock, true);
+            });
         }

         return 1;
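Note on the Web Audio hunk above: browsers refuse to start an AudioContext that was created without a user gesture, so context init now registers one-shot touch/click listeners that resume every device whose JS-side state is 2 (MA_STATE_STARTED per the comments in the diff; 1 is MA_STATE_STOPPED). A standalone sketch of the same gesture-unlock idea in an Emscripten C file (window.myAudioContext is a hypothetical example object, not miniaudio API):

    #include <emscripten.h>

    static void register_audio_unlock(void)
    {
        EM_ASM({
            var unlock = function() {
                /* Resume a suspended AudioContext the first time the user interacts with the page. */
                if (window.myAudioContext && window.myAudioContext.state === 'suspended') {
                    window.myAudioContext.resume();
                }
                ['touchstart', 'touchend', 'click'].forEach(function(type) {
                    document.removeEventListener(type, unlock, true);
                });
            };
            ['touchstart', 'touchend', 'click'].forEach(function(type) {
                document.addEventListener(type, unlock, true);
            });
        });
    }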
@@ -44086,7 +44405,7 @@ extern "C" {
 #define DRWAV_XSTRINGIFY(x) DRWAV_STRINGIFY(x)
 #define DRWAV_VERSION_MAJOR 0
 #define DRWAV_VERSION_MINOR 12
-#define DRWAV_VERSION_REVISION 17
+#define DRWAV_VERSION_REVISION 19
 #define DRWAV_VERSION_STRING DRWAV_XSTRINGIFY(DRWAV_VERSION_MAJOR) "." DRWAV_XSTRINGIFY(DRWAV_VERSION_MINOR) "." DRWAV_XSTRINGIFY(DRWAV_VERSION_REVISION)
 #include <stddef.h>
 typedef signed char drwav_int8;
@@ -44459,7 +44778,7 @@ extern "C" {
 #define DRFLAC_XSTRINGIFY(x) DRFLAC_STRINGIFY(x)
 #define DRFLAC_VERSION_MAJOR 0
 #define DRFLAC_VERSION_MINOR 12
-#define DRFLAC_VERSION_REVISION 26
+#define DRFLAC_VERSION_REVISION 28
 #define DRFLAC_VERSION_STRING DRFLAC_XSTRINGIFY(DRFLAC_VERSION_MAJOR) "." DRFLAC_XSTRINGIFY(DRFLAC_VERSION_MINOR) "." DRFLAC_XSTRINGIFY(DRFLAC_VERSION_REVISION)
 #include <stddef.h>
 typedef signed char drflac_int8;
@@ -44820,7 +45139,7 @@ extern "C" {
 #define DRMP3_XSTRINGIFY(x) DRMP3_STRINGIFY(x)
 #define DRMP3_VERSION_MAJOR 0
 #define DRMP3_VERSION_MINOR 6
-#define DRMP3_VERSION_REVISION 25
+#define DRMP3_VERSION_REVISION 27
 #define DRMP3_VERSION_STRING DRMP3_XSTRINGIFY(DRMP3_VERSION_MAJOR) "." DRMP3_XSTRINGIFY(DRMP3_VERSION_MINOR) "." DRMP3_XSTRINGIFY(DRMP3_VERSION_REVISION)
 #include <stddef.h>
 typedef signed char drmp3_int8;
@@ -49756,7 +50075,6 @@ DRWAV_PRIVATE drwav_bool32 drwav_init_write__internal(drwav* pWav, const drwav_d
     runningPos += drwav__write_u32ne_to_le(pWav, pWav->fmt.avgBytesPerSec);
     runningPos += drwav__write_u16ne_to_le(pWav, pWav->fmt.blockAlign);
     runningPos += drwav__write_u16ne_to_le(pWav, pWav->fmt.bitsPerSample);
-    pWav->dataChunkDataPos = runningPos;
     if (pFormat->container == drwav_container_riff) {
         drwav_uint32 chunkSizeDATA = (drwav_uint32)initialDataChunkSize;
         runningPos += drwav__write(pWav, "data", 4);
@@ -49769,12 +50087,12 @@ DRWAV_PRIVATE drwav_bool32 drwav_init_write__internal(drwav* pWav, const drwav_d
         runningPos += drwav__write(pWav, "data", 4);
         runningPos += drwav__write_u32ne_to_le(pWav, 0xFFFFFFFF);
     }
-    (void)runningPos;
     pWav->container = pFormat->container;
     pWav->channels = (drwav_uint16)pFormat->channels;
     pWav->sampleRate = pFormat->sampleRate;
     pWav->bitsPerSample = (drwav_uint16)pFormat->bitsPerSample;
     pWav->translatedFormatTag = (drwav_uint16)pFormat->format;
+    pWav->dataChunkDataPos = runningPos;
     return DRWAV_TRUE;
 }
 DRWAV_API drwav_bool32 drwav_init_write(drwav* pWav, const drwav_data_format* pFormat, drwav_write_proc onWrite, drwav_seek_proc onSeek, void* pUserData, const drwav_allocation_callbacks* pAllocationCallbacks)
@@ -50220,7 +50538,7 @@ DRWAV_PRIVATE drwav_result drwav_result_from_errno(int e)
 }
 DRWAV_PRIVATE drwav_result drwav_fopen(FILE** ppFile, const char* pFilePath, const char* pOpenMode)
 {
-#if _MSC_VER && _MSC_VER >= 1400
+#if defined(_MSC_VER) && _MSC_VER >= 1400
     errno_t err;
 #endif
     if (ppFile != NULL) {
@@ -50229,7 +50547,7 @@ DRWAV_PRIVATE drwav_result drwav_fopen(FILE** ppFile, const char* pFilePath, con
     if (pFilePath == NULL || pOpenMode == NULL || ppFile == NULL) {
         return DRWAV_INVALID_ARGS;
     }
-#if _MSC_VER && _MSC_VER >= 1400
+#if defined(_MSC_VER) && _MSC_VER >= 1400
     err = fopen_s(ppFile, pFilePath, pOpenMode);
     if (err != 0) {
         return drwav_result_from_errno(err);
@@ -50598,7 +50916,7 @@ DRWAV_API drwav_result drwav_uninit(drwav* pWav)
                 drwav_uint32 riffChunkSize = drwav__riff_chunk_size_riff(pWav->dataChunkDataSize);
                 drwav__write_u32ne_to_le(pWav, riffChunkSize);
             }
-            if (pWav->onSeek(pWav->pUserData, (int)pWav->dataChunkDataPos + 4, drwav_seek_origin_start)) {
+            if (pWav->onSeek(pWav->pUserData, (int)pWav->dataChunkDataPos - 4, drwav_seek_origin_start)) {
                 drwav_uint32 dataChunkSize = drwav__data_chunk_size_riff(pWav->dataChunkDataSize);
                 drwav__write_u32ne_to_le(pWav, dataChunkSize);
             }
@@ -50607,7 +50925,7 @@ DRWAV_API drwav_result drwav_uninit(drwav* pWav)
                 drwav_uint64 riffChunkSize = drwav__riff_chunk_size_w64(pWav->dataChunkDataSize);
                 drwav__write_u64ne_to_le(pWav, riffChunkSize);
             }
-            if (pWav->onSeek(pWav->pUserData, (int)pWav->dataChunkDataPos + 16, drwav_seek_origin_start)) {
+            if (pWav->onSeek(pWav->pUserData, (int)pWav->dataChunkDataPos - 8, drwav_seek_origin_start)) {
                 drwav_uint64 dataChunkSize = drwav__data_chunk_size_w64(pWav->dataChunkDataSize);
                 drwav__write_u64ne_to_le(pWav, dataChunkSize);
             }
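Note on the two drwav_uninit hunks above: they pair with the drwav_init_write__internal change further up. dataChunkDataPos is now recorded after the "data" chunk header has been written (it points at the first byte of sample data) instead of before it, so the uninit path now seeks backwards to land on the chunk-size field. The offsets, as I read the diff:

    /* Offset sketch derived from the diff (not part of dr_wav's public API).
     *
     * RIFF "data" chunk:  "data" tag (4 bytes)  | size (4 bytes) | sample data...
     * W64  "data" chunk:  chunk GUID (16 bytes) | size (8 bytes) | sample data...
     *
     * Old convention: dataChunkDataPos = start of the chunk header
     *     RIFF size field at dataChunkDataPos + 4,  W64 size field at dataChunkDataPos + 16
     * New convention: dataChunkDataPos = start of the sample data
     *     RIFF size field at dataChunkDataPos - 4,  W64 size field at dataChunkDataPos - 8
     */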
@@ -50897,7 +51215,8 @@ DRWAV_PRIVATE drwav_uint64 drwav_read_pcm_frames_s16__msadpcm(drwav* pWav, drwav
     drwav_uint64 totalFramesRead = 0;
     DRWAV_ASSERT(pWav != NULL);
     DRWAV_ASSERT(framesToRead > 0);
-    while (framesToRead > 0 && pWav->compressed.iCurrentPCMFrame < pWav->totalPCMFrameCount) {
+    while (pWav->compressed.iCurrentPCMFrame < pWav->totalPCMFrameCount) {
+        DRWAV_ASSERT(framesToRead > 0);
         if (pWav->msadpcm.cachedFrameCount == 0 && pWav->msadpcm.bytesRemainingInBlock == 0) {
             if (pWav->channels == 1) {
                 drwav_uint8 header[7];
@@ -50947,7 +51266,7 @@ DRWAV_PRIVATE drwav_uint64 drwav_read_pcm_frames_s16__msadpcm(drwav* pWav, drwav
             pWav->msadpcm.cachedFrameCount -= 1;
         }
         if (framesToRead == 0) {
-            return totalFramesRead;
+            break;
         }
         if (pWav->msadpcm.cachedFrameCount == 0) {
             if (pWav->msadpcm.bytesRemainingInBlock == 0) {
@@ -51043,7 +51362,8 @@ DRWAV_PRIVATE drwav_uint64 drwav_read_pcm_frames_s16__ima(drwav* pWav, drwav_uin
     };
     DRWAV_ASSERT(pWav != NULL);
     DRWAV_ASSERT(framesToRead > 0);
-    while (framesToRead > 0 && pWav->compressed.iCurrentPCMFrame < pWav->totalPCMFrameCount) {
+    while (pWav->compressed.iCurrentPCMFrame < pWav->totalPCMFrameCount) {
+        DRWAV_ASSERT(framesToRead > 0);
         if (pWav->ima.cachedFrameCount == 0 && pWav->ima.bytesRemainingInBlock == 0) {
             if (pWav->channels == 1) {
                 drwav_uint8 header[4];
@@ -51094,7 +51414,7 @@ DRWAV_PRIVATE drwav_uint64 drwav_read_pcm_frames_s16__ima(drwav* pWav, drwav_uin
             pWav->ima.cachedFrameCount -= 1;
         }
         if (framesToRead == 0) {
-            return totalFramesRead;
+            break;
         }
         if (pWav->ima.cachedFrameCount == 0) {
             if (pWav->ima.bytesRemainingInBlock == 0) {
@@ -55352,7 +55672,8 @@ static drflac_bool32 drflac__read_next_flac_frame_header(drflac_bs* bs, drflac_u
     DRFLAC_ASSERT(blockSize > 0);
     if (blockSize == 1) {
         header->blockSizeInPCMFrames = 192;
-    } else if (blockSize >= 2 && blockSize <= 5) {
+    } else if (blockSize <= 5) {
+        DRFLAC_ASSERT(blockSize >= 2);
         header->blockSizeInPCMFrames = 576 * (1 << (blockSize - 2));
     } else if (blockSize == 6) {
         if (!drflac__read_uint16(bs, 8, &header->blockSizeInPCMFrames)) {
@@ -57771,7 +58092,7 @@ static drflac_result drflac_result_from_errno(int e)
 }
 static drflac_result drflac_fopen(FILE** ppFile, const char* pFilePath, const char* pOpenMode)
 {
-#if _MSC_VER && _MSC_VER >= 1400
+#if defined(_MSC_VER) && _MSC_VER >= 1400
     errno_t err;
 #endif
     if (ppFile != NULL) {
@@ -57780,7 +58101,7 @@ static drflac_result drflac_fopen(FILE** ppFile, const char* pFilePath, const ch
     if (pFilePath == NULL || pOpenMode == NULL || ppFile == NULL) {
         return DRFLAC_INVALID_ARGS;
     }
-#if _MSC_VER && _MSC_VER >= 1400
+#if defined(_MSC_VER) && _MSC_VER >= 1400
     err = fopen_s(ppFile, pFilePath, pOpenMode);
     if (err != 0) {
         return drflac_result_from_errno(err);
@@ -63081,7 +63402,7 @@ static drmp3_result drmp3_result_from_errno(int e)
 }
 static drmp3_result drmp3_fopen(FILE** ppFile, const char* pFilePath, const char* pOpenMode)
 {
-#if _MSC_VER && _MSC_VER >= 1400
+#if defined(_MSC_VER) && _MSC_VER >= 1400
     errno_t err;
 #endif
     if (ppFile != NULL) {
@@ -63090,7 +63411,7 @@ static drmp3_result drmp3_fopen(FILE** ppFile, const char* pFilePath, const char
     if (pFilePath == NULL || pOpenMode == NULL || ppFile == NULL) {
         return DRMP3_INVALID_ARGS;
     }
-#if _MSC_VER && _MSC_VER >= 1400
+#if defined(_MSC_VER) && _MSC_VER >= 1400
     err = fopen_s(ppFile, pFilePath, pOpenMode);
     if (err != 0) {
         return drmp3_result_from_errno(err);
@@ -64149,9 +64470,19 @@ The following miscellaneous changes have also been made.
 /*
 REVISION HISTORY
 ================
-v0.10.32 - TBD
+v0.10.32 - 2020-02-23
   - WASAPI: Fix a deadlock in exclusive mode.
+  - WASAPI: No longer return an error from ma_context_get_device_info() when an exclusive mode format
+    cannot be retrieved.
+  - WASAPI: Attempt to fix some bugs with device uninitialization.
   - PulseAudio: Yet another refactor, this time to remove the dependency on `pa_threaded_mainloop`.
+  - Web Audio: Fix a bug on Chrome and any other browser using the same engine.
+  - Web Audio: Automatically start the device on some user input if the device has been started. This
+    is to work around Google's policy of not starting audio if no user input has yet been performed.
+  - Fix a bug where thread handles are not being freed.
+  - Fix some static analysis warnings in FLAC, WAV and MP3 decoders.
+  - Fix a warning due to referencing _MSC_VER when it is undefined.
+  - Update to latest version of c89atomic.
   - Internal refactoring to migrate over to the new backend callback system for the following backends:
     - PulseAudio
     - ALSA
@@ -64161,9 +64492,6 @@ v0.10.32 - TBD
     - OSS
     - audio(4)
     - sndio
-  - Fix a bug where thread handles are not being freed.
-  - Fix some static analysis warnings in FLAC, WAV and MP3 decoders.
-  - Update to latest version of c89atomic.

 v0.10.31 - 2021-01-17
   - Make some functions const correct.