Mirror of https://github.com/raysan5/raylib.git (synced 2025-10-09 19:36:33 +00:00)
Develop branch integration (#1091)
* [core] REDESIGNED: Implement global context
* [rlgl] REDESIGNED: Implement global context
* Reviewed globals for Android
* Review Android globals usage
* Update Android globals
* Bump raylib version to 3.0 !!!
* [raudio] REDESIGNED: Implement global context
* [raudio] Reorder functions
* [core] Tweaks on descriptions
* Issues with SUPPORT_MOUSE_GESTURES
* [camera] Use global context
* REDESIGN: Move shapes drawing texture/rec to RLGL context
* Review some issues on standalone mode
* Update to use global context
* [GAME] Upload RE-PAIR game from GGJ2020 -WIP-
* Update game: RE-PAIR
* [utils] TRACELOG macros proposal
* Update config.h
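The common thread across these changes is the same redesign in every module: the scattered static globals are collapsed into one static context struct per module (CAMERA, GESTURES, AUDIO in the hunks below). A minimal standalone sketch of that pattern, using hypothetical names rather than raylib's actual fields:

// Minimal sketch of the per-module global context pattern (hypothetical names,
// not raylib's actual fields): all module state lives in one static struct,
// initialized with C99 designated initializers, and accessors touch only it.
#include <stdio.h>

typedef struct ExampleData {
    int mode;               // Current module mode
    float targetDistance;   // Some module-wide parameter
    int controls[2];        // Configurable input bindings
} ExampleData;

static ExampleData EXAMPLE = {
    .mode = 0,
    .targetDistance = 0.0f,
    .controls = { 'W', 'S' }
};

static void SetExampleMode(int mode) { EXAMPLE.mode = mode; }

int main(void)
{
    SetExampleMode(1);
    printf("mode: %d, front key: %c\n", EXAMPLE.mode, EXAMPLE.controls[0]);
    return 0;
}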
src/camera.h (238 changes)
@@ -197,19 +197,31 @@ typedef enum {
    MOVE_DOWN
} CameraMove;

typedef struct {
    int mode; // Current camera mode
    float targetDistance; // Camera distance from position to target
    float playerEyesPosition; // Default player eyes position from ground (in meters)
    Vector2 angle; // Camera angle in plane XZ

    int moveControl[6];
    int smoothZoomControl; // raylib: KEY_LEFT_CONTROL
    int altControl; // raylib: KEY_LEFT_ALT
    int panControl; // raylib: MOUSE_MIDDLE_BUTTON
} CameraData;

//----------------------------------------------------------------------------------
// Global Variables Definition
//----------------------------------------------------------------------------------
static Vector2 cameraAngle = { 0.0f, 0.0f }; // Camera angle in plane XZ
static float cameraTargetDistance = 0.0f; // Camera distance from position to target
static float playerEyesPosition = 1.85f; // Default player eyes position from ground (in meters)

static int cameraMoveControl[6] = { 'W', 'S', 'D', 'A', 'E', 'Q' };
static int cameraPanControlKey = 2; // raylib: MOUSE_MIDDLE_BUTTON
static int cameraAltControlKey = 342; // raylib: KEY_LEFT_ALT
static int cameraSmoothZoomControlKey = 341; // raylib: KEY_LEFT_CONTROL

static int cameraMode = CAMERA_CUSTOM; // Current camera mode
static CameraData CAMERA = {
    .mode = 0,
    .targetDistance = 0,
    .playerEyesPosition = 1.85f,
    .angle = { 0 },
    .moveControl = { 'W', 'S', 'D', 'A', 'E', 'Q' },
    .smoothZoomControl = 341,
    .altControl = 342,
    .panControl = 2
};

//----------------------------------------------------------------------------------
// Module specific Functions Declaration
@@ -241,19 +253,19 @@ void SetCameraMode(Camera camera, int mode)
    float dy = v2.y - v1.y;
    float dz = v2.z - v1.z;

    cameraTargetDistance = sqrtf(dx*dx + dy*dy + dz*dz);
    CAMERA.targetDistance = sqrtf(dx*dx + dy*dy + dz*dz);

    // Camera angle calculation
    cameraAngle.x = atan2f(dx, dz); // Camera angle in plane XZ (0 aligned with Z, move positive CCW)
    cameraAngle.y = atan2f(dy, sqrtf(dx*dx + dz*dz)); // Camera angle in plane XY (0 aligned with X, move positive CW)
    CAMERA.angle.x = atan2f(dx, dz); // Camera angle in plane XZ (0 aligned with Z, move positive CCW)
    CAMERA.angle.y = atan2f(dy, sqrtf(dx*dx + dz*dz)); // Camera angle in plane XY (0 aligned with X, move positive CW)

    playerEyesPosition = camera.position.y;
    CAMERA.playerEyesPosition = camera.position.y;

    // Lock cursor for first person and third person cameras
    if ((mode == CAMERA_FIRST_PERSON) || (mode == CAMERA_THIRD_PERSON)) DisableCursor();
    else EnableCursor();

    cameraMode = mode;
    CAMERA.mode = mode;
}

// Update camera depending on selected mode
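The setup above converts the position-to-target offset into a straight-line distance plus two spherical angles; a standalone check of that math with sample values (not raylib code):

// Standalone check of the target-distance/angle math used in SetCameraMode,
// with illustrative sample values.
#include <math.h>
#include <stdio.h>

int main(void)
{
    // position -> target offset (sample values)
    float dx = 3.0f, dy = 2.0f, dz = 3.0f;

    float targetDistance = sqrtf(dx*dx + dy*dy + dz*dz);     // straight-line distance
    float angleX = atan2f(dx, dz);                           // yaw in the XZ plane (0 aligned with Z)
    float angleY = atan2f(dy, sqrtf(dx*dx + dz*dz));         // pitch above the XZ plane

    printf("distance: %.3f, angle.x: %.3f rad, angle.y: %.3f rad\n", targetDistance, angleX, angleY);
    return 0;
}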
@@ -267,7 +279,7 @@ void UpdateCamera(Camera *camera)
|
||||
static int swingCounter = 0; // Used for 1st person swinging movement
|
||||
static Vector2 previousMousePosition = { 0.0f, 0.0f };
|
||||
|
||||
// TODO: Compute cameraTargetDistance and cameraAngle here
|
||||
// TODO: Compute CAMERA.targetDistance and CAMERA.angle here
|
||||
|
||||
// Mouse movement detection
|
||||
Vector2 mousePositionDelta = { 0.0f, 0.0f };
|
||||
@@ -275,20 +287,20 @@ void UpdateCamera(Camera *camera)
|
||||
int mouseWheelMove = GetMouseWheelMove();
|
||||
|
||||
// Keys input detection
|
||||
bool panKey = IsMouseButtonDown(cameraPanControlKey);
|
||||
bool altKey = IsKeyDown(cameraAltControlKey);
|
||||
bool szoomKey = IsKeyDown(cameraSmoothZoomControlKey);
|
||||
bool panKey = IsMouseButtonDown(CAMERA.panControl);
|
||||
bool altKey = IsKeyDown(CAMERA.altControl);
|
||||
bool szoomKey = IsKeyDown(CAMERA.smoothZoomControl);
|
||||
|
||||
bool direction[6] = { IsKeyDown(cameraMoveControl[MOVE_FRONT]),
|
||||
IsKeyDown(cameraMoveControl[MOVE_BACK]),
|
||||
IsKeyDown(cameraMoveControl[MOVE_RIGHT]),
|
||||
IsKeyDown(cameraMoveControl[MOVE_LEFT]),
|
||||
IsKeyDown(cameraMoveControl[MOVE_UP]),
|
||||
IsKeyDown(cameraMoveControl[MOVE_DOWN]) };
|
||||
bool direction[6] = { IsKeyDown(CAMERA.moveControl[MOVE_FRONT]),
|
||||
IsKeyDown(CAMERA.moveControl[MOVE_BACK]),
|
||||
IsKeyDown(CAMERA.moveControl[MOVE_RIGHT]),
|
||||
IsKeyDown(CAMERA.moveControl[MOVE_LEFT]),
|
||||
IsKeyDown(CAMERA.moveControl[MOVE_UP]),
|
||||
IsKeyDown(CAMERA.moveControl[MOVE_DOWN]) };
|
||||
|
||||
// TODO: Consider touch inputs for camera
|
||||
|
||||
if (cameraMode != CAMERA_CUSTOM)
|
||||
if (CAMERA.mode != CAMERA_CUSTOM)
|
||||
{
|
||||
mousePositionDelta.x = mousePosition.x - previousMousePosition.x;
|
||||
mousePositionDelta.y = mousePosition.y - previousMousePosition.y;
|
||||
@@ -297,58 +309,58 @@ void UpdateCamera(Camera *camera)
|
||||
}
|
||||
|
||||
// Support for multiple automatic camera modes
|
||||
switch (cameraMode)
|
||||
switch (CAMERA.mode)
|
||||
{
|
||||
case CAMERA_FREE:
|
||||
{
|
||||
// Camera zoom
|
||||
if ((cameraTargetDistance < CAMERA_FREE_DISTANCE_MAX_CLAMP) && (mouseWheelMove < 0))
|
||||
if ((CAMERA.targetDistance < CAMERA_FREE_DISTANCE_MAX_CLAMP) && (mouseWheelMove < 0))
|
||||
{
|
||||
cameraTargetDistance -= (mouseWheelMove*CAMERA_MOUSE_SCROLL_SENSITIVITY);
|
||||
CAMERA.targetDistance -= (mouseWheelMove*CAMERA_MOUSE_SCROLL_SENSITIVITY);
|
||||
|
||||
if (cameraTargetDistance > CAMERA_FREE_DISTANCE_MAX_CLAMP) cameraTargetDistance = CAMERA_FREE_DISTANCE_MAX_CLAMP;
|
||||
if (CAMERA.targetDistance > CAMERA_FREE_DISTANCE_MAX_CLAMP) CAMERA.targetDistance = CAMERA_FREE_DISTANCE_MAX_CLAMP;
|
||||
}
|
||||
// Camera looking down
|
||||
// TODO: Review, weird comparisson of cameraTargetDistance == 120.0f?
|
||||
else if ((camera->position.y > camera->target.y) && (cameraTargetDistance == CAMERA_FREE_DISTANCE_MAX_CLAMP) && (mouseWheelMove < 0))
|
||||
// TODO: Review, weird comparisson of CAMERA.targetDistance == 120.0f?
|
||||
else if ((camera->position.y > camera->target.y) && (CAMERA.targetDistance == CAMERA_FREE_DISTANCE_MAX_CLAMP) && (mouseWheelMove < 0))
|
||||
{
|
||||
camera->target.x += mouseWheelMove*(camera->target.x - camera->position.x)*CAMERA_MOUSE_SCROLL_SENSITIVITY/cameraTargetDistance;
|
||||
camera->target.y += mouseWheelMove*(camera->target.y - camera->position.y)*CAMERA_MOUSE_SCROLL_SENSITIVITY/cameraTargetDistance;
|
||||
camera->target.z += mouseWheelMove*(camera->target.z - camera->position.z)*CAMERA_MOUSE_SCROLL_SENSITIVITY/cameraTargetDistance;
|
||||
camera->target.x += mouseWheelMove*(camera->target.x - camera->position.x)*CAMERA_MOUSE_SCROLL_SENSITIVITY/CAMERA.targetDistance;
|
||||
camera->target.y += mouseWheelMove*(camera->target.y - camera->position.y)*CAMERA_MOUSE_SCROLL_SENSITIVITY/CAMERA.targetDistance;
|
||||
camera->target.z += mouseWheelMove*(camera->target.z - camera->position.z)*CAMERA_MOUSE_SCROLL_SENSITIVITY/CAMERA.targetDistance;
|
||||
}
|
||||
else if ((camera->position.y > camera->target.y) && (camera->target.y >= 0))
|
||||
{
|
||||
camera->target.x += mouseWheelMove*(camera->target.x - camera->position.x)*CAMERA_MOUSE_SCROLL_SENSITIVITY/cameraTargetDistance;
|
||||
camera->target.y += mouseWheelMove*(camera->target.y - camera->position.y)*CAMERA_MOUSE_SCROLL_SENSITIVITY/cameraTargetDistance;
|
||||
camera->target.z += mouseWheelMove*(camera->target.z - camera->position.z)*CAMERA_MOUSE_SCROLL_SENSITIVITY/cameraTargetDistance;
|
||||
camera->target.x += mouseWheelMove*(camera->target.x - camera->position.x)*CAMERA_MOUSE_SCROLL_SENSITIVITY/CAMERA.targetDistance;
|
||||
camera->target.y += mouseWheelMove*(camera->target.y - camera->position.y)*CAMERA_MOUSE_SCROLL_SENSITIVITY/CAMERA.targetDistance;
|
||||
camera->target.z += mouseWheelMove*(camera->target.z - camera->position.z)*CAMERA_MOUSE_SCROLL_SENSITIVITY/CAMERA.targetDistance;
|
||||
|
||||
// if (camera->target.y < 0) camera->target.y = -0.001;
|
||||
}
|
||||
else if ((camera->position.y > camera->target.y) && (camera->target.y < 0) && (mouseWheelMove > 0))
|
||||
{
|
||||
cameraTargetDistance -= (mouseWheelMove*CAMERA_MOUSE_SCROLL_SENSITIVITY);
|
||||
if (cameraTargetDistance < CAMERA_FREE_DISTANCE_MIN_CLAMP) cameraTargetDistance = CAMERA_FREE_DISTANCE_MIN_CLAMP;
|
||||
CAMERA.targetDistance -= (mouseWheelMove*CAMERA_MOUSE_SCROLL_SENSITIVITY);
|
||||
if (CAMERA.targetDistance < CAMERA_FREE_DISTANCE_MIN_CLAMP) CAMERA.targetDistance = CAMERA_FREE_DISTANCE_MIN_CLAMP;
|
||||
}
|
||||
// Camera looking up
|
||||
// TODO: Review, weird comparisson of cameraTargetDistance == 120.0f?
|
||||
else if ((camera->position.y < camera->target.y) && (cameraTargetDistance == CAMERA_FREE_DISTANCE_MAX_CLAMP) && (mouseWheelMove < 0))
|
||||
// TODO: Review, weird comparisson of CAMERA.targetDistance == 120.0f?
|
||||
else if ((camera->position.y < camera->target.y) && (CAMERA.targetDistance == CAMERA_FREE_DISTANCE_MAX_CLAMP) && (mouseWheelMove < 0))
|
||||
{
|
||||
camera->target.x += mouseWheelMove*(camera->target.x - camera->position.x)*CAMERA_MOUSE_SCROLL_SENSITIVITY/cameraTargetDistance;
|
||||
camera->target.y += mouseWheelMove*(camera->target.y - camera->position.y)*CAMERA_MOUSE_SCROLL_SENSITIVITY/cameraTargetDistance;
|
||||
camera->target.z += mouseWheelMove*(camera->target.z - camera->position.z)*CAMERA_MOUSE_SCROLL_SENSITIVITY/cameraTargetDistance;
|
||||
camera->target.x += mouseWheelMove*(camera->target.x - camera->position.x)*CAMERA_MOUSE_SCROLL_SENSITIVITY/CAMERA.targetDistance;
|
||||
camera->target.y += mouseWheelMove*(camera->target.y - camera->position.y)*CAMERA_MOUSE_SCROLL_SENSITIVITY/CAMERA.targetDistance;
|
||||
camera->target.z += mouseWheelMove*(camera->target.z - camera->position.z)*CAMERA_MOUSE_SCROLL_SENSITIVITY/CAMERA.targetDistance;
|
||||
}
|
||||
else if ((camera->position.y < camera->target.y) && (camera->target.y <= 0))
|
||||
{
|
||||
camera->target.x += mouseWheelMove*(camera->target.x - camera->position.x)*CAMERA_MOUSE_SCROLL_SENSITIVITY/cameraTargetDistance;
|
||||
camera->target.y += mouseWheelMove*(camera->target.y - camera->position.y)*CAMERA_MOUSE_SCROLL_SENSITIVITY/cameraTargetDistance;
|
||||
camera->target.z += mouseWheelMove*(camera->target.z - camera->position.z)*CAMERA_MOUSE_SCROLL_SENSITIVITY/cameraTargetDistance;
|
||||
camera->target.x += mouseWheelMove*(camera->target.x - camera->position.x)*CAMERA_MOUSE_SCROLL_SENSITIVITY/CAMERA.targetDistance;
|
||||
camera->target.y += mouseWheelMove*(camera->target.y - camera->position.y)*CAMERA_MOUSE_SCROLL_SENSITIVITY/CAMERA.targetDistance;
|
||||
camera->target.z += mouseWheelMove*(camera->target.z - camera->position.z)*CAMERA_MOUSE_SCROLL_SENSITIVITY/CAMERA.targetDistance;
|
||||
|
||||
// if (camera->target.y > 0) camera->target.y = 0.001;
|
||||
}
|
||||
else if ((camera->position.y < camera->target.y) && (camera->target.y > 0) && (mouseWheelMove > 0))
|
||||
{
|
||||
cameraTargetDistance -= (mouseWheelMove*CAMERA_MOUSE_SCROLL_SENSITIVITY);
|
||||
if (cameraTargetDistance < CAMERA_FREE_DISTANCE_MIN_CLAMP) cameraTargetDistance = CAMERA_FREE_DISTANCE_MIN_CLAMP;
|
||||
CAMERA.targetDistance -= (mouseWheelMove*CAMERA_MOUSE_SCROLL_SENSITIVITY);
|
||||
if (CAMERA.targetDistance < CAMERA_FREE_DISTANCE_MIN_CLAMP) CAMERA.targetDistance = CAMERA_FREE_DISTANCE_MIN_CLAMP;
|
||||
}
|
||||
|
||||
// Input keys checks
|
||||
@@ -359,78 +371,78 @@ void UpdateCamera(Camera *camera)
|
||||
if (szoomKey)
|
||||
{
|
||||
// Camera smooth zoom
|
||||
cameraTargetDistance += (mousePositionDelta.y*CAMERA_FREE_SMOOTH_ZOOM_SENSITIVITY);
|
||||
CAMERA.targetDistance += (mousePositionDelta.y*CAMERA_FREE_SMOOTH_ZOOM_SENSITIVITY);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Camera rotation
|
||||
cameraAngle.x += mousePositionDelta.x*-CAMERA_FREE_MOUSE_SENSITIVITY;
|
||||
cameraAngle.y += mousePositionDelta.y*-CAMERA_FREE_MOUSE_SENSITIVITY;
|
||||
CAMERA.angle.x += mousePositionDelta.x*-CAMERA_FREE_MOUSE_SENSITIVITY;
|
||||
CAMERA.angle.y += mousePositionDelta.y*-CAMERA_FREE_MOUSE_SENSITIVITY;
|
||||
|
||||
// Angle clamp
|
||||
if (cameraAngle.y > CAMERA_FREE_MIN_CLAMP*DEG2RAD) cameraAngle.y = CAMERA_FREE_MIN_CLAMP*DEG2RAD;
|
||||
else if (cameraAngle.y < CAMERA_FREE_MAX_CLAMP*DEG2RAD) cameraAngle.y = CAMERA_FREE_MAX_CLAMP*DEG2RAD;
|
||||
if (CAMERA.angle.y > CAMERA_FREE_MIN_CLAMP*DEG2RAD) CAMERA.angle.y = CAMERA_FREE_MIN_CLAMP*DEG2RAD;
|
||||
else if (CAMERA.angle.y < CAMERA_FREE_MAX_CLAMP*DEG2RAD) CAMERA.angle.y = CAMERA_FREE_MAX_CLAMP*DEG2RAD;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Camera panning
|
||||
camera->target.x += ((mousePositionDelta.x*CAMERA_FREE_MOUSE_SENSITIVITY)*cosf(cameraAngle.x) + (mousePositionDelta.y*CAMERA_FREE_MOUSE_SENSITIVITY)*sinf(cameraAngle.x)*sinf(cameraAngle.y))*(cameraTargetDistance/CAMERA_FREE_PANNING_DIVIDER);
|
||||
camera->target.y += ((mousePositionDelta.y*CAMERA_FREE_MOUSE_SENSITIVITY)*cosf(cameraAngle.y))*(cameraTargetDistance/CAMERA_FREE_PANNING_DIVIDER);
|
||||
camera->target.z += ((mousePositionDelta.x*-CAMERA_FREE_MOUSE_SENSITIVITY)*sinf(cameraAngle.x) + (mousePositionDelta.y*CAMERA_FREE_MOUSE_SENSITIVITY)*cosf(cameraAngle.x)*sinf(cameraAngle.y))*(cameraTargetDistance/CAMERA_FREE_PANNING_DIVIDER);
|
||||
camera->target.x += ((mousePositionDelta.x*CAMERA_FREE_MOUSE_SENSITIVITY)*cosf(CAMERA.angle.x) + (mousePositionDelta.y*CAMERA_FREE_MOUSE_SENSITIVITY)*sinf(CAMERA.angle.x)*sinf(CAMERA.angle.y))*(CAMERA.targetDistance/CAMERA_FREE_PANNING_DIVIDER);
|
||||
camera->target.y += ((mousePositionDelta.y*CAMERA_FREE_MOUSE_SENSITIVITY)*cosf(CAMERA.angle.y))*(CAMERA.targetDistance/CAMERA_FREE_PANNING_DIVIDER);
|
||||
camera->target.z += ((mousePositionDelta.x*-CAMERA_FREE_MOUSE_SENSITIVITY)*sinf(CAMERA.angle.x) + (mousePositionDelta.y*CAMERA_FREE_MOUSE_SENSITIVITY)*cosf(CAMERA.angle.x)*sinf(CAMERA.angle.y))*(CAMERA.targetDistance/CAMERA_FREE_PANNING_DIVIDER);
|
||||
}
|
||||
}
|
||||
|
||||
// Update camera position with changes
|
||||
camera->position.x = -sinf(cameraAngle.x)*cameraTargetDistance*cosf(cameraAngle.y) + camera->target.x;
|
||||
camera->position.y = -sinf(cameraAngle.y)*cameraTargetDistance + camera->target.y;
|
||||
camera->position.z = -cosf(cameraAngle.x)*cameraTargetDistance*cosf(cameraAngle.y) + camera->target.z;
|
||||
camera->position.x = -sinf(CAMERA.angle.x)*CAMERA.targetDistance*cosf(CAMERA.angle.y) + camera->target.x;
|
||||
camera->position.y = -sinf(CAMERA.angle.y)*CAMERA.targetDistance + camera->target.y;
|
||||
camera->position.z = -cosf(CAMERA.angle.x)*CAMERA.targetDistance*cosf(CAMERA.angle.y) + camera->target.z;
|
||||
} break;
|
||||
case CAMERA_ORBITAL:
|
||||
{
|
||||
cameraAngle.x += CAMERA_ORBITAL_SPEED; // Camera orbit angle
|
||||
cameraTargetDistance -= (mouseWheelMove*CAMERA_MOUSE_SCROLL_SENSITIVITY); // Camera zoom
|
||||
CAMERA.angle.x += CAMERA_ORBITAL_SPEED; // Camera orbit angle
|
||||
CAMERA.targetDistance -= (mouseWheelMove*CAMERA_MOUSE_SCROLL_SENSITIVITY); // Camera zoom
|
||||
|
||||
// Camera distance clamp
|
||||
if (cameraTargetDistance < CAMERA_THIRD_PERSON_DISTANCE_CLAMP) cameraTargetDistance = CAMERA_THIRD_PERSON_DISTANCE_CLAMP;
|
||||
if (CAMERA.targetDistance < CAMERA_THIRD_PERSON_DISTANCE_CLAMP) CAMERA.targetDistance = CAMERA_THIRD_PERSON_DISTANCE_CLAMP;
|
||||
|
||||
// Update camera position with changes
|
||||
camera->position.x = sinf(cameraAngle.x)*cameraTargetDistance*cosf(cameraAngle.y) + camera->target.x;
|
||||
camera->position.y = ((cameraAngle.y <= 0.0f)? 1 : -1)*sinf(cameraAngle.y)*cameraTargetDistance*sinf(cameraAngle.y) + camera->target.y;
|
||||
camera->position.z = cosf(cameraAngle.x)*cameraTargetDistance*cosf(cameraAngle.y) + camera->target.z;
|
||||
camera->position.x = sinf(CAMERA.angle.x)*CAMERA.targetDistance*cosf(CAMERA.angle.y) + camera->target.x;
|
||||
camera->position.y = ((CAMERA.angle.y <= 0.0f)? 1 : -1)*sinf(CAMERA.angle.y)*CAMERA.targetDistance*sinf(CAMERA.angle.y) + camera->target.y;
|
||||
camera->position.z = cosf(CAMERA.angle.x)*CAMERA.targetDistance*cosf(CAMERA.angle.y) + camera->target.z;
|
||||
|
||||
} break;
|
||||
case CAMERA_FIRST_PERSON:
|
||||
{
|
||||
camera->position.x += (sinf(cameraAngle.x)*direction[MOVE_BACK] -
|
||||
sinf(cameraAngle.x)*direction[MOVE_FRONT] -
|
||||
cosf(cameraAngle.x)*direction[MOVE_LEFT] +
|
||||
cosf(cameraAngle.x)*direction[MOVE_RIGHT])/PLAYER_MOVEMENT_SENSITIVITY;
|
||||
camera->position.x += (sinf(CAMERA.angle.x)*direction[MOVE_BACK] -
|
||||
sinf(CAMERA.angle.x)*direction[MOVE_FRONT] -
|
||||
cosf(CAMERA.angle.x)*direction[MOVE_LEFT] +
|
||||
cosf(CAMERA.angle.x)*direction[MOVE_RIGHT])/PLAYER_MOVEMENT_SENSITIVITY;
|
||||
|
||||
camera->position.y += (sinf(cameraAngle.y)*direction[MOVE_FRONT] -
|
||||
sinf(cameraAngle.y)*direction[MOVE_BACK] +
|
||||
camera->position.y += (sinf(CAMERA.angle.y)*direction[MOVE_FRONT] -
|
||||
sinf(CAMERA.angle.y)*direction[MOVE_BACK] +
|
||||
1.0f*direction[MOVE_UP] - 1.0f*direction[MOVE_DOWN])/PLAYER_MOVEMENT_SENSITIVITY;
|
||||
|
||||
camera->position.z += (cosf(cameraAngle.x)*direction[MOVE_BACK] -
|
||||
cosf(cameraAngle.x)*direction[MOVE_FRONT] +
|
||||
sinf(cameraAngle.x)*direction[MOVE_LEFT] -
|
||||
sinf(cameraAngle.x)*direction[MOVE_RIGHT])/PLAYER_MOVEMENT_SENSITIVITY;
|
||||
camera->position.z += (cosf(CAMERA.angle.x)*direction[MOVE_BACK] -
|
||||
cosf(CAMERA.angle.x)*direction[MOVE_FRONT] +
|
||||
sinf(CAMERA.angle.x)*direction[MOVE_LEFT] -
|
||||
sinf(CAMERA.angle.x)*direction[MOVE_RIGHT])/PLAYER_MOVEMENT_SENSITIVITY;
|
||||
|
||||
bool isMoving = false; // Required for swinging
|
||||
|
||||
for (int i = 0; i < 6; i++) if (direction[i]) { isMoving = true; break; }
|
||||
|
||||
// Camera orientation calculation
|
||||
cameraAngle.x += (mousePositionDelta.x*-CAMERA_MOUSE_MOVE_SENSITIVITY);
|
||||
cameraAngle.y += (mousePositionDelta.y*-CAMERA_MOUSE_MOVE_SENSITIVITY);
|
||||
CAMERA.angle.x += (mousePositionDelta.x*-CAMERA_MOUSE_MOVE_SENSITIVITY);
|
||||
CAMERA.angle.y += (mousePositionDelta.y*-CAMERA_MOUSE_MOVE_SENSITIVITY);
|
||||
|
||||
// Angle clamp
|
||||
if (cameraAngle.y > CAMERA_FIRST_PERSON_MIN_CLAMP*DEG2RAD) cameraAngle.y = CAMERA_FIRST_PERSON_MIN_CLAMP*DEG2RAD;
|
||||
else if (cameraAngle.y < CAMERA_FIRST_PERSON_MAX_CLAMP*DEG2RAD) cameraAngle.y = CAMERA_FIRST_PERSON_MAX_CLAMP*DEG2RAD;
|
||||
if (CAMERA.angle.y > CAMERA_FIRST_PERSON_MIN_CLAMP*DEG2RAD) CAMERA.angle.y = CAMERA_FIRST_PERSON_MIN_CLAMP*DEG2RAD;
|
||||
else if (CAMERA.angle.y < CAMERA_FIRST_PERSON_MAX_CLAMP*DEG2RAD) CAMERA.angle.y = CAMERA_FIRST_PERSON_MAX_CLAMP*DEG2RAD;
|
||||
|
||||
// Recalculate camera target considering translation and rotation
|
||||
Matrix translation = MatrixTranslate(0, 0, (cameraTargetDistance/CAMERA_FREE_PANNING_DIVIDER));
|
||||
Matrix rotation = MatrixRotateXYZ((Vector3){ PI*2 - cameraAngle.y, PI*2 - cameraAngle.x, 0 });
|
||||
Matrix translation = MatrixTranslate(0, 0, (CAMERA.targetDistance/CAMERA_FREE_PANNING_DIVIDER));
|
||||
Matrix rotation = MatrixRotateXYZ((Vector3){ PI*2 - CAMERA.angle.y, PI*2 - CAMERA.angle.x, 0 });
|
||||
Matrix transform = MatrixMultiply(translation, rotation);
|
||||
|
||||
camera->target.x = camera->position.x - transform.m12;
|
||||
@@ -441,7 +453,7 @@ void UpdateCamera(Camera *camera)
|
||||
|
||||
// Camera position update
|
||||
// NOTE: On CAMERA_FIRST_PERSON player Y-movement is limited to player 'eyes position'
|
||||
camera->position.y = playerEyesPosition - sinf(swingCounter/CAMERA_FIRST_PERSON_STEP_TRIGONOMETRIC_DIVIDER)/CAMERA_FIRST_PERSON_STEP_DIVIDER;
|
||||
camera->position.y = CAMERA.playerEyesPosition - sinf(swingCounter/CAMERA_FIRST_PERSON_STEP_TRIGONOMETRIC_DIVIDER)/CAMERA_FIRST_PERSON_STEP_DIVIDER;
|
||||
|
||||
camera->up.x = sinf(swingCounter/(CAMERA_FIRST_PERSON_STEP_TRIGONOMETRIC_DIVIDER*2))/CAMERA_FIRST_PERSON_WAVING_DIVIDER;
|
||||
camera->up.z = -sinf(swingCounter/(CAMERA_FIRST_PERSON_STEP_TRIGONOMETRIC_DIVIDER*2))/CAMERA_FIRST_PERSON_WAVING_DIVIDER;
|
||||
@@ -450,39 +462,39 @@ void UpdateCamera(Camera *camera)
|
||||
} break;
|
||||
case CAMERA_THIRD_PERSON:
|
||||
{
|
||||
camera->position.x += (sinf(cameraAngle.x)*direction[MOVE_BACK] -
|
||||
sinf(cameraAngle.x)*direction[MOVE_FRONT] -
|
||||
cosf(cameraAngle.x)*direction[MOVE_LEFT] +
|
||||
cosf(cameraAngle.x)*direction[MOVE_RIGHT])/PLAYER_MOVEMENT_SENSITIVITY;
|
||||
camera->position.x += (sinf(CAMERA.angle.x)*direction[MOVE_BACK] -
|
||||
sinf(CAMERA.angle.x)*direction[MOVE_FRONT] -
|
||||
cosf(CAMERA.angle.x)*direction[MOVE_LEFT] +
|
||||
cosf(CAMERA.angle.x)*direction[MOVE_RIGHT])/PLAYER_MOVEMENT_SENSITIVITY;
|
||||
|
||||
camera->position.y += (sinf(cameraAngle.y)*direction[MOVE_FRONT] -
|
||||
sinf(cameraAngle.y)*direction[MOVE_BACK] +
|
||||
camera->position.y += (sinf(CAMERA.angle.y)*direction[MOVE_FRONT] -
|
||||
sinf(CAMERA.angle.y)*direction[MOVE_BACK] +
|
||||
1.0f*direction[MOVE_UP] - 1.0f*direction[MOVE_DOWN])/PLAYER_MOVEMENT_SENSITIVITY;
|
||||
|
||||
camera->position.z += (cosf(cameraAngle.x)*direction[MOVE_BACK] -
|
||||
cosf(cameraAngle.x)*direction[MOVE_FRONT] +
|
||||
sinf(cameraAngle.x)*direction[MOVE_LEFT] -
|
||||
sinf(cameraAngle.x)*direction[MOVE_RIGHT])/PLAYER_MOVEMENT_SENSITIVITY;
|
||||
camera->position.z += (cosf(CAMERA.angle.x)*direction[MOVE_BACK] -
|
||||
cosf(CAMERA.angle.x)*direction[MOVE_FRONT] +
|
||||
sinf(CAMERA.angle.x)*direction[MOVE_LEFT] -
|
||||
sinf(CAMERA.angle.x)*direction[MOVE_RIGHT])/PLAYER_MOVEMENT_SENSITIVITY;
|
||||
|
||||
// Camera orientation calculation
|
||||
cameraAngle.x += (mousePositionDelta.x*-CAMERA_MOUSE_MOVE_SENSITIVITY);
|
||||
cameraAngle.y += (mousePositionDelta.y*-CAMERA_MOUSE_MOVE_SENSITIVITY);
|
||||
CAMERA.angle.x += (mousePositionDelta.x*-CAMERA_MOUSE_MOVE_SENSITIVITY);
|
||||
CAMERA.angle.y += (mousePositionDelta.y*-CAMERA_MOUSE_MOVE_SENSITIVITY);
|
||||
|
||||
// Angle clamp
|
||||
if (cameraAngle.y > CAMERA_THIRD_PERSON_MIN_CLAMP*DEG2RAD) cameraAngle.y = CAMERA_THIRD_PERSON_MIN_CLAMP*DEG2RAD;
|
||||
else if (cameraAngle.y < CAMERA_THIRD_PERSON_MAX_CLAMP*DEG2RAD) cameraAngle.y = CAMERA_THIRD_PERSON_MAX_CLAMP*DEG2RAD;
|
||||
if (CAMERA.angle.y > CAMERA_THIRD_PERSON_MIN_CLAMP*DEG2RAD) CAMERA.angle.y = CAMERA_THIRD_PERSON_MIN_CLAMP*DEG2RAD;
|
||||
else if (CAMERA.angle.y < CAMERA_THIRD_PERSON_MAX_CLAMP*DEG2RAD) CAMERA.angle.y = CAMERA_THIRD_PERSON_MAX_CLAMP*DEG2RAD;
|
||||
|
||||
// Camera zoom
|
||||
cameraTargetDistance -= (mouseWheelMove*CAMERA_MOUSE_SCROLL_SENSITIVITY);
|
||||
CAMERA.targetDistance -= (mouseWheelMove*CAMERA_MOUSE_SCROLL_SENSITIVITY);
|
||||
|
||||
// Camera distance clamp
|
||||
if (cameraTargetDistance < CAMERA_THIRD_PERSON_DISTANCE_CLAMP) cameraTargetDistance = CAMERA_THIRD_PERSON_DISTANCE_CLAMP;
|
||||
if (CAMERA.targetDistance < CAMERA_THIRD_PERSON_DISTANCE_CLAMP) CAMERA.targetDistance = CAMERA_THIRD_PERSON_DISTANCE_CLAMP;
|
||||
|
||||
// TODO: It seems camera->position is not correctly updated or some rounding issue makes the camera move straight to camera->target...
|
||||
camera->position.x = sinf(cameraAngle.x)*cameraTargetDistance*cosf(cameraAngle.y) + camera->target.x;
|
||||
if (cameraAngle.y <= 0.0f) camera->position.y = sinf(cameraAngle.y)*cameraTargetDistance*sinf(cameraAngle.y) + camera->target.y;
|
||||
else camera->position.y = -sinf(cameraAngle.y)*cameraTargetDistance*sinf(cameraAngle.y) + camera->target.y;
|
||||
camera->position.z = cosf(cameraAngle.x)*cameraTargetDistance*cosf(cameraAngle.y) + camera->target.z;
|
||||
camera->position.x = sinf(CAMERA.angle.x)*CAMERA.targetDistance*cosf(CAMERA.angle.y) + camera->target.x;
|
||||
if (CAMERA.angle.y <= 0.0f) camera->position.y = sinf(CAMERA.angle.y)*CAMERA.targetDistance*sinf(CAMERA.angle.y) + camera->target.y;
|
||||
else camera->position.y = -sinf(CAMERA.angle.y)*CAMERA.targetDistance*sinf(CAMERA.angle.y) + camera->target.y;
|
||||
camera->position.z = cosf(CAMERA.angle.x)*CAMERA.targetDistance*cosf(CAMERA.angle.y) + camera->target.z;
|
||||
|
||||
} break;
|
||||
default: break;
|
||||
@@ -490,23 +502,23 @@ void UpdateCamera(Camera *camera)
|
||||
}
|
||||
|
||||
// Set camera pan key to combine with mouse movement (free camera)
|
||||
void SetCameraPanControl(int panKey) { cameraPanControlKey = panKey; }
|
||||
void SetCameraPanControl(int panKey) { CAMERA.panControl = panKey; }
|
||||
|
||||
// Set camera alt key to combine with mouse movement (free camera)
|
||||
void SetCameraAltControl(int altKey) { cameraAltControlKey = altKey; }
|
||||
void SetCameraAltControl(int altKey) { CAMERA.altControl = altKey; }
|
||||
|
||||
// Set camera smooth zoom key to combine with mouse (free camera)
|
||||
void SetCameraSmoothZoomControl(int szoomKey) { cameraSmoothZoomControlKey = szoomKey; }
|
||||
void SetCameraSmoothZoomControl(int szoomKey) { CAMERA.smoothZoomControl = szoomKey; }
|
||||
|
||||
// Set camera move controls (1st person and 3rd person cameras)
|
||||
void SetCameraMoveControls(int frontKey, int backKey, int rightKey, int leftKey, int upKey, int downKey)
|
||||
{
|
||||
cameraMoveControl[MOVE_FRONT] = frontKey;
|
||||
cameraMoveControl[MOVE_BACK] = backKey;
|
||||
cameraMoveControl[MOVE_RIGHT] = rightKey;
|
||||
cameraMoveControl[MOVE_LEFT] = leftKey;
|
||||
cameraMoveControl[MOVE_UP] = upKey;
|
||||
cameraMoveControl[MOVE_DOWN] = downKey;
|
||||
CAMERA.moveControl[MOVE_FRONT] = frontKey;
|
||||
CAMERA.moveControl[MOVE_BACK] = backKey;
|
||||
CAMERA.moveControl[MOVE_RIGHT] = rightKey;
|
||||
CAMERA.moveControl[MOVE_LEFT] = leftKey;
|
||||
CAMERA.moveControl[MOVE_UP] = upKey;
|
||||
CAMERA.moveControl[MOVE_DOWN] = downKey;
|
||||
}
|
||||
|
||||
#endif // CAMERA_IMPLEMENTATION
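The module's public API is unchanged by the redesign; only the internals now route through the static CAMERA context. A minimal usage sketch against the raylib 3.0 camera API (window size, FPS and camera values are illustrative):

// Minimal usage sketch of the camera module after the redesign: the public
// functions (SetCameraMode/UpdateCamera) keep their signatures, only the
// internal globals moved into the static CAMERA context.
#include "raylib.h"

int main(void)
{
    InitWindow(800, 450, "camera module sketch");

    Camera camera = { 0 };
    camera.position = (Vector3){ 4.0f, 2.0f, 4.0f };
    camera.target = (Vector3){ 0.0f, 1.8f, 0.0f };
    camera.up = (Vector3){ 0.0f, 1.0f, 0.0f };
    camera.fovy = 60.0f;
    camera.type = CAMERA_PERSPECTIVE;

    SetCameraMode(camera, CAMERA_FIRST_PERSON);    // Initializes the internal CAMERA context
    SetTargetFPS(60);

    while (!WindowShouldClose())
    {
        UpdateCamera(&camera);                     // Reads and updates the internal CAMERA context

        BeginDrawing();
            ClearBackground(RAYWHITE);
            BeginMode3D(camera);
                DrawGrid(10, 1.0f);
            EndMode3D();
        EndDrawing();
    }

    CloseWindow();
    return 0;
}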
src/config.h
@@ -25,7 +25,7 @@
*
**********************************************************************************************/

#define RAYLIB_VERSION "2.6-dev"
#define RAYLIB_VERSION "3.0"

// Edit to control what features Makefile'd raylib is compiled with
#if defined(RAYLIB_CMAKE)
src/core.c (1573 changes): file diff suppressed because it is too large

src/gestures.h (285 changes)
@@ -149,75 +149,76 @@ float GetGesturePinchAngle(void); // Get gesture pinch ang
|
||||
#undef _POSIX_C_SOURCE
|
||||
#define _POSIX_C_SOURCE 199309L // Required for CLOCK_MONOTONIC if compiled with c99 without gnu ext.
|
||||
#endif
|
||||
#include <sys/time.h> // Required for: timespec
|
||||
#include <time.h> // Required for: clock_gettime()
|
||||
#include <sys/time.h> // Required for: timespec
|
||||
#include <time.h> // Required for: clock_gettime()
|
||||
|
||||
#include <math.h> // Required for: atan2(), sqrt()
|
||||
#include <stdint.h> // Required for: uint64_t
|
||||
#include <math.h> // Required for: atan2(), sqrt()
|
||||
#include <stdint.h> // Required for: uint64_t
|
||||
#endif
|
||||
|
||||
#if defined(__APPLE__) // macOS also defines __MACH__
|
||||
#include <mach/clock.h> // Required for: clock_get_time()
|
||||
#include <mach/mach.h> // Required for: mach_timespec_t
|
||||
#if defined(__APPLE__) // macOS also defines __MACH__
|
||||
#include <mach/clock.h> // Required for: clock_get_time()
|
||||
#include <mach/mach.h> // Required for: mach_timespec_t
|
||||
#endif
|
||||
|
||||
//----------------------------------------------------------------------------------
|
||||
// Defines and Macros
|
||||
//----------------------------------------------------------------------------------
|
||||
#define FORCE_TO_SWIPE 0.0005f // Measured in normalized screen units/time
|
||||
#define MINIMUM_DRAG 0.015f // Measured in normalized screen units (0.0f to 1.0f)
|
||||
#define MINIMUM_PINCH 0.005f // Measured in normalized screen units (0.0f to 1.0f)
|
||||
#define TAP_TIMEOUT 300 // Time in milliseconds
|
||||
#define PINCH_TIMEOUT 300 // Time in milliseconds
|
||||
#define DOUBLETAP_RANGE 0.03f // Measured in normalized screen units (0.0f to 1.0f)
|
||||
#define FORCE_TO_SWIPE 0.0005f // Measured in normalized screen units/time
|
||||
#define MINIMUM_DRAG 0.015f // Measured in normalized screen units (0.0f to 1.0f)
|
||||
#define MINIMUM_PINCH 0.005f // Measured in normalized screen units (0.0f to 1.0f)
|
||||
#define TAP_TIMEOUT 300 // Time in milliseconds
|
||||
#define PINCH_TIMEOUT 300 // Time in milliseconds
|
||||
#define DOUBLETAP_RANGE 0.03f // Measured in normalized screen units (0.0f to 1.0f)
|
||||
|
||||
//----------------------------------------------------------------------------------
|
||||
// Types and Structures Definition
|
||||
//----------------------------------------------------------------------------------
|
||||
// ...
|
||||
|
||||
typedef struct {
|
||||
int current; // Current detected gesture
|
||||
unsigned int enabledFlags; // Enabled gestures flags
|
||||
struct {
|
||||
int firstId; // Touch id for first touch point
|
||||
int pointCount; // Touch points counter
|
||||
double eventTime; // Time stamp when an event happened
|
||||
Vector2 upPosition; // Touch up position
|
||||
Vector2 downPositionA; // First touch down position
|
||||
Vector2 downPositionB; // Second touch down position
|
||||
Vector2 downDragPosition; // Touch drag position
|
||||
Vector2 moveDownPositionA; // First touch down position on move
|
||||
Vector2 moveDownPositionB; // Second touch down position on move
|
||||
int tapCounter; // TAP counter (one tap implies TOUCH_DOWN and TOUCH_UP actions)
|
||||
} Touch;
|
||||
struct {
|
||||
bool resetRequired; // HOLD reset to get first touch point again
|
||||
double timeDuration; // HOLD duration in milliseconds
|
||||
} Hold;
|
||||
struct {
|
||||
Vector2 vector; // DRAG vector (between initial and current position)
|
||||
float angle; // DRAG angle (relative to x-axis)
|
||||
float distance; // DRAG distance (from initial touch point to final) (normalized [0..1])
|
||||
float intensity; // DRAG intensity, how far why did the DRAG (pixels per frame)
|
||||
} Drag;
|
||||
struct {
|
||||
bool start; // SWIPE used to define when start measuring GESTURES.Swipe.timeDuration
|
||||
double timeDuration; // SWIPE time to calculate drag intensity
|
||||
} Swipe;
|
||||
struct {
|
||||
Vector2 vector; // PINCH vector (between first and second touch points)
|
||||
float angle; // PINCH angle (relative to x-axis)
|
||||
float distance; // PINCH displacement distance (normalized [0..1])
|
||||
} Pinch;
|
||||
} GesturesData;
|
||||
|
||||
//----------------------------------------------------------------------------------
|
||||
// Global Variables Definition
|
||||
//----------------------------------------------------------------------------------
|
||||
|
||||
// Touch gesture variables
|
||||
static Vector2 touchDownPosition = { 0.0f, 0.0f }; // First touch down position
|
||||
static Vector2 touchDownPosition2 = { 0.0f, 0.0f }; // Second touch down position
|
||||
static Vector2 touchDownDragPosition = { 0.0f, 0.0f }; // Touch drag position
|
||||
static Vector2 touchUpPosition = { 0.0f, 0.0f }; // Touch up position
|
||||
static Vector2 moveDownPosition = { 0.0f, 0.0f }; // First touch down position on move
|
||||
static Vector2 moveDownPosition2 = { 0.0f, 0.0f }; // Second touch down position on move
|
||||
|
||||
static int pointCount = 0; // Touch points counter
|
||||
static int firstTouchId = -1; // Touch id for first touch point
|
||||
static double eventTime = 0.0; // Time stamp when an event happened
|
||||
|
||||
// Tap gesture variables
|
||||
static int tapCounter = 0; // TAP counter (one tap implies TOUCH_DOWN and TOUCH_UP actions)
|
||||
|
||||
// Hold gesture variables
|
||||
static bool resetHold = false; // HOLD reset to get first touch point again
|
||||
static double timeHold = 0.0f; // HOLD duration in milliseconds
|
||||
|
||||
// Drag gesture variables
|
||||
static Vector2 dragVector = { 0.0f , 0.0f }; // DRAG vector (between initial and current position)
|
||||
static float dragAngle = 0.0f; // DRAG angle (relative to x-axis)
|
||||
static float dragDistance = 0.0f; // DRAG distance (from initial touch point to final) (normalized [0..1])
|
||||
static float dragIntensity = 0.0f; // DRAG intensity, how far why did the DRAG (pixels per frame)
|
||||
|
||||
// Swipe gestures variables
|
||||
static bool startMoving = false; // SWIPE used to define when start measuring swipeTime
|
||||
static double swipeTime = 0.0; // SWIPE time to calculate drag intensity
|
||||
|
||||
// Pinch gesture variables
|
||||
static Vector2 pinchVector = { 0.0f , 0.0f }; // PINCH vector (between first and second touch points)
|
||||
static float pinchAngle = 0.0f; // PINCH angle (relative to x-axis)
|
||||
static float pinchDistance = 0.0f; // PINCH displacement distance (normalized [0..1])
|
||||
|
||||
static int currentGesture = GESTURE_NONE; // Current detected gesture
|
||||
|
||||
// Enabled gestures flags, all gestures enabled by default
|
||||
static unsigned int enabledGestures = 0b0000001111111111;
|
||||
static GesturesData GESTURES = {
|
||||
.Touch.firstId = -1,
|
||||
.current = GESTURE_NONE,
|
||||
.enabledFlags = 0b0000001111111111 // All gestures enabled by default
|
||||
};
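The GESTURES initializer above relies on chained designated initializers (.Touch.firstId = -1) to set one nested member while zero-filling the rest; a small standalone check of that C99 form (hypothetical struct, not raylib's):

// Chained designated initializer for a nested member: valid C99/C11, the
// remaining members are zero-initialized.
#include <stdio.h>

typedef struct {
    struct {
        int firstId;
        int pointCount;
    } Touch;
    int current;
} ExampleGestures;

static ExampleGestures DEMO = {
    .Touch.firstId = -1,
    .current = 0
};

int main(void)
{
    printf("firstId: %d, pointCount: %d\n", DEMO.Touch.firstId, DEMO.Touch.pointCount);
    return 0;
}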
|
||||
|
||||
//----------------------------------------------------------------------------------
|
||||
// Module specific Functions Declaration
|
||||
@@ -236,13 +237,13 @@ static double GetCurrentTime(void);
|
||||
// Enable only desired getures to be detected
|
||||
void SetGesturesEnabled(unsigned int gestureFlags)
|
||||
{
|
||||
enabledGestures = gestureFlags;
|
||||
GESTURES.enabledFlags = gestureFlags;
|
||||
}
|
||||
|
||||
// Check if a gesture have been detected
|
||||
bool IsGestureDetected(int gesture)
|
||||
{
|
||||
if ((enabledGestures & currentGesture) == gesture) return true;
|
||||
if ((GESTURES.enabledFlags & GESTURES.current) == gesture) return true;
|
||||
else return false;
|
||||
}
|
||||
|
||||
@@ -250,150 +251,150 @@ bool IsGestureDetected(int gesture)
|
||||
void ProcessGestureEvent(GestureEvent event)
|
||||
{
|
||||
// Reset required variables
|
||||
pointCount = event.pointCount; // Required on UpdateGestures()
|
||||
GESTURES.Touch.pointCount = event.pointCount; // Required on UpdateGestures()
|
||||
|
||||
if (pointCount < 2)
|
||||
if (GESTURES.Touch.pointCount < 2)
|
||||
{
|
||||
if (event.touchAction == TOUCH_DOWN)
|
||||
{
|
||||
tapCounter++; // Tap counter
|
||||
GESTURES.Touch.tapCounter++; // Tap counter
|
||||
|
||||
// Detect GESTURE_DOUBLE_TAP
|
||||
if ((currentGesture == GESTURE_NONE) && (tapCounter >= 2) && ((GetCurrentTime() - eventTime) < TAP_TIMEOUT) && (Vector2Distance(touchDownPosition, event.position[0]) < DOUBLETAP_RANGE))
|
||||
if ((GESTURES.current == GESTURE_NONE) && (GESTURES.Touch.tapCounter >= 2) && ((GetCurrentTime() - GESTURES.Touch.eventTime) < TAP_TIMEOUT) && (Vector2Distance(GESTURES.Touch.downPositionA, event.position[0]) < DOUBLETAP_RANGE))
|
||||
{
|
||||
currentGesture = GESTURE_DOUBLETAP;
|
||||
tapCounter = 0;
|
||||
GESTURES.current = GESTURE_DOUBLETAP;
|
||||
GESTURES.Touch.tapCounter = 0;
|
||||
}
|
||||
else // Detect GESTURE_TAP
|
||||
{
|
||||
tapCounter = 1;
|
||||
currentGesture = GESTURE_TAP;
|
||||
GESTURES.Touch.tapCounter = 1;
|
||||
GESTURES.current = GESTURE_TAP;
|
||||
}
|
||||
|
||||
touchDownPosition = event.position[0];
|
||||
touchDownDragPosition = event.position[0];
|
||||
GESTURES.Touch.downPositionA = event.position[0];
|
||||
GESTURES.Touch.downDragPosition = event.position[0];
|
||||
|
||||
touchUpPosition = touchDownPosition;
|
||||
eventTime = GetCurrentTime();
|
||||
GESTURES.Touch.upPosition = GESTURES.Touch.downPositionA;
|
||||
GESTURES.Touch.eventTime = GetCurrentTime();
|
||||
|
||||
firstTouchId = event.pointerId[0];
|
||||
GESTURES.Touch.firstId = event.pointerId[0];
|
||||
|
||||
dragVector = (Vector2){ 0.0f, 0.0f };
|
||||
GESTURES.Drag.vector = (Vector2){ 0.0f, 0.0f };
|
||||
}
|
||||
else if (event.touchAction == TOUCH_UP)
|
||||
{
|
||||
if (currentGesture == GESTURE_DRAG) touchUpPosition = event.position[0];
|
||||
if (GESTURES.current == GESTURE_DRAG) GESTURES.Touch.upPosition = event.position[0];
|
||||
|
||||
// NOTE: dragIntensity dependend on the resolution of the screen
|
||||
dragDistance = Vector2Distance(touchDownPosition, touchUpPosition);
|
||||
dragIntensity = dragDistance/(float)((GetCurrentTime() - swipeTime));
|
||||
// NOTE: GESTURES.Drag.intensity dependend on the resolution of the screen
|
||||
GESTURES.Drag.distance = Vector2Distance(GESTURES.Touch.downPositionA, GESTURES.Touch.upPosition);
|
||||
GESTURES.Drag.intensity = GESTURES.Drag.distance/(float)((GetCurrentTime() - GESTURES.Swipe.timeDuration));
|
||||
|
||||
startMoving = false;
|
||||
GESTURES.Swipe.start = false;
|
||||
|
||||
// Detect GESTURE_SWIPE
|
||||
if ((dragIntensity > FORCE_TO_SWIPE) && (firstTouchId == event.pointerId[0]))
|
||||
if ((GESTURES.Drag.intensity > FORCE_TO_SWIPE) && (GESTURES.Touch.firstId == event.pointerId[0]))
|
||||
{
|
||||
// NOTE: Angle should be inverted in Y
|
||||
dragAngle = 360.0f - Vector2Angle(touchDownPosition, touchUpPosition);
|
||||
GESTURES.Drag.angle = 360.0f - Vector2Angle(GESTURES.Touch.downPositionA, GESTURES.Touch.upPosition);
|
||||
|
||||
if ((dragAngle < 30) || (dragAngle > 330)) currentGesture = GESTURE_SWIPE_RIGHT; // Right
|
||||
else if ((dragAngle > 30) && (dragAngle < 120)) currentGesture = GESTURE_SWIPE_UP; // Up
|
||||
else if ((dragAngle > 120) && (dragAngle < 210)) currentGesture = GESTURE_SWIPE_LEFT; // Left
|
||||
else if ((dragAngle > 210) && (dragAngle < 300)) currentGesture = GESTURE_SWIPE_DOWN; // Down
|
||||
else currentGesture = GESTURE_NONE;
|
||||
if ((GESTURES.Drag.angle < 30) || (GESTURES.Drag.angle > 330)) GESTURES.current = GESTURE_SWIPE_RIGHT; // Right
|
||||
else if ((GESTURES.Drag.angle > 30) && (GESTURES.Drag.angle < 120)) GESTURES.current = GESTURE_SWIPE_UP; // Up
|
||||
else if ((GESTURES.Drag.angle > 120) && (GESTURES.Drag.angle < 210)) GESTURES.current = GESTURE_SWIPE_LEFT; // Left
|
||||
else if ((GESTURES.Drag.angle > 210) && (GESTURES.Drag.angle < 300)) GESTURES.current = GESTURE_SWIPE_DOWN; // Down
|
||||
else GESTURES.current = GESTURE_NONE;
|
||||
}
|
||||
else
|
||||
{
|
||||
dragDistance = 0.0f;
|
||||
dragIntensity = 0.0f;
|
||||
dragAngle = 0.0f;
|
||||
GESTURES.Drag.distance = 0.0f;
|
||||
GESTURES.Drag.intensity = 0.0f;
|
||||
GESTURES.Drag.angle = 0.0f;
|
||||
|
||||
currentGesture = GESTURE_NONE;
|
||||
GESTURES.current = GESTURE_NONE;
|
||||
}
|
||||
|
||||
touchDownDragPosition = (Vector2){ 0.0f, 0.0f };
|
||||
pointCount = 0;
|
||||
GESTURES.Touch.downDragPosition = (Vector2){ 0.0f, 0.0f };
|
||||
GESTURES.Touch.pointCount = 0;
|
||||
}
|
||||
else if (event.touchAction == TOUCH_MOVE)
|
||||
{
|
||||
if (currentGesture == GESTURE_DRAG) eventTime = GetCurrentTime();
|
||||
if (GESTURES.current == GESTURE_DRAG) GESTURES.Touch.eventTime = GetCurrentTime();
|
||||
|
||||
if (!startMoving)
|
||||
if (!GESTURES.Swipe.start)
|
||||
{
|
||||
swipeTime = GetCurrentTime();
|
||||
startMoving = true;
|
||||
GESTURES.Swipe.timeDuration = GetCurrentTime();
|
||||
GESTURES.Swipe.start = true;
|
||||
}
|
||||
|
||||
moveDownPosition = event.position[0];
|
||||
GESTURES.Touch.moveDownPositionA = event.position[0];
|
||||
|
||||
if (currentGesture == GESTURE_HOLD)
|
||||
if (GESTURES.current == GESTURE_HOLD)
|
||||
{
|
||||
if (resetHold) touchDownPosition = event.position[0];
|
||||
if (GESTURES.Hold.resetRequired) GESTURES.Touch.downPositionA = event.position[0];
|
||||
|
||||
resetHold = false;
|
||||
GESTURES.Hold.resetRequired = false;
|
||||
|
||||
// Detect GESTURE_DRAG
|
||||
if (Vector2Distance(touchDownPosition, moveDownPosition) >= MINIMUM_DRAG)
|
||||
if (Vector2Distance(GESTURES.Touch.downPositionA, GESTURES.Touch.moveDownPositionA) >= MINIMUM_DRAG)
|
||||
{
|
||||
eventTime = GetCurrentTime();
|
||||
currentGesture = GESTURE_DRAG;
|
||||
GESTURES.Touch.eventTime = GetCurrentTime();
|
||||
GESTURES.current = GESTURE_DRAG;
|
||||
}
|
||||
}
|
||||
|
||||
dragVector.x = moveDownPosition.x - touchDownDragPosition.x;
|
||||
dragVector.y = moveDownPosition.y - touchDownDragPosition.y;
|
||||
GESTURES.Drag.vector.x = GESTURES.Touch.moveDownPositionA.x - GESTURES.Touch.downDragPosition.x;
|
||||
GESTURES.Drag.vector.y = GESTURES.Touch.moveDownPositionA.y - GESTURES.Touch.downDragPosition.y;
|
||||
}
|
||||
}
|
||||
else // Two touch points
|
||||
{
|
||||
if (event.touchAction == TOUCH_DOWN)
|
||||
{
|
||||
touchDownPosition = event.position[0];
|
||||
touchDownPosition2 = event.position[1];
|
||||
GESTURES.Touch.downPositionA = event.position[0];
|
||||
GESTURES.Touch.downPositionB = event.position[1];
|
||||
|
||||
//pinchDistance = Vector2Distance(touchDownPosition, touchDownPosition2);
|
||||
//GESTURES.Pinch.distance = Vector2Distance(GESTURES.Touch.downPositionA, GESTURES.Touch.downPositionB);
|
||||
|
||||
pinchVector.x = touchDownPosition2.x - touchDownPosition.x;
|
||||
pinchVector.y = touchDownPosition2.y - touchDownPosition.y;
|
||||
GESTURES.Pinch.vector.x = GESTURES.Touch.downPositionB.x - GESTURES.Touch.downPositionA.x;
|
||||
GESTURES.Pinch.vector.y = GESTURES.Touch.downPositionB.y - GESTURES.Touch.downPositionA.y;
|
||||
|
||||
currentGesture = GESTURE_HOLD;
|
||||
timeHold = GetCurrentTime();
|
||||
GESTURES.current = GESTURE_HOLD;
|
||||
GESTURES.Hold.timeDuration = GetCurrentTime();
|
||||
}
|
||||
else if (event.touchAction == TOUCH_MOVE)
|
||||
{
|
||||
pinchDistance = Vector2Distance(moveDownPosition, moveDownPosition2);
|
||||
GESTURES.Pinch.distance = Vector2Distance(GESTURES.Touch.moveDownPositionA, GESTURES.Touch.moveDownPositionB);
|
||||
|
||||
touchDownPosition = moveDownPosition;
|
||||
touchDownPosition2 = moveDownPosition2;
|
||||
GESTURES.Touch.downPositionA = GESTURES.Touch.moveDownPositionA;
|
||||
GESTURES.Touch.downPositionB = GESTURES.Touch.moveDownPositionB;
|
||||
|
||||
moveDownPosition = event.position[0];
|
||||
moveDownPosition2 = event.position[1];
|
||||
GESTURES.Touch.moveDownPositionA = event.position[0];
|
||||
GESTURES.Touch.moveDownPositionB = event.position[1];
|
||||
|
||||
pinchVector.x = moveDownPosition2.x - moveDownPosition.x;
|
||||
pinchVector.y = moveDownPosition2.y - moveDownPosition.y;
|
||||
GESTURES.Pinch.vector.x = GESTURES.Touch.moveDownPositionB.x - GESTURES.Touch.moveDownPositionA.x;
|
||||
GESTURES.Pinch.vector.y = GESTURES.Touch.moveDownPositionB.y - GESTURES.Touch.moveDownPositionA.y;
|
||||
|
||||
if ((Vector2Distance(touchDownPosition, moveDownPosition) >= MINIMUM_PINCH) || (Vector2Distance(touchDownPosition2, moveDownPosition2) >= MINIMUM_PINCH))
|
||||
if ((Vector2Distance(GESTURES.Touch.downPositionA, GESTURES.Touch.moveDownPositionA) >= MINIMUM_PINCH) || (Vector2Distance(GESTURES.Touch.downPositionB, GESTURES.Touch.moveDownPositionB) >= MINIMUM_PINCH))
|
||||
{
|
||||
if ((Vector2Distance(moveDownPosition, moveDownPosition2) - pinchDistance) < 0) currentGesture = GESTURE_PINCH_IN;
|
||||
else currentGesture = GESTURE_PINCH_OUT;
|
||||
if ((Vector2Distance(GESTURES.Touch.moveDownPositionA, GESTURES.Touch.moveDownPositionB) - GESTURES.Pinch.distance) < 0) GESTURES.current = GESTURE_PINCH_IN;
|
||||
else GESTURES.current = GESTURE_PINCH_OUT;
|
||||
}
|
||||
else
|
||||
{
|
||||
currentGesture = GESTURE_HOLD;
|
||||
timeHold = GetCurrentTime();
|
||||
GESTURES.current = GESTURE_HOLD;
|
||||
GESTURES.Hold.timeDuration = GetCurrentTime();
|
||||
}
|
||||
|
||||
// NOTE: Angle should be inverted in Y
|
||||
pinchAngle = 360.0f - Vector2Angle(moveDownPosition, moveDownPosition2);
|
||||
GESTURES.Pinch.angle = 360.0f - Vector2Angle(GESTURES.Touch.moveDownPositionA, GESTURES.Touch.moveDownPositionB);
|
||||
}
|
||||
else if (event.touchAction == TOUCH_UP)
|
||||
{
|
||||
pinchDistance = 0.0f;
|
||||
pinchAngle = 0.0f;
|
||||
pinchVector = (Vector2){ 0.0f, 0.0f };
|
||||
pointCount = 0;
|
||||
GESTURES.Pinch.distance = 0.0f;
|
||||
GESTURES.Pinch.angle = 0.0f;
|
||||
GESTURES.Pinch.vector = (Vector2){ 0.0f, 0.0f };
|
||||
GESTURES.Touch.pointCount = 0;
|
||||
|
||||
currentGesture = GESTURE_NONE;
|
||||
GESTURES.current = GESTURE_NONE;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -404,23 +405,23 @@ void UpdateGestures(void)
|
||||
// NOTE: Gestures are processed through system callbacks on touch events
|
||||
|
||||
// Detect GESTURE_HOLD
|
||||
if (((currentGesture == GESTURE_TAP) || (currentGesture == GESTURE_DOUBLETAP)) && (pointCount < 2))
|
||||
if (((GESTURES.current == GESTURE_TAP) || (GESTURES.current == GESTURE_DOUBLETAP)) && (GESTURES.Touch.pointCount < 2))
|
||||
{
|
||||
currentGesture = GESTURE_HOLD;
|
||||
timeHold = GetCurrentTime();
|
||||
GESTURES.current = GESTURE_HOLD;
|
||||
GESTURES.Hold.timeDuration = GetCurrentTime();
|
||||
}
|
||||
|
||||
if (((GetCurrentTime() - eventTime) > TAP_TIMEOUT) && (currentGesture == GESTURE_DRAG) && (pointCount < 2))
|
||||
if (((GetCurrentTime() - GESTURES.Touch.eventTime) > TAP_TIMEOUT) && (GESTURES.current == GESTURE_DRAG) && (GESTURES.Touch.pointCount < 2))
|
||||
{
|
||||
currentGesture = GESTURE_HOLD;
|
||||
timeHold = GetCurrentTime();
|
||||
resetHold = true;
|
||||
GESTURES.current = GESTURE_HOLD;
|
||||
GESTURES.Hold.timeDuration = GetCurrentTime();
|
||||
GESTURES.Hold.resetRequired = true;
|
||||
}
|
||||
|
||||
// Detect GESTURE_NONE
|
||||
if ((currentGesture == GESTURE_SWIPE_RIGHT) || (currentGesture == GESTURE_SWIPE_UP) || (currentGesture == GESTURE_SWIPE_LEFT) || (currentGesture == GESTURE_SWIPE_DOWN))
|
||||
if ((GESTURES.current == GESTURE_SWIPE_RIGHT) || (GESTURES.current == GESTURE_SWIPE_UP) || (GESTURES.current == GESTURE_SWIPE_LEFT) || (GESTURES.current == GESTURE_SWIPE_DOWN))
|
||||
{
|
||||
currentGesture = GESTURE_NONE;
|
||||
GESTURES.current = GESTURE_NONE;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -429,14 +430,14 @@ int GetTouchPointsCount(void)
|
||||
{
|
||||
// NOTE: point count is calculated when ProcessGestureEvent(GestureEvent event) is called
|
||||
|
||||
return pointCount;
|
||||
return GESTURES.Touch.pointCount;
|
||||
}
|
||||
|
||||
// Get latest detected gesture
|
||||
int GetGestureDetected(void)
|
||||
{
|
||||
// Get current gesture only if enabled
|
||||
return (enabledGestures & currentGesture);
|
||||
return (GESTURES.enabledFlags & GESTURES.current);
|
||||
}
|
||||
|
||||
// Hold time measured in ms
|
||||
@@ -446,7 +447,7 @@ float GetGestureHoldDuration(void)
|
||||
|
||||
double time = 0.0;
|
||||
|
||||
if (currentGesture == GESTURE_HOLD) time = GetCurrentTime() - timeHold;
|
||||
if (GESTURES.current == GESTURE_HOLD) time = GetCurrentTime() - GESTURES.Hold.timeDuration;
|
||||
|
||||
return (float)time;
|
||||
}
|
||||
@@ -456,7 +457,7 @@ Vector2 GetGestureDragVector(void)
|
||||
{
|
||||
// NOTE: drag vector is calculated on one touch points TOUCH_MOVE
|
||||
|
||||
return dragVector;
|
||||
return GESTURES.Drag.vector;
|
||||
}
|
||||
|
||||
// Get drag angle
|
||||
@@ -465,16 +466,16 @@ float GetGestureDragAngle(void)
|
||||
{
|
||||
// NOTE: drag angle is calculated on one touch points TOUCH_UP
|
||||
|
||||
return dragAngle;
|
||||
return GESTURES.Drag.angle;
|
||||
}
|
||||
|
||||
// Get distance between two pinch points
|
||||
Vector2 GetGesturePinchVector(void)
|
||||
{
|
||||
// NOTE: The position values used for pinchDistance are not modified like the position values of [core.c]-->GetTouchPosition(int index)
|
||||
// NOTE: The position values used for GESTURES.Pinch.distance are not modified like the position values of [core.c]-->GetTouchPosition(int index)
|
||||
// NOTE: pinch distance is calculated on two touch points TOUCH_MOVE
|
||||
|
||||
return pinchVector;
|
||||
return GESTURES.Pinch.vector;
|
||||
}
|
||||
|
||||
// Get angle beween two pinch points
|
||||
@@ -483,7 +484,7 @@ float GetGesturePinchAngle(void)
|
||||
{
|
||||
// NOTE: pinch angle is calculated on two touch points TOUCH_MOVE
|
||||
|
||||
return pinchAngle;
|
||||
return GESTURES.Pinch.angle;
|
||||
}
|
||||
|
||||
//----------------------------------------------------------------------------------
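For reference, the gesture functions touched above keep their public signatures; a minimal usage sketch built only from the functions and flags shown in this diff (window setup and text output are illustrative):

// Hedged usage sketch of the gestures API: enable a subset of gestures, then
// poll the current one each frame.
#include "raylib.h"

int main(void)
{
    InitWindow(800, 450, "gestures sketch");
    SetGesturesEnabled(GESTURE_TAP | GESTURE_DOUBLETAP | GESTURE_HOLD | GESTURE_DRAG);
    SetTargetFPS(60);

    while (!WindowShouldClose())
    {
        int gesture = GetGestureDetected();     // Masked by the enabled-gestures flags

        BeginDrawing();
            ClearBackground(RAYWHITE);
            if (gesture == GESTURE_DRAG)
            {
                Vector2 drag = GetGestureDragVector();
                DrawText(TextFormat("DRAG: %.2f, %.2f", drag.x, drag.y), 10, 10, 20, DARKGRAY);
            }
            else if (gesture == GESTURE_HOLD)
            {
                DrawText(TextFormat("HOLD: %.0f ms", GetGestureHoldDuration()), 10, 10, 20, DARKGRAY);
            }
        EndDrawing();
    }

    CloseWindow();
    return 0;
}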
src/raudio.c (743 changes)
@@ -4,11 +4,11 @@
*
* FEATURES:
*     - Manage audio device (init/close)
*     - Manage raw audio context
*     - Manage mixing channels
*     - Load and unload audio files
*     - Format wave data (sample rate, size, channels)
*     - Play/Stop/Pause/Resume loaded audio
*     - Manage mixing channels
*     - Manage raw audio context
*
* CONFIGURATION:
*
@@ -124,7 +124,15 @@
// After some math, considering a sampleRate of 48000, a buffer refill rate of 1/60 seconds and a
// standard double-buffering system, a 4096 samples buffer has been chosen, it should be enough
// In case of music-stalls, just increase this number
#define AUDIO_BUFFER_SIZE 4096 // PCM data samples (i.e. 16bit, Mono: 8Kb)
#if !defined(AUDIO_BUFFER_SIZE)
    #define AUDIO_BUFFER_SIZE 4096 // PCM data samples (i.e. 16bit, Mono: 8Kb)
#endif

#define DEVICE_FORMAT ma_format_f32
#define DEVICE_CHANNELS 2
#define DEVICE_SAMPLE_RATE 44100

#define MAX_AUDIO_BUFFER_POOL_CHANNELS 16

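Wrapping the define in #if !defined(AUDIO_BUFFER_SIZE) makes the buffer size overridable at build time (for example via a -DAUDIO_BUFFER_SIZE=8192 compiler flag, an assumed invocation); a standalone illustration of the guard pattern:

// Illustration of the override-able default pattern (not raylib code): a
// predefined macro wins, otherwise the guarded default applies.
#include <stdio.h>

#if !defined(AUDIO_BUFFER_SIZE)
    #define AUDIO_BUFFER_SIZE 4096
#endif

int main(void)
{
    printf("AUDIO_BUFFER_SIZE = %d\n", AUDIO_BUFFER_SIZE);
    return 0;
}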
//----------------------------------------------------------------------------------
|
||||
// Types and Structures Definition
|
||||
@@ -155,14 +163,74 @@ typedef enum {
|
||||
} TraceLogType;
|
||||
#endif
|
||||
|
||||
// NOTE: Different logic is used when feeding data to the playback device
|
||||
// depending on whether or not data is streamed (Music vs Sound)
|
||||
typedef enum {
|
||||
AUDIO_BUFFER_USAGE_STATIC = 0,
|
||||
AUDIO_BUFFER_USAGE_STREAM
|
||||
} AudioBufferUsage;
|
||||
|
||||
// Audio buffer structure
|
||||
struct rAudioBuffer {
|
||||
ma_pcm_converter dsp; // PCM data converter
|
||||
|
||||
float volume; // Audio buffer volume
|
||||
float pitch; // Audio buffer pitch
|
||||
|
||||
bool playing; // Audio buffer state: AUDIO_PLAYING
|
||||
bool paused; // Audio buffer state: AUDIO_PAUSED
|
||||
bool looping; // Audio buffer looping, always true for AudioStreams
|
||||
int usage; // Audio buffer usage mode: STATIC or STREAM
|
||||
|
||||
bool isSubBufferProcessed[2]; // SubBuffer processed (virtual double buffer)
|
||||
unsigned int sizeInFrames; // Total buffer size in frames
|
||||
unsigned int frameCursorPos; // Frame cursor position
|
||||
unsigned int totalFramesProcessed; // Total frames processed in this buffer (required for play timming)
|
||||
|
||||
unsigned char *data; // Data buffer, on music stream keeps filling
|
||||
|
||||
rAudioBuffer *next; // Next audio buffer on the list
|
||||
rAudioBuffer *prev; // Previous audio buffer on the list
|
||||
};
|
||||
|
||||
#define AudioBuffer rAudioBuffer // HACK: To avoid CoreAudio (macOS) symbol collision
|
||||
|
||||
// Audio data context
|
||||
typedef struct AudioData {
|
||||
struct {
|
||||
ma_context context; // miniaudio context data
|
||||
ma_device device; // miniaudio device
|
||||
ma_mutex lock; // miniaudio mutex lock
|
||||
bool isReady; // Check if audio device is ready
|
||||
float masterVolume; // Master volume (multiplied on output mixing)
|
||||
} System;
|
||||
struct {
|
||||
AudioBuffer *first; // Pointer to first AudioBuffer in the list
|
||||
AudioBuffer *last; // Pointer to last AudioBuffer in the list
|
||||
} Buffer;
|
||||
struct {
|
||||
AudioBuffer *pool[MAX_AUDIO_BUFFER_POOL_CHANNELS]; // Multichannel AudioBuffer pointers pool
|
||||
unsigned int poolCounter; // AudioBuffer pointers pool counter
|
||||
unsigned int channels[MAX_AUDIO_BUFFER_POOL_CHANNELS]; // AudioBuffer pool channels
|
||||
} MultiChannel;
|
||||
} AudioData;
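The MultiChannel pool in the AudioData struct above backs overlapping sound playback; a hedged usage sketch assuming the raylib 3.0 PlaySoundMulti/StopSoundMulti functions and an illustrative asset path:

// Hedged sketch of multichannel playback driven by the AudioBuffer pool.
#include "raylib.h"

int main(void)
{
    InitWindow(800, 450, "multichannel sound sketch");
    InitAudioDevice();

    Sound fx = LoadSound("resources/coin.wav");   // Hypothetical asset path
    SetTargetFPS(60);

    while (!WindowShouldClose())
    {
        // Each press takes a free buffer from the pool, so overlapping plays don't cut each other off
        if (IsKeyPressed(KEY_SPACE)) PlaySoundMulti(fx);

        BeginDrawing();
            ClearBackground(RAYWHITE);
            DrawText("SPACE: play overlapping sound instances", 10, 10, 20, DARKGRAY);
        EndDrawing();
    }

    StopSoundMulti();       // Stop any sounds still playing from the pool
    UnloadSound(fx);
    CloseAudioDevice();
    CloseWindow();
    return 0;
}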
|
||||
|
||||
//----------------------------------------------------------------------------------
|
||||
// Global Variables Definition
|
||||
//----------------------------------------------------------------------------------
|
||||
// ...
|
||||
static AudioData AUDIO = { 0 }; // Global CORE context
|
||||
|
||||
//----------------------------------------------------------------------------------
|
||||
// Module specific Functions Declaration
|
||||
//----------------------------------------------------------------------------------
|
||||
static void OnLog(ma_context *pContext, ma_device *pDevice, ma_uint32 logLevel, const char *message);
|
||||
static void OnSendAudioDataToDevice(ma_device *pDevice, void *pFramesOut, const void *pFramesInput, ma_uint32 frameCount);
|
||||
static ma_uint32 OnAudioBufferDSPRead(ma_pcm_converter *pDSP, void *pFramesOut, ma_uint32 frameCount, void *pUserData);
|
||||
static void MixAudioFrames(float *framesOut, const float *framesIn, ma_uint32 frameCount, float localVolume);
|
||||
|
||||
static void InitAudioBufferPool(void); // Initialise the multichannel buffer pool
|
||||
static void CloseAudioBufferPool(void); // Close the audio buffers pool
|
||||
|
||||
#if defined(SUPPORT_FILEFORMAT_WAV)
|
||||
static Wave LoadWAV(const char *fileName); // Load WAV file
|
||||
static int SaveWAV(Wave wave, const char *fileName); // Save wave data as WAV file
|
||||
@@ -178,73 +246,15 @@ static Wave LoadMP3(const char *fileName); // Load MP3 file
|
||||
#endif
|
||||
|
||||
#if defined(RAUDIO_STANDALONE)
|
||||
bool IsFileExtension(const char *fileName, const char *ext); // Check file extension
|
||||
void TraceLog(int msgType, const char *text, ...); // Show trace log messages (LOG_INFO, LOG_WARNING, LOG_ERROR, LOG_DEBUG)
|
||||
bool IsFileExtension(const char *fileName, const char *ext);// Check file extension
|
||||
void TraceLog(int msgType, const char *text, ...); // Show trace log messages (LOG_INFO, LOG_WARNING, LOG_ERROR, LOG_DEBUG)
|
||||
#endif
|
||||
|
||||
//----------------------------------------------------------------------------------
|
||||
// AudioBuffer Functionality
|
||||
//----------------------------------------------------------------------------------
|
||||
#define DEVICE_FORMAT ma_format_f32
|
||||
#define DEVICE_CHANNELS 2
|
||||
#define DEVICE_SAMPLE_RATE 44100
|
||||
|
||||
#define MAX_AUDIO_BUFFER_POOL_CHANNELS 16
|
||||
|
||||
typedef enum { AUDIO_BUFFER_USAGE_STATIC = 0, AUDIO_BUFFER_USAGE_STREAM } AudioBufferUsage;
|
||||
|
||||
// Audio buffer structure
|
||||
// NOTE: Slightly different logic is used when feeding data to the
|
||||
// playback device depending on whether or not data is streamed
|
||||
struct rAudioBuffer {
|
||||
ma_pcm_converter dsp; // PCM data converter
|
||||
|
||||
float volume; // Audio buffer volume
|
||||
float pitch; // Audio buffer pitch
|
||||
|
||||
bool playing; // Audio buffer state: AUDIO_PLAYING
|
||||
bool paused; // Audio buffer state: AUDIO_PAUSED
|
||||
bool looping; // Audio buffer looping, always true for AudioStreams
|
||||
int usage; // Audio buffer usage mode: STATIC or STREAM
|
||||
|
||||
bool isSubBufferProcessed[2]; // SubBuffer processed (virtual double buffer)
|
||||
unsigned int frameCursorPos; // Frame cursor position
|
||||
unsigned int bufferSizeInFrames; // Total buffer size in frames
|
||||
unsigned int totalFramesProcessed; // Total frames processed in this buffer (required for play timming)
|
||||
|
||||
unsigned char *buffer; // Data buffer, on music stream keeps filling
|
||||
|
||||
rAudioBuffer *next; // Next audio buffer on the list
|
||||
rAudioBuffer *prev; // Previous audio buffer on the list
|
||||
};
|
||||
|
||||
#define AudioBuffer rAudioBuffer // HACK: To avoid CoreAudio (macOS) symbol collision
|
||||
|
||||
// Audio buffers are tracked in a linked list
static AudioBuffer *firstAudioBuffer = NULL;    // Pointer to first AudioBuffer in the list
static AudioBuffer *lastAudioBuffer = NULL;     // Pointer to last AudioBuffer in the list

// miniaudio global variables
static ma_context context;                      // miniaudio context data
static ma_device device;                        // miniaudio device
static ma_mutex audioLock;                      // miniaudio mutex lock
static bool isAudioInitialized = false;         // Check if audio device is initialized
static float masterVolume = 1.0f;               // Master volume (multiplied on output mixing)

// Multi channel playback global variables
static AudioBuffer *audioBufferPool[MAX_AUDIO_BUFFER_POOL_CHANNELS] = { 0 };            // Multichannel AudioBuffer pointers pool
static unsigned int audioBufferPoolCounter = 0;                                         // AudioBuffer pointers pool counter
static unsigned int audioBufferPoolChannels[MAX_AUDIO_BUFFER_POOL_CHANNELS] = { 0 };    // AudioBuffer pool channels

// miniaudio functions declaration
static void OnLog(ma_context *pContext, ma_device *pDevice, ma_uint32 logLevel, const char *message);
static void OnSendAudioDataToDevice(ma_device *pDevice, void *pFramesOut, const void *pFramesInput, ma_uint32 frameCount);
static ma_uint32 OnAudioBufferDSPRead(ma_pcm_converter *pDSP, void *pFramesOut, ma_uint32 frameCount, void *pUserData);
static void MixAudioFrames(float *framesOut, const float *framesIn, ma_uint32 frameCount, float localVolume);

// AudioBuffer management functions declaration
// NOTE: Those functions are not exposed by raylib... for the moment
AudioBuffer *InitAudioBuffer(ma_format format, ma_uint32 channels, ma_uint32 sampleRate, ma_uint32 bufferSizeInFrames, int usage);
//----------------------------------------------------------------------------------
AudioBuffer *InitAudioBuffer(ma_format format, ma_uint32 channels, ma_uint32 sampleRate, ma_uint32 sizeInFrames, int usage);
void CloseAudioBuffer(AudioBuffer *buffer);
bool IsAudioBufferPlaying(AudioBuffer *buffer);
void PlayAudioBuffer(AudioBuffer *buffer);
@@ -256,248 +266,20 @@ void SetAudioBufferPitch(AudioBuffer *buffer, float pitch);
void TrackAudioBuffer(AudioBuffer *buffer);
void UntrackAudioBuffer(AudioBuffer *buffer);

//----------------------------------------------------------------------------------
|
||||
// miniaudio functions definitions
|
||||
//----------------------------------------------------------------------------------
|
||||
|
||||
// Log callback function
|
||||
static void OnLog(ma_context *pContext, ma_device *pDevice, ma_uint32 logLevel, const char *message)
|
||||
{
|
||||
(void)pContext;
|
||||
(void)pDevice;
|
||||
|
||||
TraceLog(LOG_ERROR, message); // All log messages from miniaudio are errors
|
||||
}
|
||||
|
||||
// Sending audio data to device callback function
|
||||
// NOTE: All the mixing takes place here
|
||||
static void OnSendAudioDataToDevice(ma_device *pDevice, void *pFramesOut, const void *pFramesInput, ma_uint32 frameCount)
|
||||
{
|
||||
(void)pDevice;
|
||||
|
||||
// Mixing is basically just an accumulation, we need to initialize the output buffer to 0
|
||||
memset(pFramesOut, 0, frameCount*pDevice->playback.channels*ma_get_bytes_per_sample(pDevice->playback.format));
|
||||
|
||||
// Using a mutex here for thread-safety which makes things not real-time
|
||||
// This is unlikely to be necessary for this project, but may want to consider how you might want to avoid this
|
||||
ma_mutex_lock(&audioLock);
|
||||
{
|
||||
for (AudioBuffer *audioBuffer = firstAudioBuffer; audioBuffer != NULL; audioBuffer = audioBuffer->next)
|
||||
{
|
||||
// Ignore stopped or paused sounds
|
||||
if (!audioBuffer->playing || audioBuffer->paused) continue;
|
||||
|
||||
ma_uint32 framesRead = 0;
|
||||
|
||||
while (1)
|
||||
{
|
||||
if (framesRead > frameCount)
|
||||
{
|
||||
TraceLog(LOG_DEBUG, "Mixed too many frames from audio buffer");
|
||||
break;
|
||||
}
|
||||
|
||||
if (framesRead == frameCount) break;
|
||||
|
||||
// Just read as much data as we can from the stream
|
||||
ma_uint32 framesToRead = (frameCount - framesRead);
|
||||
|
||||
while (framesToRead > 0)
|
||||
{
|
||||
float tempBuffer[1024]; // 512 frames for stereo
|
||||
|
||||
ma_uint32 framesToReadRightNow = framesToRead;
|
||||
if (framesToReadRightNow > sizeof(tempBuffer)/sizeof(tempBuffer[0])/DEVICE_CHANNELS)
|
||||
{
|
||||
framesToReadRightNow = sizeof(tempBuffer)/sizeof(tempBuffer[0])/DEVICE_CHANNELS;
|
||||
}
|
||||
|
||||
ma_uint32 framesJustRead = (ma_uint32)ma_pcm_converter_read(&audioBuffer->dsp, tempBuffer, framesToReadRightNow);
|
||||
if (framesJustRead > 0)
|
||||
{
|
||||
float *framesOut = (float *)pFramesOut + (framesRead*device.playback.channels);
|
||||
float *framesIn = tempBuffer;
|
||||
|
||||
MixAudioFrames(framesOut, framesIn, framesJustRead, audioBuffer->volume);
|
||||
|
||||
framesToRead -= framesJustRead;
|
||||
framesRead += framesJustRead;
|
||||
}
|
||||
|
||||
if (!audioBuffer->playing)
|
||||
{
|
||||
framesRead = frameCount;
|
||||
break;
|
||||
}
|
||||
|
||||
// If we weren't able to read all the frames we requested, break
|
||||
if (framesJustRead < framesToReadRightNow)
|
||||
{
|
||||
if (!audioBuffer->looping)
|
||||
{
|
||||
StopAudioBuffer(audioBuffer);
|
||||
break;
|
||||
}
|
||||
else
|
||||
{
|
||||
// Should never get here, but just for safety,
|
||||
// move the cursor position back to the start and continue the loop
|
||||
audioBuffer->frameCursorPos = 0;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If for some reason we weren't able to read every frame we'll need to break from the loop
|
||||
// Not doing this could theoretically put us into an infinite loop
|
||||
if (framesToRead > 0) break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ma_mutex_unlock(&audioLock);
|
||||
}
|
||||
|
||||
// DSP read from audio buffer callback function
|
||||
static ma_uint32 OnAudioBufferDSPRead(ma_pcm_converter *pDSP, void *pFramesOut, ma_uint32 frameCount, void *pUserData)
|
||||
{
|
||||
AudioBuffer *audioBuffer = (AudioBuffer *)pUserData;
|
||||
|
||||
ma_uint32 subBufferSizeInFrames = (audioBuffer->bufferSizeInFrames > 1)? audioBuffer->bufferSizeInFrames/2 : audioBuffer->bufferSizeInFrames;
|
||||
ma_uint32 currentSubBufferIndex = audioBuffer->frameCursorPos/subBufferSizeInFrames;
|
||||
|
||||
if (currentSubBufferIndex > 1)
|
||||
{
|
||||
TraceLog(LOG_DEBUG, "Frame cursor position moved too far forward in audio stream");
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Another thread can update the processed state of buffers so
|
||||
// we just take a copy here to try and avoid potential synchronization problems
|
||||
bool isSubBufferProcessed[2];
|
||||
isSubBufferProcessed[0] = audioBuffer->isSubBufferProcessed[0];
|
||||
isSubBufferProcessed[1] = audioBuffer->isSubBufferProcessed[1];
|
||||
|
||||
ma_uint32 frameSizeInBytes = ma_get_bytes_per_sample(audioBuffer->dsp.formatConverterIn.config.formatIn)*audioBuffer->dsp.formatConverterIn.config.channels;
|
||||
|
||||
// Fill out every frame until we find a buffer that's marked as processed. Then fill the remainder with 0
|
||||
ma_uint32 framesRead = 0;
|
||||
while (1)
|
||||
{
|
||||
// We break from this loop differently depending on the buffer's usage
|
||||
// - For static buffers, we simply fill as much data as we can
|
||||
// - For streaming buffers we only fill the halves of the buffer that are processed
|
||||
// Unprocessed halves must keep their audio data in-tact
|
||||
if (audioBuffer->usage == AUDIO_BUFFER_USAGE_STATIC)
|
||||
{
|
||||
if (framesRead >= frameCount) break;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (isSubBufferProcessed[currentSubBufferIndex]) break;
|
||||
}
|
||||
|
||||
ma_uint32 totalFramesRemaining = (frameCount - framesRead);
|
||||
if (totalFramesRemaining == 0) break;
|
||||
|
||||
ma_uint32 framesRemainingInOutputBuffer;
|
||||
if (audioBuffer->usage == AUDIO_BUFFER_USAGE_STATIC)
|
||||
{
|
||||
framesRemainingInOutputBuffer = audioBuffer->bufferSizeInFrames - audioBuffer->frameCursorPos;
|
||||
}
|
||||
else
|
||||
{
|
||||
ma_uint32 firstFrameIndexOfThisSubBuffer = subBufferSizeInFrames*currentSubBufferIndex;
|
||||
framesRemainingInOutputBuffer = subBufferSizeInFrames - (audioBuffer->frameCursorPos - firstFrameIndexOfThisSubBuffer);
|
||||
}
|
||||
|
||||
ma_uint32 framesToRead = totalFramesRemaining;
|
||||
if (framesToRead > framesRemainingInOutputBuffer) framesToRead = framesRemainingInOutputBuffer;
|
||||
|
||||
memcpy((unsigned char *)pFramesOut + (framesRead*frameSizeInBytes), audioBuffer->buffer + (audioBuffer->frameCursorPos*frameSizeInBytes), framesToRead*frameSizeInBytes);
|
||||
audioBuffer->frameCursorPos = (audioBuffer->frameCursorPos + framesToRead)%audioBuffer->bufferSizeInFrames;
|
||||
framesRead += framesToRead;
|
||||
|
||||
// If we've read to the end of the buffer, mark it as processed
|
||||
if (framesToRead == framesRemainingInOutputBuffer)
|
||||
{
|
||||
audioBuffer->isSubBufferProcessed[currentSubBufferIndex] = true;
|
||||
isSubBufferProcessed[currentSubBufferIndex] = true;
|
||||
|
||||
currentSubBufferIndex = (currentSubBufferIndex + 1)%2;
|
||||
|
||||
// We need to break from this loop if we're not looping
|
||||
if (!audioBuffer->looping)
|
||||
{
|
||||
StopAudioBuffer(audioBuffer);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Zero-fill excess
|
||||
ma_uint32 totalFramesRemaining = (frameCount - framesRead);
|
||||
if (totalFramesRemaining > 0)
|
||||
{
|
||||
memset((unsigned char *)pFramesOut + (framesRead*frameSizeInBytes), 0, totalFramesRemaining*frameSizeInBytes);
|
||||
|
||||
// For static buffers we can fill the remaining frames with silence for safety, but we don't want
|
||||
// to report those frames as "read". The reason for this is that the caller uses the return value
|
||||
// to know whether or not a non-looping sound has finished playback.
|
||||
if (audioBuffer->usage != AUDIO_BUFFER_USAGE_STATIC) framesRead += totalFramesRemaining;
|
||||
}
|
||||
|
||||
return framesRead;
|
||||
}
|
||||
|
||||
// This is the main mixing function. Mixing is pretty simple in this project - it's just an accumulation.
|
||||
// NOTE: framesOut is both an input and an output. It will be initially filled with zeros outside of this function.
|
||||
static void MixAudioFrames(float *framesOut, const float *framesIn, ma_uint32 frameCount, float localVolume)
|
||||
{
|
||||
for (ma_uint32 iFrame = 0; iFrame < frameCount; ++iFrame)
|
||||
{
|
||||
for (ma_uint32 iChannel = 0; iChannel < device.playback.channels; ++iChannel)
|
||||
{
|
||||
float *frameOut = framesOut + (iFrame*device.playback.channels);
|
||||
const float *frameIn = framesIn + (iFrame*device.playback.channels);
|
||||
|
||||
frameOut[iChannel] += (frameIn[iChannel]*masterVolume*localVolume);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Initialise the multichannel buffer pool
|
||||
static void InitAudioBufferPool()
|
||||
{
|
||||
// Dummy buffers
|
||||
for (int i = 0; i < MAX_AUDIO_BUFFER_POOL_CHANNELS; i++)
|
||||
{
|
||||
audioBufferPool[i] = InitAudioBuffer(DEVICE_FORMAT, DEVICE_CHANNELS, DEVICE_SAMPLE_RATE, 0, AUDIO_BUFFER_USAGE_STATIC);
|
||||
}
|
||||
}
|
||||
|
||||
// Close the audio buffers pool
|
||||
static void CloseAudioBufferPool()
|
||||
{
|
||||
for (int i = 0; i < MAX_AUDIO_BUFFER_POOL_CHANNELS; i++)
|
||||
{
|
||||
RL_FREE(audioBufferPool[i]->buffer);
|
||||
RL_FREE(audioBufferPool[i]);
|
||||
}
|
||||
}
|
||||
|
||||
//----------------------------------------------------------------------------------
// Module Functions Definition - Audio Device initialization and Closing
//----------------------------------------------------------------------------------
// Initialize audio device
void InitAudioDevice(void)
{
    // TODO: Load AUDIO context memory dynamically?
    AUDIO.System.masterVolume = 1.0f;

    // Init audio context
    ma_context_config contextConfig = ma_context_config_init();
    contextConfig.logCallback = OnLog;
    ma_context_config ctxConfig = ma_context_config_init();
    ctxConfig.logCallback = OnLog;

    ma_result result = ma_context_init(NULL, 0, &contextConfig, &context);
    ma_result result = ma_context_init(NULL, 0, &ctxConfig, &AUDIO.System.context);
    if (result != MA_SUCCESS)
    {
        TraceLog(LOG_ERROR, "Failed to initialize audio context");
@@ -507,78 +289,78 @@ void InitAudioDevice(void)
// Init audio device
|
||||
// NOTE: Using the default device. Format is floating point because it simplifies mixing.
|
||||
ma_device_config config = ma_device_config_init(ma_device_type_playback);
|
||||
config.playback.pDeviceID = NULL; // NULL for the default playback device.
|
||||
config.playback.pDeviceID = NULL; // NULL for the default playback AUDIO.System.device.
|
||||
config.playback.format = DEVICE_FORMAT;
|
||||
config.playback.channels = DEVICE_CHANNELS;
|
||||
config.capture.pDeviceID = NULL; // NULL for the default capture device.
|
||||
config.capture.pDeviceID = NULL; // NULL for the default capture AUDIO.System.device.
|
||||
config.capture.format = ma_format_s16;
|
||||
config.capture.channels = 1;
|
||||
config.sampleRate = DEVICE_SAMPLE_RATE;
|
||||
config.dataCallback = OnSendAudioDataToDevice;
|
||||
config.pUserData = NULL;
|
||||
|
||||
result = ma_device_init(&context, &config, &device);
|
||||
result = ma_device_init(&AUDIO.System.context, &config, &AUDIO.System.device);
|
||||
if (result != MA_SUCCESS)
|
||||
{
|
||||
TraceLog(LOG_ERROR, "Failed to initialize audio playback device");
|
||||
ma_context_uninit(&context);
|
||||
TraceLog(LOG_ERROR, "Failed to initialize audio playback AUDIO.System.device");
|
||||
ma_context_uninit(&AUDIO.System.context);
|
||||
return;
|
||||
}
|
||||
|
||||
// Keep the device running the whole time. May want to consider doing something a bit smarter and only have the device running
|
||||
// while there's at least one sound being played.
|
||||
result = ma_device_start(&device);
|
||||
result = ma_device_start(&AUDIO.System.device);
|
||||
if (result != MA_SUCCESS)
|
||||
{
|
||||
TraceLog(LOG_ERROR, "Failed to start audio playback device");
|
||||
ma_device_uninit(&device);
|
||||
ma_context_uninit(&context);
|
||||
TraceLog(LOG_ERROR, "Failed to start audio playback AUDIO.System.device");
|
||||
ma_device_uninit(&AUDIO.System.device);
|
||||
ma_context_uninit(&AUDIO.System.context);
|
||||
return;
|
||||
}
|
||||
|
||||
// Mixing happens on a seperate thread which means we need to synchronize. I'm using a mutex here to make things simple, but may
|
||||
// want to look at something a bit smarter later on to keep everything real-time, if that's necessary.
|
||||
if (ma_mutex_init(&context, &audioLock) != MA_SUCCESS)
|
||||
if (ma_mutex_init(&AUDIO.System.context, &AUDIO.System.lock) != MA_SUCCESS)
|
||||
{
|
||||
TraceLog(LOG_ERROR, "Failed to create mutex for audio mixing");
|
||||
ma_device_uninit(&device);
|
||||
ma_context_uninit(&context);
|
||||
ma_device_uninit(&AUDIO.System.device);
|
||||
ma_context_uninit(&AUDIO.System.context);
|
||||
return;
|
||||
}
|
||||
|
||||
TraceLog(LOG_INFO, "Audio device initialized successfully");
|
||||
TraceLog(LOG_INFO, "Audio backend: miniaudio / %s", ma_get_backend_name(context.backend));
|
||||
TraceLog(LOG_INFO, "Audio format: %s -> %s", ma_get_format_name(device.playback.format), ma_get_format_name(device.playback.internalFormat));
|
||||
TraceLog(LOG_INFO, "Audio channels: %d -> %d", device.playback.channels, device.playback.internalChannels);
|
||||
TraceLog(LOG_INFO, "Audio sample rate: %d -> %d", device.sampleRate, device.playback.internalSampleRate);
|
||||
TraceLog(LOG_INFO, "Audio buffer size: %d", device.playback.internalBufferSizeInFrames);
|
||||
TraceLog(LOG_INFO, "Audio backend: miniaudio / %s", ma_get_backend_name(AUDIO.System.context.backend));
|
||||
TraceLog(LOG_INFO, "Audio format: %s -> %s", ma_get_format_name(AUDIO.System.device.playback.format), ma_get_format_name(AUDIO.System.device.playback.internalFormat));
|
||||
TraceLog(LOG_INFO, "Audio channels: %d -> %d", AUDIO.System.device.playback.channels, AUDIO.System.device.playback.internalChannels);
|
||||
TraceLog(LOG_INFO, "Audio sample rate: %d -> %d", AUDIO.System.device.sampleRate, AUDIO.System.device.playback.internalSampleRate);
|
||||
TraceLog(LOG_INFO, "Audio buffer size: %d", AUDIO.System.device.playback.internalBufferSizeInFrames);
|
||||
|
||||
InitAudioBufferPool();
|
||||
TraceLog(LOG_INFO, "Audio multichannel pool size: %i", MAX_AUDIO_BUFFER_POOL_CHANNELS);
|
||||
|
||||
isAudioInitialized = true;
|
||||
AUDIO.System.isReady = true;
|
||||
}
|
||||
|
||||
// Close the audio device for all contexts
void CloseAudioDevice(void)
{
    if (isAudioInitialized)
    if (AUDIO.System.isReady)
    {
        ma_mutex_uninit(&audioLock);
        ma_device_uninit(&device);
        ma_context_uninit(&context);
        ma_mutex_uninit(&AUDIO.System.lock);
        ma_device_uninit(&AUDIO.System.device);
        ma_context_uninit(&AUDIO.System.context);

        CloseAudioBufferPool();

        TraceLog(LOG_INFO, "Audio device closed successfully");
        TraceLog(LOG_INFO, "Audio AUDIO.System.device closed successfully");
    }
    else TraceLog(LOG_WARNING, "Could not close audio device because it is not currently initialized");
    else TraceLog(LOG_WARNING, "Could not close audio AUDIO.System.device because it is not currently initialized");
}

// Check if device has been initialized successfully
bool IsAudioDeviceReady(void)
{
    return isAudioInitialized;
    return AUDIO.System.isReady;
}

// Set master volume (listener)
@@ -587,7 +369,7 @@ void SetMasterVolume(float volume)
    if (volume < 0.0f) volume = 0.0f;
    else if (volume > 1.0f) volume = 1.0f;

    masterVolume = volume;
    AUDIO.System.masterVolume = volume;
}
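For reference, a minimal usage sketch of the public device API touched by these hunks, assuming a raylib 3.0 build with default settings; illustrative only, not part of the patch:

// Illustrative only: typical init/volume/shutdown sequence using the API above
#include "raylib.h"

int main(void)
{
    InitAudioDevice();                      // Starts miniaudio context, device and mixing mutex
    if (!IsAudioDeviceReady()) return 1;    // AUDIO.System.isReady reflects successful init

    SetMasterVolume(0.5f);                  // Clamped to [0.0f, 1.0f], applied at mix time

    // ... load and play sounds/music here ...

    CloseAudioDevice();                     // Uninits mutex, device and context, frees the pool
    return 0;
}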

//----------------------------------------------------------------------------------
@@ -595,7 +377,7 @@ void SetMasterVolume(float volume)
//----------------------------------------------------------------------------------

// Initialize a new audio buffer (filled with silence)
|
||||
AudioBuffer *InitAudioBuffer(ma_format format, ma_uint32 channels, ma_uint32 sampleRate, ma_uint32 bufferSizeInFrames, int usage)
|
||||
AudioBuffer *InitAudioBuffer(ma_format format, ma_uint32 channels, ma_uint32 sampleRate, ma_uint32 sizeInFrames, int usage)
|
||||
{
|
||||
AudioBuffer *audioBuffer = (AudioBuffer *)RL_CALLOC(1, sizeof(AudioBuffer));
|
||||
|
||||
@@ -605,7 +387,7 @@ AudioBuffer *InitAudioBuffer(ma_format format, ma_uint32 channels, ma_uint32 sam
|
||||
return NULL;
|
||||
}
|
||||
|
||||
audioBuffer->buffer = RL_CALLOC(bufferSizeInFrames*channels*ma_get_bytes_per_sample(format), 1);
|
||||
audioBuffer->data = RL_CALLOC(sizeInFrames*channels*ma_get_bytes_per_sample(format), 1);
|
||||
|
||||
// Audio data runs through a format converter
|
||||
ma_pcm_converter_config dspConfig;
|
||||
@@ -637,7 +419,7 @@ AudioBuffer *InitAudioBuffer(ma_format format, ma_uint32 channels, ma_uint32 sam
|
||||
audioBuffer->looping = false;
|
||||
audioBuffer->usage = usage;
|
||||
audioBuffer->frameCursorPos = 0;
|
||||
audioBuffer->bufferSizeInFrames = bufferSizeInFrames;
|
||||
audioBuffer->sizeInFrames = sizeInFrames;
|
||||
|
||||
// Buffers should be marked as processed by default so that a call to
|
||||
// UpdateAudioStream() immediately after initialization works correctly
|
||||
@@ -656,7 +438,7 @@ void CloseAudioBuffer(AudioBuffer *buffer)
|
||||
if (buffer != NULL)
|
||||
{
|
||||
UntrackAudioBuffer(buffer);
|
||||
RL_FREE(buffer->buffer);
|
||||
RL_FREE(buffer->data);
|
||||
RL_FREE(buffer);
|
||||
}
|
||||
else TraceLog(LOG_ERROR, "CloseAudioBuffer() : No audio buffer");
|
||||
@@ -748,35 +530,35 @@ void SetAudioBufferPitch(AudioBuffer *buffer, float pitch)
|
||||
// Track audio buffer to linked list next position
|
||||
void TrackAudioBuffer(AudioBuffer *buffer)
|
||||
{
|
||||
ma_mutex_lock(&audioLock);
|
||||
ma_mutex_lock(&AUDIO.System.lock);
|
||||
{
|
||||
if (firstAudioBuffer == NULL) firstAudioBuffer = buffer;
|
||||
if (AUDIO.Buffer.first == NULL) AUDIO.Buffer.first = buffer;
|
||||
else
|
||||
{
|
||||
lastAudioBuffer->next = buffer;
|
||||
buffer->prev = lastAudioBuffer;
|
||||
AUDIO.Buffer.last->next = buffer;
|
||||
buffer->prev = AUDIO.Buffer.last;
|
||||
}
|
||||
|
||||
lastAudioBuffer = buffer;
|
||||
AUDIO.Buffer.last = buffer;
|
||||
}
|
||||
ma_mutex_unlock(&audioLock);
|
||||
ma_mutex_unlock(&AUDIO.System.lock);
|
||||
}
|
||||
|
||||
// Untrack audio buffer from linked list
|
||||
void UntrackAudioBuffer(AudioBuffer *buffer)
|
||||
{
|
||||
ma_mutex_lock(&audioLock);
|
||||
ma_mutex_lock(&AUDIO.System.lock);
|
||||
{
|
||||
if (buffer->prev == NULL) firstAudioBuffer = buffer->next;
|
||||
if (buffer->prev == NULL) AUDIO.Buffer.first = buffer->next;
|
||||
else buffer->prev->next = buffer->next;
|
||||
|
||||
if (buffer->next == NULL) lastAudioBuffer = buffer->prev;
|
||||
if (buffer->next == NULL) AUDIO.Buffer.last = buffer->prev;
|
||||
else buffer->next->prev = buffer->prev;
|
||||
|
||||
buffer->prev = NULL;
|
||||
buffer->next = NULL;
|
||||
}
|
||||
ma_mutex_unlock(&audioLock);
|
||||
ma_mutex_unlock(&AUDIO.System.lock);
|
||||
}
|
||||
|
||||
//----------------------------------------------------------------------------------
|
||||
@@ -829,7 +611,7 @@ Sound LoadSoundFromWave(Wave wave)
|
||||
{
|
||||
// When using miniaudio we need to do our own mixing.
|
||||
// To simplify this we need convert the format of each sound to be consistent with
|
||||
// the format used to open the playback device. We can do this two ways:
|
||||
// the format used to open the playback AUDIO.System.device. We can do this two ways:
|
||||
//
|
||||
// 1) Convert the whole sound in one go at load time (here).
|
||||
// 2) Convert the audio data in chunks at mixing time.
|
||||
@@ -845,7 +627,7 @@ Sound LoadSoundFromWave(Wave wave)
|
||||
AudioBuffer *audioBuffer = InitAudioBuffer(DEVICE_FORMAT, DEVICE_CHANNELS, DEVICE_SAMPLE_RATE, frameCount, AUDIO_BUFFER_USAGE_STATIC);
|
||||
if (audioBuffer == NULL) TraceLog(LOG_WARNING, "LoadSoundFromWave() : Failed to create audio buffer");
|
||||
|
||||
frameCount = (ma_uint32)ma_convert_frames(audioBuffer->buffer, audioBuffer->dsp.formatConverterIn.config.formatIn, audioBuffer->dsp.formatConverterIn.config.channels, audioBuffer->dsp.src.config.sampleRateIn, wave.data, formatIn, wave.channels, wave.sampleRate, frameCountIn);
|
||||
frameCount = (ma_uint32)ma_convert_frames(audioBuffer->data, audioBuffer->dsp.formatConverterIn.config.formatIn, audioBuffer->dsp.formatConverterIn.config.channels, audioBuffer->dsp.src.config.sampleRateIn, wave.data, formatIn, wave.channels, wave.sampleRate, frameCountIn);
|
||||
if (frameCount == 0) TraceLog(LOG_WARNING, "LoadSoundFromWave() : Format conversion failed");
|
||||
|
||||
sound.sampleCount = frameCount*DEVICE_CHANNELS;
|
||||
@@ -884,7 +666,7 @@ void UpdateSound(Sound sound, const void *data, int samplesCount)
|
||||
StopAudioBuffer(audioBuffer);
|
||||
|
||||
// TODO: May want to lock/unlock this since this data buffer is read at mixing time
|
||||
memcpy(audioBuffer->buffer, data, samplesCount*audioBuffer->dsp.formatConverterIn.config.channels*ma_get_bytes_per_sample(audioBuffer->dsp.formatConverterIn.config.formatIn));
|
||||
memcpy(audioBuffer->data, data, samplesCount*audioBuffer->dsp.formatConverterIn.config.channels*ma_get_bytes_per_sample(audioBuffer->dsp.formatConverterIn.config.formatIn));
|
||||
}
|
||||
else TraceLog(LOG_ERROR, "UpdateSound() : Invalid sound - no audio buffer");
|
||||
}
|
||||
@@ -973,13 +755,13 @@ void PlaySoundMulti(Sound sound)
|
||||
// find the first non playing pool entry
|
||||
for (int i = 0; i < MAX_AUDIO_BUFFER_POOL_CHANNELS; i++)
|
||||
{
|
||||
if (audioBufferPoolChannels[i] > oldAge)
|
||||
if (AUDIO.MultiChannel.channels[i] > oldAge)
|
||||
{
|
||||
oldAge = audioBufferPoolChannels[i];
|
||||
oldAge = AUDIO.MultiChannel.channels[i];
|
||||
oldIndex = i;
|
||||
}
|
||||
|
||||
if (!IsAudioBufferPlaying(audioBufferPool[i]))
|
||||
if (!IsAudioBufferPlaying(AUDIO.MultiChannel.pool[i]))
|
||||
{
|
||||
index = i;
|
||||
break;
|
||||
@@ -989,7 +771,7 @@ void PlaySoundMulti(Sound sound)
|
||||
// If no none playing pool members can be index choose the oldest
|
||||
if (index == -1)
|
||||
{
|
||||
TraceLog(LOG_WARNING,"pool age %i ended a sound early no room in buffer pool", audioBufferPoolCounter);
|
||||
TraceLog(LOG_WARNING,"pool age %i ended a sound early no room in buffer pool", AUDIO.MultiChannel.poolCounter);
|
||||
|
||||
if (oldIndex == -1)
|
||||
{
|
||||
@@ -1002,32 +784,32 @@ void PlaySoundMulti(Sound sound)
|
||||
index = oldIndex;
|
||||
|
||||
// Just in case...
|
||||
StopAudioBuffer(audioBufferPool[index]);
|
||||
StopAudioBuffer(AUDIO.MultiChannel.pool[index]);
|
||||
}
|
||||
|
||||
// Experimentally mutex lock doesn't seem to be needed this makes sense
|
||||
// as audioBufferPool[index] isn't playing and the only stuff we're copying
|
||||
// as AUDIO.MultiChannel.pool[index] isn't playing and the only stuff we're copying
|
||||
// shouldn't be changing...
|
||||
|
||||
audioBufferPoolChannels[index] = audioBufferPoolCounter;
|
||||
audioBufferPoolCounter++;
|
||||
AUDIO.MultiChannel.channels[index] = AUDIO.MultiChannel.poolCounter;
|
||||
AUDIO.MultiChannel.poolCounter++;
|
||||
|
||||
audioBufferPool[index]->volume = sound.stream.buffer->volume;
|
||||
audioBufferPool[index]->pitch = sound.stream.buffer->pitch;
|
||||
audioBufferPool[index]->looping = sound.stream.buffer->looping;
|
||||
audioBufferPool[index]->usage = sound.stream.buffer->usage;
|
||||
audioBufferPool[index]->isSubBufferProcessed[0] = false;
|
||||
audioBufferPool[index]->isSubBufferProcessed[1] = false;
|
||||
audioBufferPool[index]->bufferSizeInFrames = sound.stream.buffer->bufferSizeInFrames;
|
||||
audioBufferPool[index]->buffer = sound.stream.buffer->buffer;
|
||||
AUDIO.MultiChannel.pool[index]->volume = sound.stream.buffer->volume;
|
||||
AUDIO.MultiChannel.pool[index]->pitch = sound.stream.buffer->pitch;
|
||||
AUDIO.MultiChannel.pool[index]->looping = sound.stream.buffer->looping;
|
||||
AUDIO.MultiChannel.pool[index]->usage = sound.stream.buffer->usage;
|
||||
AUDIO.MultiChannel.pool[index]->isSubBufferProcessed[0] = false;
|
||||
AUDIO.MultiChannel.pool[index]->isSubBufferProcessed[1] = false;
|
||||
AUDIO.MultiChannel.pool[index]->sizeInFrames = sound.stream.buffer->sizeInFrames;
|
||||
AUDIO.MultiChannel.pool[index]->data = sound.stream.buffer->data;
|
||||
|
||||
PlayAudioBuffer(audioBufferPool[index]);
|
||||
PlayAudioBuffer(AUDIO.MultiChannel.pool[index]);
|
||||
}
|
||||
|
||||
// Stop any sound played with PlaySoundMulti()
|
||||
void StopSoundMulti(void)
|
||||
{
|
||||
for (int i = 0; i < MAX_AUDIO_BUFFER_POOL_CHANNELS; i++) StopAudioBuffer(audioBufferPool[i]);
|
||||
for (int i = 0; i < MAX_AUDIO_BUFFER_POOL_CHANNELS; i++) StopAudioBuffer(AUDIO.MultiChannel.pool[i]);
|
||||
}
|
||||
|
||||
// Get number of sounds playing in the multichannel buffer pool
|
||||
@@ -1037,7 +819,7 @@ int GetSoundsPlaying(void)
|
||||
|
||||
for (int i = 0; i < MAX_AUDIO_BUFFER_POOL_CHANNELS; i++)
|
||||
{
|
||||
if (IsAudioBufferPlaying(audioBufferPool[i])) counter++;
|
||||
if (IsAudioBufferPlaying(AUDIO.MultiChannel.pool[i])) counter++;
|
||||
}
|
||||
|
||||
return counter;
|
||||
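A short usage sketch of the multichannel pool API above (the asset path is hypothetical, not from the patch):

// Illustrative only: firing overlapping instances of one Sound through the pool
Sound shot = LoadSound("resources/shot.wav");           // hypothetical asset path

for (int i = 0; i < 5; i++) PlaySoundMulti(shot);       // Each call grabs a free (or oldest) pool buffer

TraceLog(LOG_INFO, "Sounds playing: %d", GetSoundsPlaying());

StopSoundMulti();                                       // Stops every pool channel at once
UnloadSound(shot);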
@@ -1243,7 +1025,7 @@ Music LoadMusicStream(const char *fileName)
|
||||
|
||||
int result = jar_xm_create_context_from_file(&ctxXm, 48000, fileName);
|
||||
|
||||
if (result == 0) // XM context created successfully
|
||||
if (result == 0) // XM AUDIO.System.context created successfully
|
||||
{
|
||||
music.ctxType = MUSIC_MODULE_XM;
|
||||
jar_xm_set_max_loop_count(ctxXm, 0); // Set infinite number of loops
|
||||
@@ -1374,7 +1156,6 @@ void StopMusicStream(Music music)
|
||||
{
|
||||
StopAudioStream(music.stream);
|
||||
|
||||
// Restart music context
|
||||
switch (music.ctxType)
|
||||
{
|
||||
#if defined(SUPPORT_FILEFORMAT_OGG)
|
||||
@@ -1401,7 +1182,7 @@ void UpdateMusicStream(Music music)
|
||||
{
|
||||
bool streamEnding = false;
|
||||
|
||||
unsigned int subBufferSizeInFrames = music.stream.buffer->bufferSizeInFrames/2;
|
||||
unsigned int subBufferSizeInFrames = music.stream.buffer->sizeInFrames/2;
|
||||
|
||||
// NOTE: Using dynamic allocation because it could require more than 16KB
|
||||
void *pcm = RL_CALLOC(subBufferSizeInFrames*music.stream.channels*music.stream.sampleSize/8, 1);
|
||||
@@ -1559,7 +1340,7 @@ AudioStream InitAudioStream(unsigned int sampleRate, unsigned int sampleSize, un
|
||||
ma_format formatIn = ((stream.sampleSize == 8)? ma_format_u8 : ((stream.sampleSize == 16)? ma_format_s16 : ma_format_f32));
|
||||
|
||||
// The size of a streaming buffer must be at least double the size of a period
|
||||
unsigned int periodSize = device.playback.internalBufferSizeInFrames/device.playback.internalPeriods;
|
||||
unsigned int periodSize = AUDIO.System.device.playback.internalBufferSizeInFrames/AUDIO.System.device.playback.internalPeriods;
|
||||
unsigned int subBufferSize = AUDIO_BUFFER_SIZE;
|
||||
|
||||
if (subBufferSize < periodSize) subBufferSize = periodSize;
|
||||
@@ -1610,8 +1391,8 @@ void UpdateAudioStream(AudioStream stream, const void *data, int samplesCount)
|
||||
subBufferToUpdate = (audioBuffer->isSubBufferProcessed[0])? 0 : 1;
|
||||
}
|
||||
|
||||
ma_uint32 subBufferSizeInFrames = audioBuffer->bufferSizeInFrames/2;
|
||||
unsigned char *subBuffer = audioBuffer->buffer + ((subBufferSizeInFrames*stream.channels*(stream.sampleSize/8))*subBufferToUpdate);
|
||||
ma_uint32 subBufferSizeInFrames = audioBuffer->sizeInFrames/2;
|
||||
unsigned char *subBuffer = audioBuffer->data + ((subBufferSizeInFrames*stream.channels*(stream.sampleSize/8))*subBufferToUpdate);
|
||||
|
||||
// TODO: Get total frames processed on this buffer... DOES NOT WORK.
|
||||
audioBuffer->totalFramesProcessed += subBufferSizeInFrames;
|
||||
@@ -1697,6 +1478,232 @@ void SetAudioStreamPitch(AudioStream stream, float pitch)
|
||||
// Module specific Functions Definition
|
||||
//----------------------------------------------------------------------------------
|
||||
|
||||
// Log callback function
|
||||
static void OnLog(ma_context *pContext, ma_device *pDevice, ma_uint32 logLevel, const char *message)
|
||||
{
|
||||
(void)pContext;
|
||||
(void)pDevice;
|
||||
|
||||
TraceLog(LOG_ERROR, message); // All log messages from miniaudio are errors
|
||||
}
|
||||
|
||||
// Sending audio data to device callback function
|
||||
// NOTE: All the mixing takes place here
|
||||
static void OnSendAudioDataToDevice(ma_device *pDevice, void *pFramesOut, const void *pFramesInput, ma_uint32 frameCount)
|
||||
{
|
||||
(void)pDevice;
|
||||
|
||||
// Mixing is basically just an accumulation, we need to initialize the output buffer to 0
|
||||
memset(pFramesOut, 0, frameCount*pDevice->playback.channels*ma_get_bytes_per_sample(pDevice->playback.format));
|
||||
|
||||
// Using a mutex here for thread-safety which makes things not real-time
|
||||
// This is unlikely to be necessary for this project, but may want to consider how you might want to avoid this
|
||||
ma_mutex_lock(&AUDIO.System.lock);
|
||||
{
|
||||
for (AudioBuffer *audioBuffer = AUDIO.Buffer.first; audioBuffer != NULL; audioBuffer = audioBuffer->next)
|
||||
{
|
||||
// Ignore stopped or paused sounds
|
||||
if (!audioBuffer->playing || audioBuffer->paused) continue;
|
||||
|
||||
ma_uint32 framesRead = 0;
|
||||
|
||||
while (1)
|
||||
{
|
||||
if (framesRead > frameCount)
|
||||
{
|
||||
TraceLog(LOG_DEBUG, "Mixed too many frames from audio buffer");
|
||||
break;
|
||||
}
|
||||
|
||||
if (framesRead == frameCount) break;
|
||||
|
||||
// Just read as much data as we can from the stream
|
||||
ma_uint32 framesToRead = (frameCount - framesRead);
|
||||
|
||||
while (framesToRead > 0)
|
||||
{
|
||||
float tempBuffer[1024]; // 512 frames for stereo
|
||||
|
||||
ma_uint32 framesToReadRightNow = framesToRead;
|
||||
if (framesToReadRightNow > sizeof(tempBuffer)/sizeof(tempBuffer[0])/DEVICE_CHANNELS)
|
||||
{
|
||||
framesToReadRightNow = sizeof(tempBuffer)/sizeof(tempBuffer[0])/DEVICE_CHANNELS;
|
||||
}
|
||||
|
||||
ma_uint32 framesJustRead = (ma_uint32)ma_pcm_converter_read(&audioBuffer->dsp, tempBuffer, framesToReadRightNow);
|
||||
if (framesJustRead > 0)
|
||||
{
|
||||
float *framesOut = (float *)pFramesOut + (framesRead*AUDIO.System.device.playback.channels);
|
||||
float *framesIn = tempBuffer;
|
||||
|
||||
MixAudioFrames(framesOut, framesIn, framesJustRead, audioBuffer->volume);
|
||||
|
||||
framesToRead -= framesJustRead;
|
||||
framesRead += framesJustRead;
|
||||
}
|
||||
|
||||
if (!audioBuffer->playing)
|
||||
{
|
||||
framesRead = frameCount;
|
||||
break;
|
||||
}
|
||||
|
||||
// If we weren't able to read all the frames we requested, break
|
||||
if (framesJustRead < framesToReadRightNow)
|
||||
{
|
||||
if (!audioBuffer->looping)
|
||||
{
|
||||
StopAudioBuffer(audioBuffer);
|
||||
break;
|
||||
}
|
||||
else
|
||||
{
|
||||
// Should never get here, but just for safety,
|
||||
// move the cursor position back to the start and continue the loop
|
||||
audioBuffer->frameCursorPos = 0;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If for some reason we weren't able to read every frame we'll need to break from the loop
|
||||
// Not doing this could theoretically put us into an infinite loop
|
||||
if (framesToRead > 0) break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ma_mutex_unlock(&AUDIO.System.lock);
|
||||
}
|
||||
|
||||
// DSP read from audio buffer callback function
|
||||
static ma_uint32 OnAudioBufferDSPRead(ma_pcm_converter *pDSP, void *pFramesOut, ma_uint32 frameCount, void *pUserData)
|
||||
{
|
||||
AudioBuffer *audioBuffer = (AudioBuffer *)pUserData;
|
||||
|
||||
ma_uint32 subBufferSizeInFrames = (audioBuffer->sizeInFrames > 1)? audioBuffer->sizeInFrames/2 : audioBuffer->sizeInFrames;
|
||||
ma_uint32 currentSubBufferIndex = audioBuffer->frameCursorPos/subBufferSizeInFrames;
|
||||
|
||||
if (currentSubBufferIndex > 1)
|
||||
{
|
||||
TraceLog(LOG_DEBUG, "Frame cursor position moved too far forward in audio stream");
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Another thread can update the processed state of buffers so
|
||||
// we just take a copy here to try and avoid potential synchronization problems
|
||||
bool isSubBufferProcessed[2];
|
||||
isSubBufferProcessed[0] = audioBuffer->isSubBufferProcessed[0];
|
||||
isSubBufferProcessed[1] = audioBuffer->isSubBufferProcessed[1];
|
||||
|
||||
ma_uint32 frameSizeInBytes = ma_get_bytes_per_sample(audioBuffer->dsp.formatConverterIn.config.formatIn)*audioBuffer->dsp.formatConverterIn.config.channels;
|
||||
|
||||
// Fill out every frame until we find a buffer that's marked as processed. Then fill the remainder with 0
|
||||
ma_uint32 framesRead = 0;
|
||||
while (1)
|
||||
{
|
||||
// We break from this loop differently depending on the buffer's usage
|
||||
// - For static buffers, we simply fill as much data as we can
|
||||
// - For streaming buffers we only fill the halves of the buffer that are processed
|
||||
// Unprocessed halves must keep their audio data in-tact
|
||||
if (audioBuffer->usage == AUDIO_BUFFER_USAGE_STATIC)
|
||||
{
|
||||
if (framesRead >= frameCount) break;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (isSubBufferProcessed[currentSubBufferIndex]) break;
|
||||
}
|
||||
|
||||
ma_uint32 totalFramesRemaining = (frameCount - framesRead);
|
||||
if (totalFramesRemaining == 0) break;
|
||||
|
||||
ma_uint32 framesRemainingInOutputBuffer;
|
||||
if (audioBuffer->usage == AUDIO_BUFFER_USAGE_STATIC)
|
||||
{
|
||||
framesRemainingInOutputBuffer = audioBuffer->sizeInFrames - audioBuffer->frameCursorPos;
|
||||
}
|
||||
else
|
||||
{
|
||||
ma_uint32 firstFrameIndexOfThisSubBuffer = subBufferSizeInFrames*currentSubBufferIndex;
|
||||
framesRemainingInOutputBuffer = subBufferSizeInFrames - (audioBuffer->frameCursorPos - firstFrameIndexOfThisSubBuffer);
|
||||
}
|
||||
|
||||
ma_uint32 framesToRead = totalFramesRemaining;
|
||||
if (framesToRead > framesRemainingInOutputBuffer) framesToRead = framesRemainingInOutputBuffer;
|
||||
|
||||
memcpy((unsigned char *)pFramesOut + (framesRead*frameSizeInBytes), audioBuffer->data + (audioBuffer->frameCursorPos*frameSizeInBytes), framesToRead*frameSizeInBytes);
|
||||
audioBuffer->frameCursorPos = (audioBuffer->frameCursorPos + framesToRead)%audioBuffer->sizeInFrames;
|
||||
framesRead += framesToRead;
|
||||
|
||||
// If we've read to the end of the buffer, mark it as processed
|
||||
if (framesToRead == framesRemainingInOutputBuffer)
|
||||
{
|
||||
audioBuffer->isSubBufferProcessed[currentSubBufferIndex] = true;
|
||||
isSubBufferProcessed[currentSubBufferIndex] = true;
|
||||
|
||||
currentSubBufferIndex = (currentSubBufferIndex + 1)%2;
|
||||
|
||||
// We need to break from this loop if we're not looping
|
||||
if (!audioBuffer->looping)
|
||||
{
|
||||
StopAudioBuffer(audioBuffer);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Zero-fill excess
|
||||
ma_uint32 totalFramesRemaining = (frameCount - framesRead);
|
||||
if (totalFramesRemaining > 0)
|
||||
{
|
||||
memset((unsigned char *)pFramesOut + (framesRead*frameSizeInBytes), 0, totalFramesRemaining*frameSizeInBytes);
|
||||
|
||||
// For static buffers we can fill the remaining frames with silence for safety, but we don't want
|
||||
// to report those frames as "read". The reason for this is that the caller uses the return value
|
||||
// to know whether or not a non-looping sound has finished playback.
|
||||
if (audioBuffer->usage != AUDIO_BUFFER_USAGE_STATIC) framesRead += totalFramesRemaining;
|
||||
}
|
||||
|
||||
return framesRead;
|
||||
}
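The double-buffer indexing above can be summarized numerically (illustrative values, not from the patch):

// Example: with sizeInFrames = 4096, subBufferSizeInFrames = 4096/2 = 2048:
//   frameCursorPos 0..2047    -> currentSubBufferIndex 0
//   frameCursorPos 2048..4095 -> currentSubBufferIndex 1
// Once a half has been fully read it is flagged isSubBufferProcessed[half] = true,
// which is what UpdateAudioStream() checks before refilling that half.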
|
||||
|
||||
// This is the main mixing function. Mixing is pretty simple in this project - it's just an accumulation.
|
||||
// NOTE: framesOut is both an input and an output. It will be initially filled with zeros outside of this function.
|
||||
static void MixAudioFrames(float *framesOut, const float *framesIn, ma_uint32 frameCount, float localVolume)
|
||||
{
|
||||
for (ma_uint32 iFrame = 0; iFrame < frameCount; ++iFrame)
|
||||
{
|
||||
for (ma_uint32 iChannel = 0; iChannel < AUDIO.System.device.playback.channels; ++iChannel)
|
||||
{
|
||||
float *frameOut = framesOut + (iFrame*AUDIO.System.device.playback.channels);
|
||||
const float *frameIn = framesIn + (iFrame*AUDIO.System.device.playback.channels);
|
||||
|
||||
frameOut[iChannel] += (frameIn[iChannel]*AUDIO.System.masterVolume*localVolume);
|
||||
}
|
||||
}
|
||||
}
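As a worked example of the accumulation above (values chosen for illustration only):

// With AUDIO.System.masterVolume = 0.8f and a buffer localVolume = 0.5f,
// an input sample of 1.0f adds 1.0f*0.8f*0.5f = 0.4f to the output frame;
// two buffers playing the same sample would accumulate to 0.8f (no clipping guard here).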
|
||||
|
||||
// Initialise the multichannel buffer pool
|
||||
static void InitAudioBufferPool(void)
|
||||
{
|
||||
// Dummy buffers
|
||||
for (int i = 0; i < MAX_AUDIO_BUFFER_POOL_CHANNELS; i++)
|
||||
{
|
||||
AUDIO.MultiChannel.pool[i] = InitAudioBuffer(DEVICE_FORMAT, DEVICE_CHANNELS, DEVICE_SAMPLE_RATE, 0, AUDIO_BUFFER_USAGE_STATIC);
|
||||
}
|
||||
}
|
||||
|
||||
// Close the audio buffers pool
|
||||
static void CloseAudioBufferPool(void)
|
||||
{
|
||||
for (int i = 0; i < MAX_AUDIO_BUFFER_POOL_CHANNELS; i++)
|
||||
{
|
||||
RL_FREE(AUDIO.MultiChannel.pool[i]->data);
|
||||
RL_FREE(AUDIO.MultiChannel.pool[i]);
|
||||
}
|
||||
}
|
||||
|
||||
#if defined(SUPPORT_FILEFORMAT_WAV)
|
||||
// Load WAV file into Wave structure
|
||||
static Wave LoadWAV(const char *fileName)
|
||||
|
@@ -1,8 +1,8 @@
|
||||
GLFW_ICON ICON "raylib.ico"
|
||||
|
||||
1 VERSIONINFO
|
||||
FILEVERSION 2,6,0,0
|
||||
PRODUCTVERSION 2,6,0,0
|
||||
FILEVERSION 3,0,0,0
|
||||
PRODUCTVERSION 3,0,0,0
|
||||
BEGIN
|
||||
BLOCK "StringFileInfo"
|
||||
BEGIN
|
||||
@@ -11,12 +11,12 @@ BEGIN
|
||||
BEGIN
|
||||
//VALUE "CompanyName", "raylib technologies"
|
||||
VALUE "FileDescription", "raylib dynamic library (www.raylib.com)"
|
||||
VALUE "FileVersion", "2.6.0"
|
||||
VALUE "FileVersion", "3.0.0"
|
||||
VALUE "InternalName", "raylib_dll"
|
||||
VALUE "LegalCopyright", "(c) 2020 Ramon Santamaria (@raysan5)"
|
||||
//VALUE "OriginalFilename", "raylib.dll"
|
||||
VALUE "ProductName", "raylib"
|
||||
VALUE "ProductVersion", "2.6.0"
|
||||
VALUE "ProductVersion", "3.0.0"
|
||||
END
|
||||
END
|
||||
BLOCK "VarFileInfo"
|
||||
|
@@ -1081,8 +1081,6 @@ RLAPI void DrawTriangleStrip(Vector2 *points, int pointsCount, Color color);
RLAPI void DrawPoly(Vector2 center, int sides, float radius, float rotation, Color color);          // Draw a regular polygon (Vector version)
RLAPI void DrawPolyLines(Vector2 center, int sides, float radius, float rotation, Color color);     // Draw a polygon outline of n sides

RLAPI void SetShapesTexture(Texture2D texture, Rectangle source);       // Define default texture used to draw shapes

// Basic shapes collision detection functions
RLAPI bool CheckCollisionRecs(Rectangle rec1, Rectangle rec2);                                      // Check collision between two rectangles
RLAPI bool CheckCollisionCircles(Vector2 center1, float radius1, Vector2 center2, float radius2);   // Check collision between two circles
@@ -1329,6 +1327,9 @@ RLAPI void UnloadShader(Shader shader); // Unl

RLAPI Shader GetShaderDefault(void);        // Get default shader
RLAPI Texture2D GetTextureDefault(void);    // Get default texture
RLAPI Texture2D GetShapesTexture(void);     // Get texture to draw shapes
RLAPI Rectangle GetShapesTextureRec(void);  // Get texture rectangle to draw shapes
RLAPI void SetShapesTexture(Texture2D texture, Rectangle source);       // Define default texture used to draw shapes

// Shader configuration functions
RLAPI int GetShaderLocation(Shader shader, const char *uniformName);    // Get shader uniform location

@@ -1,8 +1,8 @@
|
||||
GLFW_ICON ICON "raylib.ico"
|
||||
|
||||
1 VERSIONINFO
|
||||
FILEVERSION 2,6,0,0
|
||||
PRODUCTVERSION 2,6,0,0
|
||||
FILEVERSION 3,0,0,0
|
||||
PRODUCTVERSION 3,0,0,0
|
||||
BEGIN
|
||||
BLOCK "StringFileInfo"
|
||||
BEGIN
|
||||
@@ -11,12 +11,12 @@ BEGIN
|
||||
BEGIN
|
||||
//VALUE "CompanyName", "raylib technologies"
|
||||
VALUE "FileDescription", "raylib application (www.raylib.com)"
|
||||
VALUE "FileVersion", "2.6.0"
|
||||
VALUE "FileVersion", "3.0.0"
|
||||
VALUE "InternalName", "raylib app"
|
||||
VALUE "LegalCopyright", "(c) 2020 Ramon Santamaria (@raysan5)"
|
||||
//VALUE "OriginalFilename", "raylib_app.exe"
|
||||
VALUE "ProductName", "raylib game"
|
||||
VALUE "ProductVersion", "2.6.0"
|
||||
VALUE "ProductVersion", "3.0.0"
|
||||
END
|
||||
END
|
||||
BLOCK "VarFileInfo"
|
||||
|
 955    src/rlgl.h      (file diff suppressed because it is too large)
 198    src/shapes.c
@@ -58,14 +58,12 @@
//----------------------------------------------------------------------------------
// Global Variables Definition
//----------------------------------------------------------------------------------
static Texture2D texShapes = { 0 };     // Texture used on shapes drawing (usually a white)
static Rectangle recTexShapes = { 0 };  // Texture source rectangle used on shapes drawing
// ...

//----------------------------------------------------------------------------------
// Module specific Functions Declaration
//----------------------------------------------------------------------------------
static float EaseCubicInOut(float t, float b, float c, float d);    // Cubic easing
static Texture2D GetShapesTexture(void);                            // Get texture to draw shapes

//----------------------------------------------------------------------------------
// Module Functions Definition
@@ -138,16 +136,16 @@ void DrawLineEx(Vector2 startPos, Vector2 endPos, float thick, Color color)
|
||||
rlColor4ub(color.r, color.g, color.b, color.a);
|
||||
rlNormal3f(0.0f, 0.0f, 1.0f);
|
||||
|
||||
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
|
||||
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
|
||||
rlVertex2f(0.0f, 0.0f);
|
||||
|
||||
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
|
||||
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
|
||||
rlVertex2f(0.0f, thick);
|
||||
|
||||
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
|
||||
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
|
||||
rlVertex2f(d, thick);
|
||||
|
||||
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
|
||||
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
|
||||
rlVertex2f(d, 0.0f);
|
||||
rlEnd();
|
||||
rlPopMatrix();
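A minimal sketch of the texture-coordinate mapping these hunks repeat for every quad corner; the helper name is hypothetical and not part of the patch:

// Illustrative helper (hypothetical): normalized UVs of the shapes-texture source rectangle
static void GetShapesTexCoords(float *u0, float *v0, float *u1, float *v1)
{
    Texture2D tex = GetShapesTexture();
    Rectangle rec = GetShapesTextureRec();
    *u0 = rec.x/tex.width;                  // left
    *v0 = rec.y/tex.height;                 // top
    *u1 = (rec.x + rec.width)/tex.width;    // right
    *v1 = (rec.y + rec.height)/tex.height;  // bottom
}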
|
||||
@@ -241,16 +239,16 @@ void DrawCircleSector(Vector2 center, float radius, int startAngle, int endAngle
|
||||
{
|
||||
rlColor4ub(color.r, color.g, color.b, color.a);
|
||||
|
||||
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
|
||||
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
|
||||
rlVertex2f(center.x, center.y);
|
||||
|
||||
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
|
||||
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
|
||||
rlVertex2f(center.x + sinf(DEG2RAD*angle)*radius, center.y + cosf(DEG2RAD*angle)*radius);
|
||||
|
||||
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
|
||||
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
|
||||
rlVertex2f(center.x + sinf(DEG2RAD*(angle + stepLength))*radius, center.y + cosf(DEG2RAD*(angle + stepLength))*radius);
|
||||
|
||||
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
|
||||
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
|
||||
rlVertex2f(center.x + sinf(DEG2RAD*(angle + stepLength*2))*radius, center.y + cosf(DEG2RAD*(angle + stepLength*2))*radius);
|
||||
|
||||
angle += (stepLength*2);
|
||||
@@ -261,16 +259,16 @@ void DrawCircleSector(Vector2 center, float radius, int startAngle, int endAngle
|
||||
{
|
||||
rlColor4ub(color.r, color.g, color.b, color.a);
|
||||
|
||||
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
|
||||
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
|
||||
rlVertex2f(center.x, center.y);
|
||||
|
||||
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
|
||||
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
|
||||
rlVertex2f(center.x + sinf(DEG2RAD*angle)*radius, center.y + cosf(DEG2RAD*angle)*radius);
|
||||
|
||||
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
|
||||
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
|
||||
rlVertex2f(center.x + sinf(DEG2RAD*(angle + stepLength))*radius, center.y + cosf(DEG2RAD*(angle + stepLength))*radius);
|
||||
|
||||
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
|
||||
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
|
||||
rlVertex2f(center.x, center.y);
|
||||
}
|
||||
rlEnd();
|
||||
@@ -489,16 +487,16 @@ void DrawRing(Vector2 center, float innerRadius, float outerRadius, int startAng
|
||||
{
|
||||
rlColor4ub(color.r, color.g, color.b, color.a);
|
||||
|
||||
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
|
||||
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
|
||||
rlVertex2f(center.x + sinf(DEG2RAD*angle)*innerRadius, center.y + cosf(DEG2RAD*angle)*innerRadius);
|
||||
|
||||
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
|
||||
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
|
||||
rlVertex2f(center.x + sinf(DEG2RAD*angle)*outerRadius, center.y + cosf(DEG2RAD*angle)*outerRadius);
|
||||
|
||||
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
|
||||
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
|
||||
rlVertex2f(center.x + sinf(DEG2RAD*(angle + stepLength))*outerRadius, center.y + cosf(DEG2RAD*(angle + stepLength))*outerRadius);
|
||||
|
||||
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
|
||||
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
|
||||
rlVertex2f(center.x + sinf(DEG2RAD*(angle + stepLength))*innerRadius, center.y + cosf(DEG2RAD*(angle + stepLength))*innerRadius);
|
||||
|
||||
angle += stepLength;
|
||||
@@ -643,16 +641,16 @@ void DrawRectanglePro(Rectangle rec, Vector2 origin, float rotation, Color color
|
||||
rlNormal3f(0.0f, 0.0f, 1.0f);
|
||||
rlColor4ub(color.r, color.g, color.b, color.a);
|
||||
|
||||
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
|
||||
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
|
||||
rlVertex2f(0.0f, 0.0f);
|
||||
|
||||
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
|
||||
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
|
||||
rlVertex2f(0.0f, rec.height);
|
||||
|
||||
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
|
||||
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
|
||||
rlVertex2f(rec.width, rec.height);
|
||||
|
||||
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
|
||||
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
|
||||
rlVertex2f(rec.width, 0.0f);
|
||||
rlEnd();
|
||||
rlPopMatrix();
|
||||
@@ -686,19 +684,19 @@ void DrawRectangleGradientEx(Rectangle rec, Color col1, Color col2, Color col3,
|
||||
|
||||
// NOTE: Default raylib font character 95 is a white square
|
||||
rlColor4ub(col1.r, col1.g, col1.b, col1.a);
|
||||
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
|
||||
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
|
||||
rlVertex2f(rec.x, rec.y);
|
||||
|
||||
rlColor4ub(col2.r, col2.g, col2.b, col2.a);
|
||||
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
|
||||
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
|
||||
rlVertex2f(rec.x, rec.y + rec.height);
|
||||
|
||||
rlColor4ub(col3.r, col3.g, col3.b, col3.a);
|
||||
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
|
||||
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
|
||||
rlVertex2f(rec.x + rec.width, rec.y + rec.height);
|
||||
|
||||
rlColor4ub(col4.r, col4.g, col4.b, col4.a);
|
||||
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
|
||||
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
|
||||
rlVertex2f(rec.x + rec.width, rec.y);
|
||||
rlEnd();
|
||||
rlPopMatrix();
|
||||
@@ -821,13 +819,13 @@ void DrawRectangleRounded(Rectangle rec, float roundness, int segments, Color co
|
||||
for (int i = 0; i < segments/2; i++)
|
||||
{
|
||||
rlColor4ub(color.r, color.g, color.b, color.a);
|
||||
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
|
||||
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
|
||||
rlVertex2f(center.x, center.y);
|
||||
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
|
||||
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
|
||||
rlVertex2f(center.x + sinf(DEG2RAD*angle)*radius, center.y + cosf(DEG2RAD*angle)*radius);
|
||||
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
|
||||
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
|
||||
rlVertex2f(center.x + sinf(DEG2RAD*(angle + stepLength))*radius, center.y + cosf(DEG2RAD*(angle + stepLength))*radius);
|
||||
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
|
||||
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
|
||||
rlVertex2f(center.x + sinf(DEG2RAD*(angle + stepLength*2))*radius, center.y + cosf(DEG2RAD*(angle + stepLength*2))*radius);
|
||||
angle += (stepLength*2);
|
||||
}
|
||||
@@ -835,70 +833,70 @@ void DrawRectangleRounded(Rectangle rec, float roundness, int segments, Color co
if (segments%2)
{
rlColor4ub(color.r, color.g, color.b, color.a);
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(center.x, center.y);
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(center.x + sinf(DEG2RAD*angle)*radius, center.y + cosf(DEG2RAD*angle)*radius);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(center.x + sinf(DEG2RAD*(angle + stepLength))*radius, center.y + cosf(DEG2RAD*(angle + stepLength))*radius);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(center.x, center.y);
}
}

// [2] Upper Rectangle
rlColor4ub(color.r, color.g, color.b, color.a);
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[0].x, point[0].y);
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[8].x, point[8].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[9].x, point[9].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[1].x, point[1].y);

// [4] Right Rectangle
rlColor4ub(color.r, color.g, color.b, color.a);
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[2].x, point[2].y);
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[9].x, point[9].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[10].x, point[10].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[3].x, point[3].y);

// [6] Bottom Rectangle
rlColor4ub(color.r, color.g, color.b, color.a);
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[11].x, point[11].y);
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[5].x, point[5].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[4].x, point[4].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[10].x, point[10].y);

// [8] Left Rectangle
rlColor4ub(color.r, color.g, color.b, color.a);
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[7].x, point[7].y);
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[6].x, point[6].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[11].x, point[11].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[8].x, point[8].y);

// [9] Middle Rectangle
rlColor4ub(color.r, color.g, color.b, color.a);
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[8].x, point[8].y);
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[11].x, point[11].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[10].x, point[10].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[9].x, point[9].y);

rlEnd();
@@ -1053,13 +1051,13 @@ void DrawRectangleRoundedLines(Rectangle rec, float roundness, int segments, int
for (int i = 0; i < segments; i++)
{
rlColor4ub(color.r, color.g, color.b, color.a);
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(center.x + sinf(DEG2RAD*angle)*innerRadius, center.y + cosf(DEG2RAD*angle)*innerRadius);
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(center.x + sinf(DEG2RAD*angle)*outerRadius, center.y + cosf(DEG2RAD*angle)*outerRadius);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(center.x + sinf(DEG2RAD*(angle + stepLength))*outerRadius, center.y + cosf(DEG2RAD*(angle + stepLength))*outerRadius);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(center.x + sinf(DEG2RAD*(angle + stepLength))*innerRadius, center.y + cosf(DEG2RAD*(angle + stepLength))*innerRadius);

angle += stepLength;
@@ -1068,46 +1066,46 @@ void DrawRectangleRoundedLines(Rectangle rec, float roundness, int segments, int

// Upper rectangle
rlColor4ub(color.r, color.g, color.b, color.a);
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[0].x, point[0].y);
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[8].x, point[8].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[9].x, point[9].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[1].x, point[1].y);

// Right rectangle
rlColor4ub(color.r, color.g, color.b, color.a);
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[2].x, point[2].y);
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[10].x, point[10].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[11].x, point[11].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[3].x, point[3].y);

// Lower rectangle
rlColor4ub(color.r, color.g, color.b, color.a);
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[13].x, point[13].y);
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[5].x, point[5].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[4].x, point[4].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[12].x, point[12].y);

// Left rectangle
rlColor4ub(color.r, color.g, color.b, color.a);
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[15].x, point[15].y);
rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[7].x, point[7].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(point[6].x, point[6].y);
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(point[14].x, point[14].y);

rlEnd();
@@ -1221,16 +1219,16 @@ void DrawTriangle(Vector2 v1, Vector2 v2, Vector2 v3, Color color)
rlBegin(RL_QUADS);
rlColor4ub(color.r, color.g, color.b, color.a);

rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(v1.x, v1.y);

rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(v2.x, v2.y);

rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(v2.x, v2.y);

rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(v3.x, v3.y);
rlEnd();
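Note that the duplicated rlVertex2f(v2.x, v2.y) above is intentional: with the quads batch (rlBegin(RL_QUADS)), the triangle is emitted as a degenerate quad whose second vertex is repeated. For comparison only, a sketch of how the same triangle could be emitted through the plain triangles path, assuming v1, v2, v3 and color are the parameters from the function signature in the hunk header:

rlBegin(RL_TRIANGLES);
    rlColor4ub(color.r, color.g, color.b, color.a);
    rlVertex2f(v1.x, v1.y);    // Vertices in counter-clockwise order, as raylib expects for 2D shapes
    rlVertex2f(v2.x, v2.y);
    rlVertex2f(v3.x, v3.y);
rlEnd();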
@@ -1278,16 +1276,16 @@ void DrawTriangleFan(Vector2 *points, int pointsCount, Color color)

for (int i = 1; i < pointsCount - 1; i++)
{
rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(points[0].x, points[0].y);

rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(points[i].x, points[i].y);

rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(points[i + 1].x, points[i + 1].y);

rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(points[i + 1].x, points[i + 1].y);
}
rlEnd();
@@ -1345,17 +1343,17 @@ void DrawPoly(Vector2 center, int sides, float radius, float rotation, Color col
{
rlColor4ub(color.r, color.g, color.b, color.a);

rlTexCoord2f(recTexShapes.x/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(0, 0);

rlTexCoord2f(recTexShapes.x/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f(GetShapesTextureRec().x/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(sinf(DEG2RAD*centralAngle)*radius, cosf(DEG2RAD*centralAngle)*radius);

rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, (recTexShapes.y + recTexShapes.height)/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, (GetShapesTextureRec().y + GetShapesTextureRec().height)/GetShapesTexture().height);
rlVertex2f(sinf(DEG2RAD*centralAngle)*radius, cosf(DEG2RAD*centralAngle)*radius);

centralAngle += 360.0f/(float)sides;
rlTexCoord2f((recTexShapes.x + recTexShapes.width)/texShapes.width, recTexShapes.y/texShapes.height);
rlTexCoord2f((GetShapesTextureRec().x + GetShapesTextureRec().width)/GetShapesTexture().width, GetShapesTextureRec().y/GetShapesTexture().height);
rlVertex2f(sinf(DEG2RAD*centralAngle)*radius, cosf(DEG2RAD*centralAngle)*radius);
}
rlEnd();
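For reference, a small usage sketch of DrawPoly()/DrawPolyLines(), whose bodies these hunks run through; the calls are meant to sit inside a normal BeginDrawing()/EndDrawing() frame and the values are arbitrary:

// Regular hexagon plus its outline (illustrative values only)
DrawPoly((Vector2){ 400.0f, 300.0f }, 6, 80.0f, 0.0f, BROWN);
DrawPolyLines((Vector2){ 400.0f, 300.0f }, 6, 90.0f, 0.0f, DARKBROWN);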
@@ -1402,13 +1400,6 @@ void DrawPolyLines(Vector2 center, int sides, float radius, float rotation, Colo
rlPopMatrix();
}

// Define default texture used to draw shapes
void SetShapesTexture(Texture2D texture, Rectangle source)
{
texShapes = texture;
recTexShapes = source;
}

//----------------------------------------------------------------------------------
// Module Functions Definition - Collision Detection functions
//----------------------------------------------------------------------------------
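The SetShapesTexture() definition is removed from this module here, while the public entry point itself stays available through raylib.h. A minimal usage sketch, assuming an atlas image of your own ("resources/shapes_atlas.png" below is a placeholder path):

#include "raylib.h"

int main(void)
{
    InitWindow(800, 450, "custom shapes texture");

    // Placeholder asset path -- any texture region with usable texels works here
    Texture2D atlas = LoadTexture("resources/shapes_atlas.png");
    SetShapesTexture(atlas, (Rectangle){ 0.0f, 0.0f, (float)atlas.width, (float)atlas.height });

    while (!WindowShouldClose())
    {
        BeginDrawing();
            ClearBackground(RAYWHITE);
            DrawRectangleRounded((Rectangle){ 100, 100, 200, 80 }, 0.3f, 9, SKYBLUE);
        EndDrawing();
    }

    UnloadTexture(atlas);
    CloseWindow();
    return 0;
}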
@@ -1576,22 +1567,3 @@ static float EaseCubicInOut(float t, float b, float c, float d)

return 0.5f*c*(t*t*t + 2.0f) + b;
}

// Get texture to draw shapes (RAII)
static Texture2D GetShapesTexture(void)
{
if (texShapes.id == 0)
{
#if defined(SUPPORT_FONT_TEXTURE)
texShapes = GetFontDefault().texture; // Use font texture white character
Rectangle rec = GetFontDefault().recs[95];
// NOTE: We setup a 1px padding on char rectangle to avoid texture bleeding on MSAA filtering
recTexShapes = (Rectangle){ rec.x + 1, rec.y + 1, rec.width - 2, rec.height - 2 };
#else
texShapes = GetTextureDefault(); // Use default white texture
recTexShapes = (Rectangle){ 0.0f, 0.0f, 1.0f, 1.0f };
#endif
}

return texShapes;
}
12
src/utils.h
@@ -32,6 +32,18 @@
#include <android/asset_manager.h> // Required for: AAssetManager
#endif

#if defined(SUPPORT_TRACELOG)
#define TRACELOG(level, ...) TraceLog(level, __VA_ARGS__)

#if defined(SUPPORT_TRACELOG_DEBUG)
#define TRACELOGD(...) TraceLog(LOG_DEBUG, __VA_ARGS__)
#else
#define TRACELOGD(...) (void)0
#endif
#else
#define TRACELOG(level, ...) (void)0
#endif

//----------------------------------------------------------------------------------
// Some basic Defines
//----------------------------------------------------------------------------------
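When SUPPORT_TRACELOG is defined, TRACELOG() forwards to TraceLog(), and TRACELOGD() only survives if SUPPORT_TRACELOG_DEBUG is also defined; otherwise the macros compile down to a (void)0 no-op. A minimal usage sketch for module code that includes utils.h (LoadSomething() and its messages are made up for illustration):

#include <stdbool.h>
#include <stddef.h>
#include "raylib.h"     // TraceLog(), LOG_WARNING, LOG_DEBUG
#include "utils.h"      // TRACELOG()/TRACELOGD() macros

static bool LoadSomething(const char *fileName)
{
    if (fileName == NULL)
    {
        TRACELOG(LOG_WARNING, "FILEIO: File name provided is not valid");
        return false;
    }

    TRACELOGD("FILEIO: Loading file: %s", fileName);   // Compiled out unless SUPPORT_TRACELOG_DEBUG is defined
    return true;
}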