Merge branch 'master' into skytrias-vendor-additions

This commit is contained in:
gingerBill
2023-06-23 14:33:01 +01:00
committed by GitHub
542 changed files with 71002 additions and 39774 deletions

View File

@@ -104,13 +104,13 @@ jobs:
run: ./odin check examples/all -vet -strict-style -target:linux_arm64
timeout-minutes: 10
build_windows:
runs-on: windows-2019
runs-on: windows-2022
steps:
- uses: actions/checkout@v1
- name: build Odin
shell: cmd
run: |
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat
call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat
./build.bat 1
- name: Odin version
run: ./odin version
@@ -121,58 +121,65 @@ jobs:
- name: Odin check
shell: cmd
run: |
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat
call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat
odin check examples/demo -vet
timeout-minutes: 10
- name: Odin run
shell: cmd
run: |
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat
call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat
odin run examples/demo
timeout-minutes: 10
- name: Odin run -debug
shell: cmd
run: |
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat
call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat
odin run examples/demo -debug
timeout-minutes: 10
- name: Odin check examples/all
shell: cmd
run: |
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat
call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat
odin check examples/all -strict-style
timeout-minutes: 10
- name: Core library tests
shell: cmd
run: |
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat
call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat
cd tests\core
call build.bat
timeout-minutes: 10
- name: Vendor library tests
shell: cmd
run: |
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat
call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat
cd tests\vendor
call build.bat
timeout-minutes: 10
- name: Odin internals tests
shell: cmd
run: |
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat
call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat
cd tests\internal
call build.bat
timeout-minutes: 10
- name: Odin documentation tests
shell: cmd
run: |
call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat
cd tests\documentation
call build.bat
timeout-minutes: 10
- name: core:math/big tests
shell: cmd
run: |
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat
call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat
cd tests\core\math\big
call build.bat
timeout-minutes: 10
- name: Odin check examples/all for Windows 32bits
shell: cmd
run: |
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat
call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat
odin check examples/all -strict-style -target:windows_i386
timeout-minutes: 10

View File

@@ -7,18 +7,18 @@ on:
jobs:
build_windows:
runs-on: windows-2019
runs-on: windows-2022
steps:
- uses: actions/checkout@v1
- name: build Odin
shell: cmd
run: |
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat
call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat
./build.bat 1 1
- name: Odin run
shell: cmd
run: |
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat
call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat
odin run examples/demo
- name: Copy artifacts
run: |

View File

@@ -13,7 +13,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Close Stale Issues
uses: actions/stale@v4.1.0
uses: actions/stale@v7.0.0
with:
# stale-issue-message: |
# Hello!
@@ -36,7 +36,7 @@ jobs:
# The motivation for this automation is to help prioritize issues in the backlog and not ignore, reject, or belittle anyone..
days-before-stale: 120
days-before-close: 30
days-before-close: -1
exempt-draft-pr: true
ascending: true
operations-per-run: 1000

2
.gitignore vendored
View File

@@ -22,6 +22,8 @@ bld/
[Oo]bj/
[Ll]og/
![Cc]ore/[Ll]og/
tests/documentation/verify/
tests/documentation/all.odin-doc
# Visual Studio 2015 cache/options directory
.vs/
# Visual Studio Code options directory

View File

@@ -11,7 +11,7 @@
<img src="https://img.shields.io/badge/platforms-Windows%20|%20Linux%20|%20macOS-green.svg">
</a>
<br>
<a href="https://discord.gg/odinlang">
<a href="https://discord.com/invite/sVBPHEv">
<img src="https://img.shields.io/discord/568138951836172421?logo=discord">
</a>
<a href="https://github.com/odin-lang/odin/actions">

View File

@@ -3,18 +3,20 @@
setlocal EnableDelayedExpansion
where /Q cl.exe || (
set __VSCMD_ARG_NO_LOGO=1
for /f "tokens=*" %%i in ('"C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe" -latest -requires Microsoft.VisualStudio.Workload.NativeDesktop -property installationPath') do set VS=%%i
if "!VS!" equ "" (
echo ERROR: Visual Studio installation not found
exit /b 1
)
call "!VS!\VC\Auxiliary\Build\vcvarsall.bat" amd64 || exit /b 1
set __VSCMD_ARG_NO_LOGO=1
for /f "tokens=*" %%i in ('"C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe" -latest -requires Microsoft.VisualStudio.Workload.NativeDesktop -property installationPath') do set VS=%%i
if "!VS!" equ "" (
echo ERROR: Visual Studio installation not found
exit /b 1
)
call "!VS!\VC\Auxiliary\Build\vcvarsall.bat" amd64 || exit /b 1
)
if "%VSCMD_ARG_TGT_ARCH%" neq "x64" (
echo ERROR: please run this from MSVC x64 native tools command prompt, 32-bit target is not supported!
exit /b 1
if "%ODIN_IGNORE_MSVC_CHECK%" == "" (
echo ERROR: please run this from MSVC x64 native tools command prompt, 32-bit target is not supported!
exit /b 1
)
)
for /f "usebackq tokens=1,2 delims=,=- " %%i in (`wmic os get LocalDateTime /value`) do @if %%i==LocalDateTime (
@@ -48,8 +50,11 @@ set odin_version_raw="dev-%curr_year%-%curr_month%"
set compiler_flags= -nologo -Oi -TP -fp:precise -Gm- -MP -FC -EHsc- -GR- -GF
set compiler_defines= -DODIN_VERSION_RAW=\"%odin_version_raw%\"
if not exist .git\ goto skip_git_hash
for /f %%i in ('git rev-parse --short HEAD') do set GIT_SHA=%%i
if %ERRORLEVEL% equ 0 set compiler_defines=%compiler_defines% -DGIT_SHA=\"%GIT_SHA%\"
:skip_git_hash
if %nightly% equ 1 set compiler_defines=%compiler_defines% -DNIGHTLY
if %release_mode% EQU 0 ( rem Debug
@@ -62,12 +67,14 @@ if %release_mode% EQU 0 ( rem Debug
set compiler_warnings= ^
-W4 -WX ^
-wd4100 -wd4101 -wd4127 -wd4146 ^
-wd4505 ^
-wd4456 -wd4457
set compiler_includes= ^
/Isrc\
set libs= ^
kernel32.lib ^
Synchronization.lib ^
bin\llvm\windows\LLVM-C.lib
set linker_flags= -incremental:no -opt:ref -subsystem:console
@@ -94,4 +101,4 @@ if %release_mode% EQU 0 odin run examples/demo
del *.obj > NUL 2> NUL
:end_of_build
:end_of_build

View File

@@ -6,13 +6,18 @@ set -eu
: ${CXXFLAGS=}
: ${LDFLAGS=}
: ${ODIN_VERSION=dev-$(date +"%Y-%m")}
: ${GIT_SHA=}
CPPFLAGS="$CPPFLAGS -DODIN_VERSION_RAW=\"$ODIN_VERSION\""
CXXFLAGS="$CXXFLAGS -std=c++14"
LDFLAGS="$LDFLAGS -pthread -lm -lstdc++"
GIT_SHA=$(git rev-parse --short HEAD || :)
if [ "$GIT_SHA" ]; then CPPFLAGS="$CPPFLAGS -DGIT_SHA=\"$GIT_SHA\""; fi
if [ -d ".git" ]; then
GIT_SHA=$(git rev-parse --short HEAD || :)
if [ "$GIT_SHA" ]; then
CPPFLAGS="$CPPFLAGS -DGIT_SHA=\"$GIT_SHA\""
fi
fi
DISABLED_WARNINGS="-Wno-switch -Wno-macro-redefined -Wno-unused-value"
OS=$(uname)
@@ -25,11 +30,11 @@ panic() {
version() { echo "$@" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }'; }
config_darwin() {
ARCH=$(uname -m)
local ARCH=$(uname -m)
: ${LLVM_CONFIG=llvm-config}
# allow for arm only llvm's with version 13
if [ ARCH == arm64 ]; then
if [ "${ARCH}" == "arm64" ]; then
MIN_LLVM_VERSION=("13.0.0")
else
# allow for x86 / amd64 all llvm versions beginning from 11
@@ -37,7 +42,7 @@ config_darwin() {
fi
if [ $(version $($LLVM_CONFIG --version)) -lt $(version $MIN_LLVM_VERSION) ]; then
if [ ARCH == arm64 ]; then
if [ "${ARCH}" == "arm64" ]; then
panic "Requirement: llvm-config must be base version 13 for arm64"
else
panic "Requirement: llvm-config must be base version greater than 11 for amd64/x86"
@@ -50,7 +55,7 @@ config_darwin() {
panic "Requirement: llvm-config must be base version smaller than 15"
fi
LDFLAGS="$LDFLAGS -liconv -ldl"
LDFLAGS="$LDFLAGS -liconv -ldl -framework System"
CXXFLAGS="$CXXFLAGS $($LLVM_CONFIG --cxxflags --ldflags)"
LDFLAGS="$LDFLAGS -lLLVM-C"
}
@@ -59,11 +64,11 @@ config_freebsd() {
: ${LLVM_CONFIG=}
if [ ! "$LLVM_CONFIG" ]; then
if which llvm-config11 > /dev/null 2>&1; then
if [ -x "$(command -v llvm-config11)" ]; then
LLVM_CONFIG=llvm-config11
elif which llvm-config12 > /dev/null 2>&1; then
elif [ -x "$(command -v llvm-config12)" ]; then
LLVM_CONFIG=llvm-config12
elif which llvm-config13 > /dev/null 2>&1; then
elif [ -x "$(command -v llvm-config13)" ]; then
LLVM_CONFIG=llvm-config13
else
panic "Unable to find LLVM-config"
@@ -86,12 +91,14 @@ config_linux() {
: ${LLVM_CONFIG=}
if [ ! "$LLVM_CONFIG" ]; then
if which llvm-config > /dev/null 2>&1; then
if [ -x "$(command -v llvm-config)" ]; then
LLVM_CONFIG=llvm-config
elif which llvm-config-11 > /dev/null 2>&1; then
elif [ -x "$(command -v llvm-config-11)" ]; then
LLVM_CONFIG=llvm-config-11
elif which llvm-config-11-64 > /dev/null 2>&1; then
elif [ -x "$(command -v llvm-config-11-64)" ]; then
LLVM_CONFIG=llvm-config-11-64
elif [ -x "$(command -v llvm-config-14)" ]; then
LLVM_CONFIG=llvm-config-14
else
panic "Unable to find LLVM-config"
fi
@@ -111,7 +118,7 @@ config_linux() {
LDFLAGS="$LDFLAGS -ldl"
CXXFLAGS="$CXXFLAGS $($LLVM_CONFIG --cxxflags --ldflags)"
LDFLAGS="$LDFLAGS $($LLVM_CONFIG --libs core native --system-libs --libfiles) -Wl,-rpath=\$ORIGIN"
LDFLAGS="$LDFLAGS $($LLVM_CONFIG --libs core native --system-libs --libfiles) -Wl,-rpath=\$ORIGIN"
# Creates a copy of the llvm library in the build dir, this is meant to support compiler explorer.
# The annoyance is that this copy can be cluttering the development folder. TODO: split staging folders
@@ -128,13 +135,21 @@ build_odin() {
EXTRAFLAGS="-O3"
;;
release-native)
EXTRAFLAGS="-O3 -march=native"
local ARCH=$(uname -m)
if [ "${ARCH}" == "arm64" ]; then
# Use preferred flag for Arm (ie arm64 / aarch64 / etc)
EXTRAFLAGS="-O3 -mcpu=native"
else
# Use preferred flag for x86 / amd64
EXTRAFLAGS="-O3 -march=native"
fi
;;
nightly)
EXTRAFLAGS="-DNIGHTLY -O3"
;;
*)
panic "Build mode unsupported!"
;;
esac
set -x
@@ -147,7 +162,7 @@ run_demo() {
}
have_which() {
if ! which which > /dev/null 2>&1; then
if ! command -v which > /dev/null 2>&1 ; then
panic "Could not find \`which\`"
fi
}
@@ -169,6 +184,7 @@ FreeBSD)
;;
*)
panic "Platform unsupported!"
;;
esac
if [[ $# -eq 0 ]]; then

View File

@@ -2,25 +2,25 @@ package bufio
import "core:io"
// Loadahead_Reader provides io lookahead.
// Lookahead_Reader provides io lookahead.
// This is useful for tokenizers/parsers.
// Loadahead_Reader is similar to bufio.Reader, but unlike bufio.Reader, Loadahead_Reader's buffer size
// Lookahead_Reader is similar to bufio.Reader, but unlike bufio.Reader, Lookahead_Reader's buffer size
// will EXACTLY match the specified size, whereas bufio.Reader's buffer size may differ from the specified size.
// This makes sure that the buffer will not be accidentally read beyond the expected size.
Loadahead_Reader :: struct {
Lookahead_Reader :: struct {
r: io.Reader,
buf: []byte,
n: int,
}
lookahead_reader_init :: proc(lr: ^Loadahead_Reader, r: io.Reader, buf: []byte) -> ^Loadahead_Reader {
lookahead_reader_init :: proc(lr: ^Lookahead_Reader, r: io.Reader, buf: []byte) -> ^Lookahead_Reader {
lr.r = r
lr.buf = buf
lr.n = 0
return lr
}
lookahead_reader_buffer :: proc(lr: ^Loadahead_Reader) -> []byte {
lookahead_reader_buffer :: proc(lr: ^Lookahead_Reader) -> []byte {
return lr.buf[:lr.n]
}
@@ -28,7 +28,7 @@ lookahead_reader_buffer :: proc(lr: ^Loadahead_Reader) -> []byte {
// lookahead_reader_peek returns a slice of the Lookahead_Reader which holds n bytes
// If the Lookahead_Reader cannot hold enough bytes, it will read from the underlying reader to populate the rest.
// NOTE: The returned buffer is not a copy of the underlying buffer
lookahead_reader_peek :: proc(lr: ^Loadahead_Reader, n: int) -> ([]byte, io.Error) {
lookahead_reader_peek :: proc(lr: ^Lookahead_Reader, n: int) -> ([]byte, io.Error) {
switch {
case n < 0:
return nil, .Negative_Read
@@ -58,13 +58,13 @@ lookahead_reader_peek :: proc(lr: ^Loadahead_Reader, n: int) -> ([]byte, io.Erro
// lookahead_reader_peek_all returns a slice of the Lookahead_Reader populating the full buffer
// If the Lookahead_Reader cannot hold enough bytes, it will read from the underlying reader to populate the rest.
// NOTE: The returned buffer is not a copy of the underlying buffer
lookahead_reader_peek_all :: proc(lr: ^Loadahead_Reader) -> ([]byte, io.Error) {
lookahead_reader_peek_all :: proc(lr: ^Lookahead_Reader) -> ([]byte, io.Error) {
return lookahead_reader_peek(lr, len(lr.buf))
}
// lookahead_reader_consume drops the first n populated bytes from the Lookahead_Reader.
lookahead_reader_consume :: proc(lr: ^Loadahead_Reader, n: int) -> io.Error {
lookahead_reader_consume :: proc(lr: ^Lookahead_Reader, n: int) -> io.Error {
switch {
case n == 0:
return nil
@@ -78,6 +78,6 @@ lookahead_reader_consume :: proc(lr: ^Loadahead_Reader, n: int) -> io.Error {
return nil
}
lookahead_reader_consume_all :: proc(lr: ^Loadahead_Reader) -> io.Error {
lookahead_reader_consume_all :: proc(lr: ^Lookahead_Reader) -> io.Error {
return lookahead_reader_consume(lr, lr.n)
}

View File

@@ -14,51 +14,29 @@ read_writer_init :: proc(rw: ^Read_Writer, r: ^Reader, w: ^Writer) {
}
read_writer_to_stream :: proc(rw: ^Read_Writer) -> (s: io.Stream) {
s.stream_data = rw
s.stream_vtable = &_read_writer_vtable
s.procedure = _read_writer_procedure
s.data = rw
return
}
@(private)
_read_writer_vtable := io.Stream_VTable{
impl_read = proc(s: io.Stream, p: []byte) -> (n: int, err: io.Error) {
b := (^Read_Writer)(s.stream_data).r
return reader_read(b, p)
},
impl_unread_byte = proc(s: io.Stream) -> io.Error {
b := (^Read_Writer)(s.stream_data).r
return reader_unread_byte(b)
},
impl_read_rune = proc(s: io.Stream) -> (r: rune, size: int, err: io.Error) {
b := (^Read_Writer)(s.stream_data).r
return reader_read_rune(b)
},
impl_unread_rune = proc(s: io.Stream) -> io.Error {
b := (^Read_Writer)(s.stream_data).r
return reader_unread_rune(b)
},
impl_write_to = proc(s: io.Stream, w: io.Writer) -> (n: i64, err: io.Error) {
b := (^Read_Writer)(s.stream_data).r
return reader_write_to(b, w)
},
impl_flush = proc(s: io.Stream) -> io.Error {
b := (^Read_Writer)(s.stream_data).w
return writer_flush(b)
},
impl_write = proc(s: io.Stream, p: []byte) -> (n: int, err: io.Error) {
b := (^Read_Writer)(s.stream_data).w
return writer_write(b, p)
},
impl_write_byte = proc(s: io.Stream, c: byte) -> io.Error {
b := (^Read_Writer)(s.stream_data).w
return writer_write_byte(b, c)
},
impl_write_rune = proc(s: io.Stream, r: rune) -> (int, io.Error) {
b := (^Read_Writer)(s.stream_data).w
return writer_write_rune(b, r)
},
impl_read_from = proc(s: io.Stream, r: io.Reader) -> (n: i64, err: io.Error) {
b := (^Read_Writer)(s.stream_data).w
return writer_read_from(b, r)
},
}
_read_writer_procedure := proc(stream_data: rawptr, mode: io.Stream_Mode, p: []byte, offset: i64, whence: io.Seek_From) -> (n: i64, err: io.Error) {
rw := (^Read_Writer)(stream_data)
n_int: int
#partial switch mode {
case .Flush:
err = writer_flush(rw.w)
return
case .Read:
n_int, err = reader_read(rw.r, p)
n = i64(n_int)
return
case .Write:
n_int, err = writer_write(rw.w, p)
n = i64(n_int)
return
case .Query:
return io.query_utility({.Flush, .Read, .Write, .Query})
}
return 0, .Empty
}

View File

@@ -311,18 +311,6 @@ reader_write_to :: proc(b: ^Reader, w: io.Writer) -> (n: i64, err: io.Error) {
}
m: i64
if nr, ok := io.to_writer_to(b.rd); ok {
m, err = io.write_to(nr, w)
n += m
return n, err
}
if nw, ok := io.to_reader_from(w); ok {
m, err = io.read_from(nw, b.rd)
n += m
return n, err
}
if b.w-b.r < len(b.buf) {
if err = _reader_read_new_chunk(b); err != nil {
return
@@ -352,48 +340,28 @@ reader_write_to :: proc(b: ^Reader, w: io.Writer) -> (n: i64, err: io.Error) {
// reader_to_stream converts a Reader into an io.Stream
reader_to_stream :: proc(b: ^Reader) -> (s: io.Stream) {
s.stream_data = b
s.stream_vtable = &_reader_vtable
s.data = b
s.procedure = _reader_proc
return
}
@(private)
_reader_vtable := io.Stream_VTable{
impl_destroy = proc(s: io.Stream) -> io.Error {
b := (^Reader)(s.stream_data)
_reader_proc :: proc(stream_data: rawptr, mode: io.Stream_Mode, p: []byte, offset: i64, whence: io.Seek_From) -> (n: i64, err: io.Error) {
b := (^Reader)(stream_data)
#partial switch mode {
case .Read:
return io._i64_err(reader_read(b, p))
case .Destroy:
reader_destroy(b)
return nil
},
impl_read = proc(s: io.Stream, p: []byte) -> (n: int, err: io.Error) {
b := (^Reader)(s.stream_data)
return reader_read(b, p)
},
impl_read_byte = proc(s: io.Stream) -> (c: byte, err: io.Error) {
b := (^Reader)(s.stream_data)
return reader_read_byte(b)
},
impl_unread_byte = proc(s: io.Stream) -> io.Error {
b := (^Reader)(s.stream_data)
return reader_unread_byte(b)
},
impl_read_rune = proc(s: io.Stream) -> (r: rune, size: int, err: io.Error) {
b := (^Reader)(s.stream_data)
return reader_read_rune(b)
},
impl_unread_rune = proc(s: io.Stream) -> io.Error {
b := (^Reader)(s.stream_data)
return reader_unread_rune(b)
},
impl_write_to = proc(s: io.Stream, w: io.Writer) -> (n: i64, err: io.Error) {
b := (^Reader)(s.stream_data)
return reader_write_to(b, w)
},
return
case .Query:
return io.query_utility({.Read, .Destroy, .Query})
}
return 0, .Empty
}
//
// Utility procedures
//

View File

@@ -173,14 +173,6 @@ writer_read_from :: proc(b: ^Writer, r: io.Reader) -> (n: i64, err: io.Error) {
if b.err != nil {
return 0, b.err
}
if writer_buffered(b) == 0 {
if w, ok := io.to_reader_from(b.wr); !ok {
n, err = io.read_from(w, r)
b.err = err
return
}
}
for {
if writer_available(b) == 0 {
writer_flush(b) or_return
@@ -222,38 +214,35 @@ writer_read_from :: proc(b: ^Writer, r: io.Reader) -> (n: i64, err: io.Error) {
// writer_to_stream converts a Writer into an io.Stream
writer_to_stream :: proc(b: ^Writer) -> (s: io.Stream) {
s.stream_data = b
s.stream_vtable = &_writer_vtable
s.data = b
s.procedure = _writer_proc
return
}
// writer_to_stream converts a Writer into an io.Stream
writer_to_writer :: proc(b: ^Writer) -> (s: io.Writer) {
return writer_to_stream(b)
}
@(private)
_writer_vtable := io.Stream_VTable{
impl_destroy = proc(s: io.Stream) -> io.Error {
b := (^Writer)(s.stream_data)
_writer_proc :: proc(stream_data: rawptr, mode: io.Stream_Mode, p: []byte, offset: i64, whence: io.Seek_From) -> (n: i64, err: io.Error) {
b := (^Writer)(stream_data)
#partial switch mode {
case .Flush:
err = writer_flush(b)
return
case .Write:
n_int: int
n_int, err = writer_write(b, p)
n = i64(n_int)
return
case .Destroy:
writer_destroy(b)
return nil
},
impl_flush = proc(s: io.Stream) -> io.Error {
b := (^Writer)(s.stream_data)
return writer_flush(b)
},
impl_write = proc(s: io.Stream, p: []byte) -> (n: int, err: io.Error) {
b := (^Writer)(s.stream_data)
return writer_write(b, p)
},
impl_write_byte = proc(s: io.Stream, c: byte) -> io.Error {
b := (^Writer)(s.stream_data)
return writer_write_byte(b, c)
},
impl_write_rune = proc(s: io.Stream, r: rune) -> (int, io.Error) {
b := (^Writer)(s.stream_data)
return writer_write_rune(b, r)
},
impl_read_from = proc(s: io.Stream, r: io.Reader) -> (n: i64, err: io.Error) {
b := (^Writer)(s.stream_data)
return writer_read_from(b, r)
},
return
case .Query:
return io.query_utility({.Flush, .Write, .Destroy, .Query})
}
return 0, .Empty
}

View File

@@ -94,7 +94,15 @@ cap :: proc(array: Array_Type) -> int ---
size_of :: proc($T: typeid) -> int ---
align_of :: proc($T: typeid) -> int ---
offset_of :: proc($T: typeid) -> uintptr ---
// e.g. offset_of(t.f), where t is an instance of the type T
offset_of_selector :: proc(selector: $T) -> uintptr ---
// e.g. offset_of(T, f), where T can be the type instead of a variable
offset_of_member :: proc($T: typeid, member: $M) -> uintptr ---
offset_of :: proc{offset_of_selector, offset_of_member}
// e.g. offset_of(T, "f"), where T can be the type instead of a variable
offset_of_by_string :: proc($T: typeid, member: string) -> uintptr ---
type_of :: proc(x: expr) -> type ---
type_info_of :: proc($T: typeid) -> ^runtime.Type_Info ---
typeid_of :: proc($T: typeid) -> typeid ---
@@ -109,7 +117,7 @@ jmag :: proc(value: Quaternion) -> Float ---
kmag :: proc(value: Quaternion) -> Float ---
conj :: proc(value: Complex_Or_Quaternion) -> Complex_Or_Quaternion ---
expand_to_tuple :: proc(value: Struct_Or_Array) -> (A, B, C, ...) ---
expand_values :: proc(value: Struct_Or_Array) -> (A, B, C, ...) ---
min :: proc(values: ..T) -> T ---
max :: proc(values: ..T) -> T ---

View File

@@ -38,6 +38,11 @@ buffer_init_string :: proc(b: ^Buffer, s: string) {
}
buffer_init_allocator :: proc(b: ^Buffer, len, cap: int, allocator := context.allocator) {
if b.buf == nil {
b.buf = make([dynamic]byte, len, cap, allocator)
return
}
b.buf.allocator = allocator
reserve(&b.buf, cap)
resize(&b.buf, len)
@@ -370,69 +375,31 @@ buffer_read_from :: proc(b: ^Buffer, r: io.Reader) -> (n: i64, err: io.Error) #n
buffer_to_stream :: proc(b: ^Buffer) -> (s: io.Stream) {
s.stream_data = b
s.stream_vtable = &_buffer_vtable
s.data = b
s.procedure = _buffer_proc
return
}
@(private)
_buffer_vtable := io.Stream_VTable{
impl_size = proc(s: io.Stream) -> i64 {
b := (^Buffer)(s.stream_data)
return i64(buffer_capacity(b))
},
impl_read = proc(s: io.Stream, p: []byte) -> (n: int, err: io.Error) {
b := (^Buffer)(s.stream_data)
return buffer_read(b, p)
},
impl_read_at = proc(s: io.Stream, p: []byte, offset: i64) -> (n: int, err: io.Error) {
b := (^Buffer)(s.stream_data)
return buffer_read_at(b, p, int(offset))
},
impl_read_byte = proc(s: io.Stream) -> (byte, io.Error) {
b := (^Buffer)(s.stream_data)
return buffer_read_byte(b)
},
impl_read_rune = proc(s: io.Stream) -> (r: rune, size: int, err: io.Error) {
b := (^Buffer)(s.stream_data)
return buffer_read_rune(b)
},
impl_write = proc(s: io.Stream, p: []byte) -> (n: int, err: io.Error) {
b := (^Buffer)(s.stream_data)
return buffer_write(b, p)
},
impl_write_at = proc(s: io.Stream, p: []byte, offset: i64) -> (n: int, err: io.Error) {
b := (^Buffer)(s.stream_data)
return buffer_write_at(b, p, int(offset))
},
impl_write_byte = proc(s: io.Stream, c: byte) -> io.Error {
b := (^Buffer)(s.stream_data)
return buffer_write_byte(b, c)
},
impl_write_rune = proc(s: io.Stream, r: rune) -> (int, io.Error) {
b := (^Buffer)(s.stream_data)
return buffer_write_rune(b, r)
},
impl_unread_byte = proc(s: io.Stream) -> io.Error {
b := (^Buffer)(s.stream_data)
return buffer_unread_byte(b)
},
impl_unread_rune = proc(s: io.Stream) -> io.Error {
b := (^Buffer)(s.stream_data)
return buffer_unread_rune(b)
},
impl_destroy = proc(s: io.Stream) -> io.Error {
b := (^Buffer)(s.stream_data)
_buffer_proc :: proc(stream_data: rawptr, mode: io.Stream_Mode, p: []byte, offset: i64, whence: io.Seek_From) -> (n: i64, err: io.Error) {
b := (^Buffer)(stream_data)
#partial switch mode {
case .Read:
return io._i64_err(buffer_read(b, p))
case .Read_At:
return io._i64_err(buffer_read_at(b, p, int(offset)))
case .Write:
return io._i64_err(buffer_write(b, p))
case .Write_At:
return io._i64_err(buffer_write_at(b, p, int(offset)))
case .Size:
n = i64(buffer_capacity(b))
return
case .Destroy:
buffer_destroy(b)
return nil
},
impl_write_to = proc(s: io.Stream, w: io.Writer) -> (n: i64, err: io.Error) {
b := (^Buffer)(s.stream_data)
return buffer_write_to(b, w)
},
impl_read_from = proc(s: io.Stream, r: io.Reader) -> (n: i64, err: io.Error) {
b := (^Buffer)(s.stream_data)
return buffer_read_from(b, r)
},
return
case .Query:
return io.query_utility({.Read, .Read_At, .Write, .Write_At, .Size, .Destroy})
}
return 0, .Empty
}

View File

@@ -16,8 +16,8 @@ reader_init :: proc(r: ^Reader, s: []byte) {
}
reader_to_stream :: proc(r: ^Reader) -> (s: io.Stream) {
s.stream_data = r
s.stream_vtable = &_reader_vtable
s.data = r
s.procedure = _reader_proc
return
}
@@ -137,41 +137,22 @@ reader_write_to :: proc(r: ^Reader, w: io.Writer) -> (n: i64, err: io.Error) {
@(private)
_reader_vtable := io.Stream_VTable{
impl_size = proc(s: io.Stream) -> i64 {
r := (^Reader)(s.stream_data)
return reader_size(r)
},
impl_read = proc(s: io.Stream, p: []byte) -> (n: int, err: io.Error) {
r := (^Reader)(s.stream_data)
return reader_read(r, p)
},
impl_read_at = proc(s: io.Stream, p: []byte, off: i64) -> (n: int, err: io.Error) {
r := (^Reader)(s.stream_data)
return reader_read_at(r, p, off)
},
impl_read_byte = proc(s: io.Stream) -> (byte, io.Error) {
r := (^Reader)(s.stream_data)
return reader_read_byte(r)
},
impl_unread_byte = proc(s: io.Stream) -> io.Error {
r := (^Reader)(s.stream_data)
return reader_unread_byte(r)
},
impl_read_rune = proc(s: io.Stream) -> (ch: rune, size: int, err: io.Error) {
r := (^Reader)(s.stream_data)
return reader_read_rune(r)
},
impl_unread_rune = proc(s: io.Stream) -> io.Error {
r := (^Reader)(s.stream_data)
return reader_unread_rune(r)
},
impl_seek = proc(s: io.Stream, offset: i64, whence: io.Seek_From) -> (i64, io.Error) {
r := (^Reader)(s.stream_data)
return reader_seek(r, offset, whence)
},
impl_write_to = proc(s: io.Stream, w: io.Writer) -> (n: i64, err: io.Error) {
r := (^Reader)(s.stream_data)
return reader_write_to(r, w)
},
_reader_proc :: proc(stream_data: rawptr, mode: io.Stream_Mode, p: []byte, offset: i64, whence: io.Seek_From) -> (n: i64, err: io.Error) {
r := (^Reader)(stream_data)
#partial switch mode {
case .Read:
return io._i64_err(reader_read(r, p))
case .Read_At:
return io._i64_err(reader_read_at(r, p, offset))
case .Seek:
n, err = reader_seek(r, offset, whence)
return
case .Size:
n = reader_size(r)
return
case .Query:
return io.query_utility({.Read, .Read_At, .Seek, .Size, .Query})
}
return 0, .Empty
}

View File

@@ -1118,7 +1118,7 @@ expand_macro :: proc(cpp: ^Preprocessor, rest: ^^Token, tok: ^Token) -> bool {
search_include_next :: proc(cpp: ^Preprocessor, filename: string) -> (path: string, ok: bool) {
for ; cpp.include_next_index < len(cpp.include_paths); cpp.include_next_index += 1 {
tpath := filepath.join(elems={cpp.include_paths[cpp.include_next_index], filename}, allocator=context.temp_allocator)
tpath := filepath.join({cpp.include_paths[cpp.include_next_index], filename}, allocator=context.temp_allocator)
if os.exists(tpath) {
return strings.clone(tpath), true
}
@@ -1136,7 +1136,7 @@ search_include_paths :: proc(cpp: ^Preprocessor, filename: string) -> (path: str
}
for include_path in cpp.include_paths {
tpath := filepath.join(elems={include_path, filename}, allocator=context.temp_allocator)
tpath := filepath.join({include_path, filename}, allocator=context.temp_allocator)
if os.exists(tpath) {
path, ok = strings.clone(tpath), true
cpp.filepath_cache[filename] = path

View File

@@ -44,7 +44,7 @@ when ODIN_OS == .Windows {
@(link_name="_Cnd_destroy") cnd_destroy :: proc(cond: ^cnd_t) ---
@(link_name="_Cnd_init") cnd_init :: proc(cond: ^cnd_t) -> int ---
@(link_name="_Cnd_signal") cnd_signal :: proc(cond: ^cnd_t) -> int ---
@(link_name="_Cnd_timedwait") cnd_timedwait :: proc(cond: ^cnd_t, ts: ^timespec) -> int ---
@(link_name="_Cnd_timedwait") cnd_timedwait :: proc(cond: ^cnd_t, mtx: ^mtx_t, ts: ^timespec) -> int ---
@(link_name="_Cnd_wait") cnd_wait :: proc(cond: ^cnd_t, mtx: ^mtx_t) -> int ---
// 7.26.4 Mutex functions
@@ -108,7 +108,7 @@ when ODIN_OS == .Linux {
cnd_destroy :: proc(cond: ^cnd_t) ---
cnd_init :: proc(cond: ^cnd_t) -> int ---
cnd_signal :: proc(cond: ^cnd_t) -> int ---
cnd_timedwait :: proc(cond: ^cnd_t, ts: ^timespec) -> int ---
cnd_timedwait :: proc(cond: ^cnd_t, mtx: ^mtx_t, ts: ^timespec) -> int ---
cnd_wait :: proc(cond: ^cnd_t, mtx: ^mtx_t) -> int ---
// 7.26.4 Mutex functions

View File

@@ -188,7 +188,8 @@ input_size_from_memory :: proc(z: ^Context_Memory_Input) -> (res: i64, err: Erro
}
input_size_from_stream :: proc(z: ^Context_Stream_Input) -> (res: i64, err: Error) {
return io.size(z.input), nil
res, _ = io.size(z.input)
return
}
input_size :: proc{input_size_from_memory, input_size_from_stream}
@@ -212,8 +213,10 @@ read_slice_from_memory :: #force_inline proc(z: ^Context_Memory_Input, size: int
@(optimization_mode="speed")
read_slice_from_stream :: #force_inline proc(z: ^Context_Stream_Input, size: int) -> (res: []u8, err: io.Error) {
// TODO: REMOVE ALL USE OF context.temp_allocator here
// the is literally no need for it
b := make([]u8, size, context.temp_allocator)
_, e := z.input->impl_read(b[:])
_, e := io.read(z.input, b[:])
if e == .None {
return b, .None
}

View File

@@ -335,7 +335,7 @@ load_from_context :: proc(z: ^$C, buf: ^bytes.Buffer, known_gzip_size := -1, exp
// fmt.printf("GZIP: Expected Payload Size: %v\n", expected_output_size);
zlib_error := zlib.inflate_raw(z=z, expected_output_size=expected_output_size)
zlib_error := zlib.inflate_raw(z, expected_output_size=expected_output_size)
if zlib_error != nil {
return zlib_error
}

View File

@@ -177,12 +177,10 @@ decompress_slice_to_string :: proc(input: []u8, model := DEFAULT_MODEL, allocato
max_output_size := decompress_bound(len(input), model)
buf: [dynamic]u8
if !resize(&buf, max_output_size) {
return "", .Out_Of_Memory
}
resize(&buf, max_output_size) or_return
length, result := decompress_slice_to_output_buffer(input, buf[:])
resize(&buf, length)
resize(&buf, length) or_return
return string(buf[:]), result
}
decompress :: proc{decompress_slice_to_output_buffer, decompress_slice_to_string}
@@ -307,12 +305,10 @@ compress_string :: proc(input: string, model := DEFAULT_MODEL, allocator := cont
max_output_size := compress_bound(len(input))
buf: [dynamic]u8
if !resize(&buf, max_output_size) {
return {}, .Out_Of_Memory
}
resize(&buf, max_output_size) or_return
length, result := compress_string_to_buffer(input, buf[:])
resize(&buf, length)
resize(&buf, length) or_return
return buf[:length], result
}
compress :: proc{compress_string_to_buffer, compress_string}

View File

@@ -471,7 +471,7 @@ inflate_from_context :: proc(using ctx: ^compress.Context_Memory_Input, raw := f
}
// Parse ZLIB stream without header.
inflate_raw(z=ctx, expected_output_size=expected_output_size) or_return
inflate_raw(ctx, expected_output_size=expected_output_size) or_return
if !raw {
compress.discard_to_next_byte_lsb(ctx)
@@ -665,7 +665,7 @@ inflate_from_byte_array :: proc(input: []u8, buf: ^bytes.Buffer, raw := false, e
ctx.input_data = input
ctx.output = buf
return inflate_from_context(ctx=&ctx, raw=raw, expected_output_size=expected_output_size)
return inflate_from_context(&ctx, raw=raw, expected_output_size=expected_output_size)
}
inflate_from_byte_array_raw :: proc(input: []u8, buf: ^bytes.Buffer, raw := false, expected_output_size := -1) -> (err: Error) {
@@ -674,7 +674,7 @@ inflate_from_byte_array_raw :: proc(input: []u8, buf: ^bytes.Buffer, raw := fals
ctx.input_data = input
ctx.output = buf
return inflate_raw(z=&ctx, expected_output_size=expected_output_size)
return inflate_raw(&ctx, expected_output_size=expected_output_size)
}
inflate :: proc{inflate_from_context, inflate_from_byte_array}

View File

@@ -27,27 +27,28 @@ Bit_Array_Iterator :: struct {
word_idx: int,
bit_idx: uint,
}
/*
In:
- ba: ^Bit_Array - the array to iterate over
Wraps a `Bit_Array` into an Iterator
Out:
- it: ^Bit_Array_Iterator - the iterator that holds iteration state
Inputs:
- ba: Pointer to the Bit_Array
Returns:
- it: Iterator struct
*/
make_iterator :: proc (ba: ^Bit_Array) -> (it: Bit_Array_Iterator) {
return Bit_Array_Iterator { array = ba }
}
/*
In:
- it: ^Bit_Array_Iterator - the iterator struct that holds the state.
Returns the next bit, including its set-state. ok=false once exhausted
Out:
- set: bool - the state of the bit at `index`
- index: int - the next bit of the Bit_Array referenced by `it`.
- ok: bool - `true` if the iterator returned a valid index,
`false` if there were no more bits
Inputs:
- it: The iterator that holds the state.
Returns:
- set: `true` if the bit at `index` is set.
- index: The next bit of the Bit_Array referenced by `it`.
- ok: `true` if the iterator can continue, `false` if the iterator is done
*/
iterate_by_all :: proc (it: ^Bit_Array_Iterator) -> (set: bool, index: int, ok: bool) {
index = it.word_idx * NUM_BITS + int(it.bit_idx) + it.array.bias
@@ -64,39 +65,51 @@ iterate_by_all :: proc (it: ^Bit_Array_Iterator) -> (set: bool, index: int, ok:
return set, index, true
}
/*
In:
- it: ^Bit_Array_Iterator - the iterator struct that holds the state.
Returns the next Set Bit, for example if `0b1010`, then the iterator will return index={1, 3} over two calls.
Out:
- index: int - the next set bit of the Bit_Array referenced by `it`.
- ok: bool - `true` if the iterator returned a valid index,
`false` if there were no more bits set
Inputs:
- it: The iterator that holds the state.
Returns:
- index: The next *set* bit of the Bit_Array referenced by `it`.
- ok: `true` if the iterator can continue, `false` if the iterator is done
*/
iterate_by_set :: proc (it: ^Bit_Array_Iterator) -> (index: int, ok: bool) {
return iterate_internal_(it, true)
}
/*
In:
- it: ^Bit_Array_Iterator - the iterator struct that holds the state.
Returns the next Unset Bit, for example if `0b1010`, then the iterator will return index={0, 2} over two calls.
Out:
- index: int - the next unset bit of the Bit_Array referenced by `it`.
- ok: bool - `true` if the iterator returned a valid index,
`false` if there were no more unset bits
Inputs:
- it: The iterator that holds the state.
Returns:
- index: The next *unset* bit of the Bit_Array referenced by `it`.
- ok: `true` if the iterator can continue, `false` if the iterator is done
*/
iterate_by_unset:: proc (it: ^Bit_Array_Iterator) -> (index: int, ok: bool) {
return iterate_internal_(it, false)
}
/*
Iterates through set/unset bits
*Private*
Inputs:
- it: The iterator that holds the state.
- ITERATE_SET_BITS: `true` for returning only set bits, false for returning only unset bits
Returns:
- index: The next *unset* bit of the Bit_Array referenced by `it`.
- ok: `true` if the iterator can continue, `false` if the iterator is done
*/
@(private="file")
iterate_internal_ :: proc (it: ^Bit_Array_Iterator, $ITERATE_SET_BITS: bool) -> (index: int, ok: bool) {
word := it.array.bits[it.word_idx] if len(it.array.bits) > it.word_idx else 0
when ! ITERATE_SET_BITS { word = ~word }
// if the word is empty or we have already gone over all the bits in it,
// If the word is empty or we have already gone over all the bits in it,
// b.bit_idx is greater than the index of any set bit in the word,
// meaning that word >> b.bit_idx == 0.
for it.word_idx < len(it.array.bits) && word >> it.bit_idx == 0 {
@@ -106,14 +119,14 @@ iterate_internal_ :: proc (it: ^Bit_Array_Iterator, $ITERATE_SET_BITS: bool) ->
when ! ITERATE_SET_BITS { word = ~word }
}
// if we are iterating the set bits, reaching the end of the array means we have no more bits to check
// If we are iterating the set bits, reaching the end of the array means we have no more bits to check
when ITERATE_SET_BITS {
if it.word_idx >= len(it.array.bits) {
return 0, false
}
}
// reaching here means that the word has some set bits
// Reaching here means that the word has some set bits
it.bit_idx += uint(intrinsics.count_trailing_zeros(word >> it.bit_idx))
index = it.word_idx * NUM_BITS + int(it.bit_idx) + it.array.bias
@@ -124,24 +137,21 @@ iterate_internal_ :: proc (it: ^Bit_Array_Iterator, $ITERATE_SET_BITS: bool) ->
}
return index, index <= it.array.max_index
}
/*
In:
- ba: ^Bit_Array - a pointer to the Bit Array
- index: The bit index. Can be an enum member.
Gets the state of a bit in the bit-array
Out:
- res: The bit you're interested in.
- ok: Whether the index was valid. Returns `false` if the index is smaller than the bias.
Inputs:
- ba: Pointer to the Bit_Array
- index: Which bit in the array
The `ok` return value may be ignored.
Returns:
- res: `true` if the bit at `index` is set.
- ok: Whether the index was valid. Returns `false` if the index is smaller than the bias.
*/
get :: proc(ba: ^Bit_Array, #any_int index: uint, allocator := context.allocator) -> (res: bool, ok: bool) {
get :: proc(ba: ^Bit_Array, #any_int index: uint) -> (res: bool, ok: bool) #optional_ok {
idx := int(index) - ba.bias
if ba == nil || int(index) < ba.bias { return false, false }
context.allocator = allocator
leg_index := idx >> INDEX_SHIFT
bit_index := idx & INDEX_MASK
@@ -157,18 +167,36 @@ get :: proc(ba: ^Bit_Array, #any_int index: uint, allocator := context.allocator
return res, true
}
/*
In:
- ba: ^Bit_Array - a pointer to the Bit Array
- index: The bit index. Can be an enum member.
Gets the state of a bit in the bit-array
Out:
- ok: Whether or not we managed to set requested bit.
*Bypasses all Checks*
`set` automatically resizes the Bit Array to accommodate the requested index if needed.
Inputs:
- ba: Pointer to the Bit_Array
- index: Which bit in the array
Returns:
- `true` if bit is set
*/
set :: proc(ba: ^Bit_Array, #any_int index: uint, allocator := context.allocator) -> (ok: bool) {
unsafe_get :: #force_inline proc(ba: ^Bit_Array, #any_int index: uint) -> bool #no_bounds_check {
return bool((ba.bits[index >> INDEX_SHIFT] >> uint(index & INDEX_MASK)) & 1)
}
/*
Sets the state of a bit in the bit-array
*Conditionally Allocates (Resizes backing data when `index > len(ba.bits)`)*
Inputs:
- ba: Pointer to the Bit_Array
- index: Which bit in the array
- set_to: `true` sets the bit on, `false` to turn it off
- allocator: (default is context.allocator)
Returns:
- ok: Whether the set was successful, `false` on allocation failure or bad index
*/
set :: proc(ba: ^Bit_Array, #any_int index: uint, set_to: bool = true, allocator := context.allocator) -> (ok: bool) {
idx := int(index) - ba.bias
@@ -181,65 +209,97 @@ set :: proc(ba: ^Bit_Array, #any_int index: uint, allocator := context.allocator
resize_if_needed(ba, leg_index) or_return
ba.max_index = max(idx, ba.max_index)
ba.bits[leg_index] |= 1 << uint(bit_index)
if set_to{ ba.bits[leg_index] |= 1 << uint(bit_index) }
else { ba.bits[leg_index] &= ~(1 << uint(bit_index)) }
return true
}
/*
In:
- ba: ^Bit_Array - a pointer to the Bit Array
- index: The bit index. Can be an enum member.
Sets the state of a bit in the bit-array
Out:
- ok: Whether or not we managed to unset requested bit.
*Bypasses all checks*
`unset` automatically resizes the Bit Array to accommodate the requested index if needed.
Inputs:
- ba: Pointer to the Bit_Array
- index: Which bit in the array
*/
unset :: proc(ba: ^Bit_Array, #any_int index: uint, allocator := context.allocator) -> (ok: bool) {
idx := int(index) - ba.bias
if ba == nil || int(index) < ba.bias { return false }
context.allocator = allocator
leg_index := idx >> INDEX_SHIFT
bit_index := idx & INDEX_MASK
resize_if_needed(ba, leg_index) or_return
ba.max_index = max(idx, ba.max_index)
ba.bits[leg_index] &= ~(1 << uint(bit_index))
return true
unsafe_set :: proc(ba: ^Bit_Array, bit: int) #no_bounds_check {
ba.bits[bit >> INDEX_SHIFT] |= 1 << uint(bit & INDEX_MASK)
}
/*
A helper function to create a Bit Array with optional bias, in case your smallest index is non-zero (including negative).
Unsets the state of a bit in the bit-array. (Convienence wrapper for `set`)
*Conditionally Allocates (Resizes backing data when `index > len(ba.bits)`)*
Inputs:
- ba: Pointer to the Bit_Array
- index: Which bit in the array
- allocator: (default is context.allocator)
Returns:
- ok: Whether the unset was successful, `false` on allocation failure or bad index
*/
create :: proc(max_index: int, min_index := 0, allocator := context.allocator) -> (res: ^Bit_Array, ok: bool) #optional_ok {
unset :: #force_inline proc(ba: ^Bit_Array, #any_int index: uint, allocator := context.allocator) -> (ok: bool) {
return set(ba, index, false, allocator)
}
/*
Unsets the state of a bit in the bit-array
*Bypasses all Checks*
Inputs:
- ba: Pointer to the Bit_Array
- index: Which bit in the array
*/
unsafe_unset :: proc(b: ^Bit_Array, bit: int) #no_bounds_check {
b.bits[bit >> INDEX_SHIFT] &= ~(1 << uint(bit & INDEX_MASK))
}
/*
A helper function to create a Bit Array with optional bias, in case your smallest index is non-zero (including negative).
*Allocates (`new(Bit_Array) & make(ba.bits)`)*
Inputs:
- max_index: maximum starting index
- min_index: minimum starting index (used as a bias)
- allocator: (default is context.allocator)
Returns:
- ba: Allocates a bit_Array, backing data is set to `max-min / 64` indices, rounded up (eg 65 - 0 allocates for [2]u64).
*/
create :: proc(max_index: int, min_index: int = 0, allocator := context.allocator) -> (res: ^Bit_Array, ok: bool) #optional_ok {
context.allocator = allocator
size_in_bits := max_index - min_index
if size_in_bits < 1 { return {}, false }
legs := size_in_bits >> INDEX_SHIFT
if size_in_bits & INDEX_MASK > 0 {legs+=1}
bits, err := make([dynamic]u64, legs)
ok = err == mem.Allocator_Error.None
res = new(Bit_Array)
res.bits = bits
res.bias = min_index
res.max_index = max_index
res.free_pointer = true
return res, resize_if_needed(res, legs)
return
}
/*
Sets all bits to `false`.
Sets all values in the Bit_Array to zero.
Inputs:
- ba: The target Bit_Array
*/
clear :: proc(ba: ^Bit_Array) {
if ba == nil { return }
mem.zero_slice(ba.bits[:])
}
/*
Releases the memory used by the Bit Array.
Deallocates the Bit_Array and its backing storage
Inputs:
- ba: The target Bit_Array
*/
destroy :: proc(ba: ^Bit_Array) {
if ba == nil { return }
@@ -248,9 +308,8 @@ destroy :: proc(ba: ^Bit_Array) {
free(ba)
}
}
/*
Resizes the Bit Array. For internal use.
Resizes the Bit Array. For internal use. Provisions needed capacity+1
If you want to reserve the memory for a given-sized Bit Array up front, you can use `create`.
*/
@(private="file")

View File

@@ -14,7 +14,7 @@ Queue :: struct($T: typeid) {
DEFAULT_CAPACITY :: 16
// Procedure to initialize a queue
init :: proc(q: ^$Q/Queue($T), capacity := DEFAULT_CAPACITY, allocator := context.allocator) -> bool {
init :: proc(q: ^$Q/Queue($T), capacity := DEFAULT_CAPACITY, allocator := context.allocator) -> runtime.Allocator_Error {
if q.data.allocator.procedure == nil {
q.data.allocator = allocator
}
@@ -55,11 +55,11 @@ space :: proc(q: $Q/Queue($T)) -> int {
}
// Reserve enough space for at least the specified capacity
reserve :: proc(q: ^$Q/Queue($T), capacity: int) -> bool {
reserve :: proc(q: ^$Q/Queue($T), capacity: int) -> runtime.Allocator_Error {
if uint(capacity) > q.len {
return _grow(q, uint(capacity))
}
return true
return nil
}
@@ -112,25 +112,25 @@ peek_back :: proc(q: ^$Q/Queue($T), loc := #caller_location) -> ^T {
}
// Push an element to the back of the queue
push_back :: proc(q: ^$Q/Queue($T), elem: T) -> bool {
push_back :: proc(q: ^$Q/Queue($T), elem: T) -> (ok: bool, err: runtime.Allocator_Error) {
if space(q^) == 0 {
_grow(q) or_return
}
idx := (q.offset+uint(q.len))%builtin.len(q.data)
q.data[idx] = elem
q.len += 1
return true
return true, nil
}
// Push an element to the front of the queue
push_front :: proc(q: ^$Q/Queue($T), elem: T) -> bool {
push_front :: proc(q: ^$Q/Queue($T), elem: T) -> (ok: bool, err: runtime.Allocator_Error) {
if space(q^) == 0 {
_grow(q) or_return
}
q.offset = uint(q.offset - 1 + builtin.len(q.data)) % builtin.len(q.data)
q.len += 1
q.data[q.offset] = elem
return true
return true, nil
}
@@ -173,7 +173,7 @@ pop_front_safe :: proc(q: ^$Q/Queue($T)) -> (elem: T, ok: bool) {
}
// Push multiple elements to the front of the queue
push_back_elems :: proc(q: ^$Q/Queue($T), elems: ..T) -> bool {
push_back_elems :: proc(q: ^$Q/Queue($T), elems: ..T) -> (ok: bool, err: runtime.Allocator_Error) {
n := uint(builtin.len(elems))
if space(q^) < int(n) {
_grow(q, q.len + n) or_return
@@ -188,7 +188,7 @@ push_back_elems :: proc(q: ^$Q/Queue($T), elems: ..T) -> bool {
copy(q.data[insert_from:], elems[:insert_to])
copy(q.data[:insert_from], elems[insert_to:])
q.len += n
return true
return true, nil
}
// Consume `n` elements from the front of the queue
@@ -225,7 +225,7 @@ clear :: proc(q: ^$Q/Queue($T)) {
// Internal growinh procedure
_grow :: proc(q: ^$Q/Queue($T), min_capacity: uint = 0) -> bool {
_grow :: proc(q: ^$Q/Queue($T), min_capacity: uint = 0) -> runtime.Allocator_Error {
new_capacity := max(min_capacity, uint(8), uint(builtin.len(q.data))*2)
n := uint(builtin.len(q.data))
builtin.resize(&q.data, int(new_capacity)) or_return
@@ -234,5 +234,5 @@ _grow :: proc(q: ^$Q/Queue($T), min_capacity: uint = 0) -> bool {
copy(q.data[new_capacity-diff:], q.data[q.offset:][:diff])
q.offset += new_capacity - n
}
return true
return nil
}

View File

@@ -9,14 +9,16 @@ package fiat
u1 :: distinct u8
i1 :: distinct i8
cmovznz_u64 :: #force_inline proc "contextless" (arg1: u1, arg2, arg3: u64) -> (out1: u64) {
@(optimization_mode="none")
cmovznz_u64 :: proc "contextless" (arg1: u1, arg2, arg3: u64) -> (out1: u64) {
x1 := (u64(arg1) * 0xffffffffffffffff)
x2 := ((x1 & arg3) | ((~x1) & arg2))
out1 = x2
return
}
cmovznz_u32 :: #force_inline proc "contextless" (arg1: u1, arg2, arg3: u32) -> (out1: u32) {
@(optimization_mode="none")
cmovznz_u32 :: proc "contextless" (arg1: u1, arg2, arg3: u32) -> (out1: u32) {
x1 := (u32(arg1) * 0xffffffff)
x2 := ((x1 & arg3) | ((~x1) & arg2))
out1 = x2

View File

@@ -305,7 +305,8 @@ fe_opp :: proc "contextless" (out1: ^Loose_Field_Element, arg1: ^Tight_Field_Ele
out1[4] = x5
}
fe_cond_assign :: proc "contextless" (out1, arg1: ^Tight_Field_Element, arg2: int) {
@(optimization_mode="none")
fe_cond_assign :: #force_no_inline proc "contextless" (out1, arg1: ^Tight_Field_Element, arg2: int) {
x1 := fiat.cmovznz_u64(fiat.u1(arg2), out1[0], arg1[0])
x2 := fiat.cmovznz_u64(fiat.u1(arg2), out1[1], arg1[1])
x3 := fiat.cmovznz_u64(fiat.u1(arg2), out1[2], arg1[2])
@@ -596,7 +597,8 @@ fe_set :: proc "contextless" (out1, arg1: ^Tight_Field_Element) {
out1[4] = x5
}
fe_cond_swap :: proc "contextless" (out1, out2: ^Tight_Field_Element, arg1: int) {
@(optimization_mode="none")
fe_cond_swap :: #force_no_inline proc "contextless" (out1, out2: ^Tight_Field_Element, arg1: int) {
mask := -u64(arg1)
x := (out1[0] ~ out2[0]) & mask
x1, y1 := out1[0] ~ x, out2[0] ~ x

View File

@@ -201,7 +201,8 @@ fe_opp :: proc "contextless" (out1: ^Loose_Field_Element, arg1: ^Tight_Field_Ele
out1[2] = x3
}
fe_cond_assign :: proc "contextless" (out1, arg1: ^Tight_Field_Element, arg2: bool) {
@(optimization_mode="none")
fe_cond_assign :: #force_no_inline proc "contextless" (out1, arg1: ^Tight_Field_Element, arg2: bool) {
x1 := fiat.cmovznz_u64(fiat.u1(arg2), out1[0], arg1[0])
x2 := fiat.cmovznz_u64(fiat.u1(arg2), out1[1], arg1[1])
x3 := fiat.cmovznz_u64(fiat.u1(arg2), out1[2], arg1[2])
@@ -342,7 +343,8 @@ fe_set :: #force_inline proc "contextless" (out1, arg1: ^Tight_Field_Element) {
out1[2] = x3
}
fe_cond_swap :: proc "contextless" (out1, out2: ^Tight_Field_Element, arg1: bool) {
@(optimization_mode="none")
fe_cond_swap :: #force_no_inline proc "contextless" (out1, out2: ^Tight_Field_Element, arg1: bool) {
mask := -u64(arg1)
x := (out1[0] ~ out2[0]) & mask
x1, y1 := out1[0] ~ x, out2[0] ~ x

View File

@@ -70,7 +70,7 @@ hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
@@ -149,7 +149,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
@@ -228,7 +228,7 @@ hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
@@ -307,7 +307,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}

View File

@@ -77,7 +77,7 @@ hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_blake2.update(&ctx, buf[:read])
}

View File

@@ -77,7 +77,7 @@ hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_blake2.update(&ctx, buf[:read])
}

View File

@@ -8,15 +8,23 @@ KEY_SIZE :: 32
NONCE_SIZE :: 12
XNONCE_SIZE :: 24
@(private)
_MAX_CTR_IETF :: 0xffffffff
@(private)
_BLOCK_SIZE :: 64
@(private)
_STATE_SIZE_U32 :: 16
@(private)
_ROUNDS :: 20
@(private)
_SIGMA_0 : u32 : 0x61707865
@(private)
_SIGMA_1 : u32 : 0x3320646e
@(private)
_SIGMA_2 : u32 : 0x79622d32
@(private)
_SIGMA_3 : u32 : 0x6b206574
Context :: struct {
@@ -179,6 +187,7 @@ reset :: proc (ctx: ^Context) {
ctx._is_initialized = false
}
@(private)
_do_blocks :: proc (ctx: ^Context, dst, src: []byte, nr_blocks: int) {
// Enforce the maximum consumed keystream per nonce.
//
@@ -441,6 +450,7 @@ _do_blocks :: proc (ctx: ^Context, dst, src: []byte, nr_blocks: int) {
}
}
@(private)
_hchacha20 :: proc (dst, key, nonce: []byte) {
x0, x1, x2, x3 := _SIGMA_0, _SIGMA_1, _SIGMA_2, _SIGMA_3
x4 := util.U32_LE(key[0:4])

View File

@@ -10,8 +10,10 @@ KEY_SIZE :: chacha20.KEY_SIZE
NONCE_SIZE :: chacha20.NONCE_SIZE
TAG_SIZE :: poly1305.TAG_SIZE
@(private)
_P_MAX :: 64 * 0xffffffff // 64 * (2^32-1)
@(private)
_validate_common_slice_sizes :: proc (tag, key, nonce, aad, text: []byte) {
if len(tag) != TAG_SIZE {
panic("crypto/chacha20poly1305: invalid destination tag size")
@@ -37,7 +39,10 @@ _validate_common_slice_sizes :: proc (tag, key, nonce, aad, text: []byte) {
}
}
@(private)
_PAD: [16]byte
@(private)
_update_mac_pad16 :: #force_inline proc (ctx: ^poly1305.Context, x_len: int) {
if pad_len := 16 - (x_len & (16-1)); pad_len != 16 {
poly1305.update(ctx, _PAD[:pad_len])

View File

@@ -26,6 +26,7 @@ compare_constant_time :: proc "contextless" (a, b: []byte) -> int {
//
// The execution time of this routine is constant regardless of the
// contents of the memory being compared.
@(optimization_mode="none")
compare_byte_ptrs_constant_time :: proc "contextless" (a, b: ^byte, n: int) -> int {
x := mem.slice_ptr(a, n)
y := mem.slice_ptr(b, n)

View File

@@ -65,7 +65,7 @@ hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}

View File

@@ -70,7 +70,7 @@ hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
@@ -149,7 +149,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
@@ -228,7 +228,7 @@ hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
@@ -307,7 +307,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}

View File

@@ -79,7 +79,7 @@ hash_stream_128_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
ctx.str_len = u32(len(buf[:read]))
if read > 0 {
update(&ctx, buf[:read])
@@ -164,7 +164,7 @@ hash_stream_128_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
ctx.str_len = u32(len(buf[:read]))
if read > 0 {
update(&ctx, buf[:read])
@@ -249,7 +249,7 @@ hash_stream_128_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
ctx.str_len = u32(len(buf[:read]))
if read > 0 {
update(&ctx, buf[:read])
@@ -334,7 +334,7 @@ hash_stream_160_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
ctx.str_len = u32(len(buf[:read]))
if read > 0 {
update(&ctx, buf[:read])
@@ -419,7 +419,7 @@ hash_stream_160_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
ctx.str_len = u32(len(buf[:read]))
if read > 0 {
update(&ctx, buf[:read])
@@ -504,7 +504,7 @@ hash_stream_160_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
ctx.str_len = u32(len(buf[:read]))
if read > 0 {
update(&ctx, buf[:read])
@@ -589,7 +589,7 @@ hash_stream_192_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
ctx.str_len = u32(len(buf[:read]))
if read > 0 {
update(&ctx, buf[:read])
@@ -674,7 +674,7 @@ hash_stream_192_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
ctx.str_len = u32(len(buf[:read]))
if read > 0 {
update(&ctx, buf[:read])
@@ -759,7 +759,7 @@ hash_stream_192_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
ctx.str_len = u32(len(buf[:read]))
if read > 0 {
update(&ctx, buf[:read])
@@ -844,7 +844,7 @@ hash_stream_224_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
ctx.str_len = u32(len(buf[:read]))
if read > 0 {
update(&ctx, buf[:read])
@@ -929,7 +929,7 @@ hash_stream_224_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
ctx.str_len = u32(len(buf[:read]))
if read > 0 {
update(&ctx, buf[:read])
@@ -1014,7 +1014,7 @@ hash_stream_224_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
ctx.str_len = u32(len(buf[:read]))
if read > 0 {
update(&ctx, buf[:read])
@@ -1099,7 +1099,7 @@ hash_stream_256_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
ctx.str_len = u32(len(buf[:read]))
if read > 0 {
update(&ctx, buf[:read])
@@ -1184,7 +1184,7 @@ hash_stream_256_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
ctx.str_len = u32(len(buf[:read]))
if read > 0 {
update(&ctx, buf[:read])
@@ -1270,7 +1270,7 @@ hash_stream_256_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
ctx.str_len = u32(len(buf[:read]))
if read > 0 {
update(&ctx, buf[:read])

View File

@@ -70,7 +70,7 @@ hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
@@ -149,7 +149,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
@@ -228,7 +228,7 @@ hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
@@ -307,7 +307,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}

View File

@@ -77,7 +77,7 @@ hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_sha3.update(&ctx, buf[:read])
}
@@ -159,7 +159,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_sha3.update(&ctx, buf[:read])
}
@@ -241,7 +241,7 @@ hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_sha3.update(&ctx, buf[:read])
}
@@ -323,7 +323,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_sha3.update(&ctx, buf[:read])
}

View File

@@ -64,7 +64,7 @@ hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}

View File

@@ -68,7 +68,7 @@ hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}

View File

@@ -67,7 +67,7 @@ hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}

View File

@@ -8,6 +8,7 @@ import "core:mem"
KEY_SIZE :: 32
TAG_SIZE :: 16
@(private)
_BLOCK_SIZE :: 16
sum :: proc (dst, msg, key: []byte) {
@@ -141,6 +142,7 @@ reset :: proc (ctx: ^Context) {
ctx._is_initialized = false
}
@(private)
_blocks :: proc (ctx: ^Context, msg: []byte, final := false) {
n: field.Tight_Field_Element = ---
final_byte := byte(!final)

View File

@@ -12,7 +12,7 @@ _rand_bytes :: proc (dst: []byte) {
for l > 0 {
to_read := min(l, _MAX_PER_CALL_BYTES)
ret := unix.sys_getrandom(raw_data(dst), to_read, 0)
ret := unix.sys_getrandom(raw_data(dst), uint(to_read), 0)
if ret < 0 {
switch os.Errno(-ret) {
case os.EINTR:

View File

@@ -69,7 +69,7 @@ hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
@@ -145,7 +145,7 @@ hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
@@ -221,7 +221,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
@@ -297,7 +297,7 @@ hash_stream_320 :: proc(s: io.Stream) -> ([DIGEST_SIZE_320]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}

View File

@@ -67,7 +67,7 @@ hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}

View File

@@ -74,7 +74,7 @@ hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
@@ -153,7 +153,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
@@ -232,7 +232,7 @@ hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
@@ -311,7 +311,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}

View File

@@ -73,7 +73,7 @@ hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_sha3.update(&ctx, buf[:read])
}
@@ -152,7 +152,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_sha3.update(&ctx, buf[:read])
}
@@ -231,7 +231,7 @@ hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_sha3.update(&ctx, buf[:read])
}
@@ -310,7 +310,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_sha3.update(&ctx, buf[:read])
}

View File

@@ -73,7 +73,7 @@ hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_sha3.update(&ctx, buf[:read])
}
@@ -155,7 +155,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_sha3.update(&ctx, buf[:read])
}

View File

@@ -66,7 +66,7 @@ hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}

View File

@@ -70,7 +70,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}
@@ -146,7 +146,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}

View File

@@ -71,7 +71,7 @@ hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_tiger.update(&ctx, buf[:read])
}
@@ -150,7 +150,7 @@ hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_tiger.update(&ctx, buf[:read])
}
@@ -229,7 +229,7 @@ hash_stream_192 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_tiger.update(&ctx, buf[:read])
}

View File

@@ -71,7 +71,7 @@ hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_tiger.update(&ctx, buf[:read])
}
@@ -150,7 +150,7 @@ hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_tiger.update(&ctx, buf[:read])
}
@@ -229,7 +229,7 @@ hash_stream_192 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
_tiger.update(&ctx, buf[:read])
}

View File

@@ -11,6 +11,8 @@ package util
*/
import "core:mem"
// Keep vet happy
_ :: mem
// @note(bp): this can replace the other two
cast_slice :: #force_inline proc "contextless" ($D: typeid/[]$DE, src: $S/[]$SE) -> D {

View File

@@ -66,7 +66,7 @@ hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
defer delete(buf)
read := 1
for read > 0 {
read, _ = s->impl_read(buf)
read, _ = io.read(s, buf)
if read > 0 {
update(&ctx, buf[:read])
}

View File

@@ -6,8 +6,10 @@ import "core:mem"
SCALAR_SIZE :: 32
POINT_SIZE :: 32
@(private)
_BASE_POINT: [32]byte = {9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
@(private)
_scalar_bit :: #force_inline proc "contextless" (s: ^[32]byte, i: int) -> u8 {
if i < 0 {
return 0
@@ -15,6 +17,7 @@ _scalar_bit :: #force_inline proc "contextless" (s: ^[32]byte, i: int) -> u8 {
return (s[i>>3] >> uint(i&7)) & 1
}
@(private)
_scalarmult :: proc (out, scalar, point: ^[32]byte) {
// Montgomery pseduo-multiplication taken from Monocypher.

View File

@@ -1,8 +1,5 @@
package debug_pe
import "core:runtime"
import "core:io"
Section_Header32 :: struct {
name: [8]u8,
virtual_size: u32le,

7
core/dynlib/doc.odin Normal file
View File

@@ -0,0 +1,7 @@
/*
Package core:dynlib implements loading of shared libraries/DLLs and their symbols.
The behaviour of dynamically loaded libraries is specific to the target platform of the program.
For in depth detail on the underlying behaviour please refer to your target platform's documentation.
*/
package dynlib

View File

@@ -1,15 +1,94 @@
package dynlib
/*
A handle to a dynamically loaded library.
*/
Library :: distinct rawptr
load_library :: proc(path: string, global_symbols := false) -> (Library, bool) {
/*
Loads a dynamic library from the filesystem. The paramater `global_symbols` makes the symbols in the loaded
library available to resolve references in subsequently loaded libraries.
The paramater `global_symbols` is only used for the platforms `linux`, `darwin`, `freebsd` and `openbsd`.
On `windows` this paramater is ignored.
The underlying behaviour is platform specific.
On `linux`, `darwin`, `freebsd` and `openbsd` refer to `dlopen`.
On `windows` refer to `LoadLibraryW`.
**Implicit Allocators**
`context.temp_allocator`
Example:
import "core:dynlib"
import "core:fmt"
load_my_library :: proc() {
LIBRARY_PATH :: "my_library.dll"
library, ok := dynlib.load_library(LIBRARY_PATH)
if ! ok {
return
}
fmt.println("The library %q was successfully loaded", LIBRARY_PATH)
}
*/
load_library :: proc(path: string, global_symbols := false) -> (library: Library, did_load: bool) {
return _load_library(path, global_symbols)
}
unload_library :: proc(library: Library) -> bool {
/*
Unloads a dynamic library.
The underlying behaviour is platform specific.
On `linux`, `darwin`, `freebsd` and `openbsd` refer to `dlclose`.
On `windows` refer to `FreeLibrary`.
Example:
import "core:dynlib"
import "core:fmt"
load_then_unload_my_library :: proc() {
LIBRARY_PATH :: "my_library.dll"
library, ok := dynlib.load_library(LIBRARY_PATH)
if ! ok {
return
}
did_unload := dynlib.unload_library(library)
if ! did_unload {
return
}
fmt.println("The library %q was successfully unloaded", LIBRARY_PATH)
}
*/
unload_library :: proc(library: Library) -> (did_unload: bool) {
return _unload_library(library)
}
/*
Loads the address of a procedure/variable from a dynamic library.
The underlying behaviour is platform specific.
On `linux`, `darwin`, `freebsd` and `openbsd` refer to `dlsym`.
On `windows` refer to `GetProcAddress`.
**Implicit Allocators**
`context.temp_allocator`
Example:
import "core:dynlib"
import "core:fmt"
find_a_in_my_library :: proc() {
LIBRARY_PATH :: "my_library.dll"
library, ok := dynlib.load_library(LIBRARY_PATH)
if ! ok {
return
}
a, found_a := dynlib.symbol_address(library, "a")
if found_a do fmt.printf("The symbol %q was found at the address %v", "a", a)
}
*/
symbol_address :: proc(library: Library, symbol: string) -> (ptr: rawptr, found: bool) #optional_ok {
return _symbol_address(library, symbol)
}

15
core/dynlib/lib_js.odin Normal file
View File

@@ -0,0 +1,15 @@
//+build js
//+private
package dynlib
_load_library :: proc(path: string, global_symbols := false) -> (Library, bool) {
return nil, false
}
_unload_library :: proc(library: Library) -> bool {
return false
}
_symbol_address :: proc(library: Library, symbol: string) -> (ptr: rawptr, found: bool) {
return nil, false
}

View File

@@ -4,10 +4,12 @@ package dynlib
import win32 "core:sys/windows"
import "core:strings"
import "core:runtime"
_load_library :: proc(path: string, global_symbols := false) -> (Library, bool) {
// NOTE(bill): 'global_symbols' is here only for consistency with POSIX which has RTLD_GLOBAL
runtime.DEFAULT_TEMP_ALLOCATOR_TEMP_GUARD()
wide_path := win32.utf8_to_wstring(path, context.temp_allocator)
handle := cast(Library)win32.LoadLibraryW(wide_path)
return handle, handle != nil
@@ -19,6 +21,7 @@ _unload_library :: proc(library: Library) -> bool {
}
_symbol_address :: proc(library: Library, symbol: string) -> (ptr: rawptr, found: bool) {
runtime.DEFAULT_TEMP_ALLOCATOR_TEMP_GUARD()
c_str := strings.clone_to_cstring(symbol, context.temp_allocator)
ptr = win32.GetProcAddress(cast(win32.HMODULE)library, c_str)
found = ptr != nil

View File

@@ -42,7 +42,7 @@ write :: proc(w: ^Writer, record: []string) -> io.Error {
}
}
case:
if strings.contains_rune(field, w.comma) >= 0 {
if strings.contains_rune(field, w.comma) {
return true
}
if strings.contains_any(field, CHAR_SET) {

View File

@@ -0,0 +1,73 @@
package hex
import "core:strings"
encode :: proc(src: []byte, allocator := context.allocator) -> []byte #no_bounds_check {
dst := make([]byte, len(src) * 2, allocator)
for i, j := 0, 0; i < len(src); i += 1 {
v := src[i]
dst[j] = HEXTABLE[v>>4]
dst[j+1] = HEXTABLE[v&0x0f]
j += 2
}
return dst
}
decode :: proc(src: []byte, allocator := context.allocator) -> (dst: []byte, ok: bool) #no_bounds_check {
if len(src) % 2 == 1 {
return
}
dst = make([]byte, len(src) / 2, allocator)
for i, j := 0, 1; j < len(src); j += 2 {
p := src[j-1]
q := src[j]
a := hex_digit(p) or_return
b := hex_digit(q) or_return
dst[i] = (a << 4) | b
i += 1
}
return dst, true
}
// Decodes the given sequence into one byte.
// Should be called with one byte worth of the source, eg: 0x23 -> '#'.
decode_sequence :: proc(str: string) -> (res: byte, ok: bool) {
str := str
if strings.has_prefix(str, "0x") || strings.has_prefix(str, "0X") {
str = str[2:]
}
if len(str) != 2 {
return 0, false
}
upper := hex_digit(str[0]) or_return
lower := hex_digit(str[1]) or_return
return upper << 4 | lower, true
}
@(private)
HEXTABLE := [16]byte {
'0', '1', '2', '3',
'4', '5', '6', '7',
'8', '9', 'a', 'b',
'c', 'd', 'e', 'f',
}
@(private)
hex_digit :: proc(char: byte) -> (u8, bool) {
switch char {
case '0' ..= '9': return char - '0', true
case 'a' ..= 'f': return char - 'a' + 10, true
case 'A' ..= 'F': return char - 'A' + 10, true
case: return 0, false
}
}

View File

@@ -153,7 +153,7 @@ marshal_to_writer :: proc(w: io.Writer, v: any, opt: ^Marshal_Options) -> (err:
case complex128: r, i = f64(real(z)), f64(imag(z))
case: return .Unsupported_Type
}
io.write_byte(w, '[') or_return
io.write_f64(w, r) or_return
io.write_string(w, ", ") or_return
@@ -165,8 +165,8 @@ marshal_to_writer :: proc(w: io.Writer, v: any, opt: ^Marshal_Options) -> (err:
case runtime.Type_Info_String:
switch s in a {
case string: io.write_quoted_string(w, s) or_return
case cstring: io.write_quoted_string(w, string(s)) or_return
case string: io.write_quoted_string(w, s, '"', nil, true) or_return
case cstring: io.write_quoted_string(w, string(s), '"', nil, true) or_return
}
case runtime.Type_Info_Boolean:
@@ -198,7 +198,7 @@ marshal_to_writer :: proc(w: io.Writer, v: any, opt: ^Marshal_Options) -> (err:
case runtime.Type_Info_Procedure:
return .Unsupported_Type
case runtime.Type_Info_Tuple:
case runtime.Type_Info_Parameters:
return .Unsupported_Type
case runtime.Type_Info_Simd_Vector:
@@ -262,10 +262,14 @@ marshal_to_writer :: proc(w: io.Writer, v: any, opt: ^Marshal_Options) -> (err:
}
map_cap := uintptr(runtime.map_cap(m^))
ks, vs, hs, _, _ := runtime.map_kvh_data_dynamic(m^, info.map_info)
i := 0
for bucket_index in 0..<map_cap {
if !runtime.map_hash_is_valid(hs[bucket_index]) {
continue
}
opt_write_iteration(w, opt, i) or_return
i += 1
key := rawptr(runtime.map_cell_index_dynamic(ks, info.map_info.ks, bucket_index))
value := rawptr(runtime.map_cell_index_dynamic(vs, info.map_info.vs, bucket_index))
@@ -437,7 +441,7 @@ opt_write_start :: proc(w: io.Writer, opt: ^Marshal_Options, c: byte) -> (err: i
return
}
// insert comma seperation and write indentations
// insert comma separation and write indentations
opt_write_iteration :: proc(w: io.Writer, opt: ^Marshal_Options, iteration: int) -> (err: io.Error) {
switch opt.spec {
case .JSON, .JSON5:
@@ -457,7 +461,7 @@ opt_write_iteration :: proc(w: io.Writer, opt: ^Marshal_Options, iteration: int)
if opt.pretty {
io.write_byte(w, '\n') or_return
} else {
// comma seperation necessary for non pretty output!
// comma separation necessary for non pretty output!
io.write_string(w, ", ") or_return
}
}

View File

@@ -2,6 +2,7 @@ package json
import "core:mem"
import "core:unicode/utf8"
import "core:unicode/utf16"
import "core:strconv"
Parser :: struct {
@@ -403,11 +404,19 @@ unquote_string :: proc(token: Token, spec: Specification, allocator := context.a
}
i += 6
// If this is a surrogate pair, decode as such by taking the next rune too.
if r >= utf8.SURROGATE_MIN && r <= utf8.SURROGATE_HIGH_MAX && len(s) > i + 2 && s[i:i+2] == "\\u" {
r2 := get_u4_rune(s[i:])
if r2 >= utf8.SURROGATE_LOW_MIN && r2 <= utf8.SURROGATE_MAX {
i += 6
r = utf16.decode_surrogate_pair(r, r2)
}
}
buf, buf_width := utf8.encode_rune(r)
copy(b[w:], buf[:buf_width])
w += buf_width
case '0':
if spec != .JSON {
b[w] = '\x00'

View File

@@ -163,8 +163,9 @@ get_token :: proc(t: ^Tokenizer) -> (token: Token, err: Error) {
skip_alphanum :: proc(t: ^Tokenizer) {
for t.offset < len(t.data) {
switch next_rune(t) {
switch t.r {
case 'A'..='Z', 'a'..='z', '0'..='9', '_':
next_rune(t)
continue
}

View File

@@ -87,7 +87,8 @@ Error :: enum {
destroy_value :: proc(value: Value) {
destroy_value :: proc(value: Value, allocator := context.allocator) {
context.allocator = allocator
#partial switch v in value {
case Object:
for key, elem in v {
@@ -103,5 +104,4 @@ destroy_value :: proc(value: Value) {
case String:
delete(v)
}
}
}

View File

@@ -215,6 +215,12 @@ unmarshal_value :: proc(p: ^Parser, v: any) -> (err: Unmarshal_Error) {
}
}
switch dst in &v {
// Handle json.Value as an unknown type
case Value:
dst = parse_value(p) or_return
return
}
#partial switch token.kind {
case .Null:
@@ -346,6 +352,8 @@ unmarshal_object :: proc(p: ^Parser, v: any, end_token: Token_Kind) -> (err: Unm
fields := reflect.struct_fields_zipped(ti.id)
runtime.DEFAULT_TEMP_ALLOCATOR_TEMP_GUARD(ignore = context.temp_allocator == context.allocator)
field_used := make([]bool, len(fields), context.temp_allocator)
use_field_idx := -1

View File

@@ -33,6 +33,7 @@ import "core:intrinsics"
import "core:mem"
import "core:os"
import "core:strings"
import "core:runtime"
likely :: intrinsics.expect
@@ -408,7 +409,7 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha
next := scan(t)
#partial switch next.kind {
case .Ident:
if len(next.text) == 3 && strings.to_lower(next.text, context.temp_allocator) == "xml" {
if len(next.text) == 3 && strings.equal_fold(next.text, "xml") {
parse_prologue(doc) or_return
} else if len(doc.prologue) > 0 {
/*
@@ -614,6 +615,7 @@ parse_prologue :: proc(doc: ^Document) -> (err: Error) {
}
case "encoding":
runtime.DEFAULT_TEMP_ALLOCATOR_TEMP_GUARD()
switch strings.to_lower(attr.val, context.temp_allocator) {
case "utf-8", "utf8":
doc.encoding = .UTF_8

View File

@@ -68,7 +68,7 @@ A period with no following number specifies a precision of 0.
Examples:
%f default width, default precision
%8f width 8, default precision
%.3f default width, precision 2
%.2f default width, precision 2
%8.3f width 8, precision 3
%8.f width 8, precision 0

File diff suppressed because it is too large Load Diff

View File

@@ -7,7 +7,7 @@ foreign import "odin_env"
@(private="file")
foreign odin_env {
write :: proc "c" (fd: u32, p: []byte) ---
write :: proc "contextless" (fd: u32, p: []byte) ---
}
@(private="file")

View File

@@ -4,42 +4,72 @@ package fmt
import "core:runtime"
import "core:os"
import "core:io"
import "core:bufio"
// fprint formats using the default print settings and writes to fd
fprint :: proc(fd: os.Handle, args: ..any, sep := " ") -> int {
w := io.to_writer(os.stream_from_handle(fd))
return wprint(w=w, args=args, sep=sep)
buf: [1024]byte
b: bufio.Writer
defer bufio.writer_flush(&b)
bufio.writer_init_with_buf(&b, os.stream_from_handle(fd), buf[:])
w := bufio.writer_to_writer(&b)
return wprint(w, ..args, sep=sep)
}
// fprintln formats using the default print settings and writes to fd
fprintln :: proc(fd: os.Handle, args: ..any, sep := " ") -> int {
w := io.to_writer(os.stream_from_handle(fd))
return wprintln(w=w, args=args, sep=sep)
buf: [1024]byte
b: bufio.Writer
defer bufio.writer_flush(&b)
bufio.writer_init_with_buf(&b, os.stream_from_handle(fd), buf[:])
w := bufio.writer_to_writer(&b)
return wprintln(w, ..args, sep=sep)
}
// fprintf formats according to the specified format string and writes to fd
fprintf :: proc(fd: os.Handle, fmt: string, args: ..any) -> int {
w := io.to_writer(os.stream_from_handle(fd))
buf: [1024]byte
b: bufio.Writer
defer bufio.writer_flush(&b)
bufio.writer_init_with_buf(&b, os.stream_from_handle(fd), buf[:])
w := bufio.writer_to_writer(&b)
return wprintf(w, fmt, ..args)
}
fprint_type :: proc(fd: os.Handle, info: ^runtime.Type_Info) -> (n: int, err: io.Error) {
w := io.to_writer(os.stream_from_handle(fd))
buf: [1024]byte
b: bufio.Writer
defer bufio.writer_flush(&b)
bufio.writer_init_with_buf(&b, os.stream_from_handle(fd), buf[:])
w := bufio.writer_to_writer(&b)
return wprint_type(w, info)
}
fprint_typeid :: proc(fd: os.Handle, id: typeid) -> (n: int, err: io.Error) {
w := io.to_writer(os.stream_from_handle(fd))
buf: [1024]byte
b: bufio.Writer
defer bufio.writer_flush(&b)
bufio.writer_init_with_buf(&b, os.stream_from_handle(fd), buf[:])
w := bufio.writer_to_writer(&b)
return wprint_typeid(w, id)
}
// print formats using the default print settings and writes to os.stdout
print :: proc(args: ..any, sep := " ") -> int { return fprint(fd=os.stdout, args=args, sep=sep) }
print :: proc(args: ..any, sep := " ") -> int { return fprint(os.stdout, ..args, sep=sep) }
// println formats using the default print settings and writes to os.stdout
println :: proc(args: ..any, sep := " ") -> int { return fprintln(fd=os.stdout, args=args, sep=sep) }
println :: proc(args: ..any, sep := " ") -> int { return fprintln(os.stdout, ..args, sep=sep) }
// printf formats according to the specified format string and writes to os.stdout
printf :: proc(fmt: string, args: ..any) -> int { return fprintf(os.stdout, fmt, ..args) }
// eprint formats using the default print settings and writes to os.stderr
eprint :: proc(args: ..any, sep := " ") -> int { return fprint(fd=os.stderr, args=args, sep=sep) }
eprint :: proc(args: ..any, sep := " ") -> int { return fprint(os.stderr, ..args, sep=sep) }
// eprintln formats using the default print settings and writes to os.stderr
eprintln :: proc(args: ..any, sep := " ") -> int { return fprintln(fd=os.stderr, args=args, sep=sep) }
eprintln :: proc(args: ..any, sep := " ") -> int { return fprintln(os.stderr, ..args, sep=sep) }
// eprintf formats according to the specified format string and writes to os.stderr
eprintf :: proc(fmt: string, args: ..any) -> int { return fprintf(os.stderr, fmt, ..args) }

View File

@@ -118,7 +118,7 @@ XXH_mul_64_to_128_fold_64 :: #force_inline proc(lhs, rhs: xxh_u64) -> (res: xxh_
}
@(optimization_mode="speed")
XXH_xorshift_64 :: #force_inline proc(v: xxh_u64, auto_cast shift: uint) -> (res: xxh_u64) {
XXH_xorshift_64 :: #force_inline proc(v: xxh_u64, #any_int shift: uint) -> (res: xxh_u64) {
return v ~ (v >> shift)
}

View File

@@ -634,7 +634,7 @@ alpha_add_if_missing :: proc(img: ^Image, alpha_key := Alpha_Key{}, allocator :=
buf := bytes.Buffer{}
// Can we allocate the return buffer?
if !resize(&buf.buf, bytes_wanted) {
if resize(&buf.buf, bytes_wanted) != nil {
delete(buf.buf)
return false
}
@@ -826,7 +826,7 @@ alpha_drop_if_present :: proc(img: ^Image, options := Options{}, alpha_key := Al
buf := bytes.Buffer{}
// Can we allocate the return buffer?
if !resize(&buf.buf, bytes_wanted) {
if resize(&buf.buf, bytes_wanted) != nil {
delete(buf.buf)
return false
}
@@ -1075,7 +1075,7 @@ apply_palette_rgb :: proc(img: ^Image, palette: [256]RGB_Pixel, allocator := con
// Can we allocate the return buffer?
buf := bytes.Buffer{}
bytes_wanted := compute_buffer_size(img.width, img.height, 3, 8)
if !resize(&buf.buf, bytes_wanted) {
if resize(&buf.buf, bytes_wanted) != nil {
delete(buf.buf)
return false
}
@@ -1112,7 +1112,7 @@ apply_palette_rgba :: proc(img: ^Image, palette: [256]RGBA_Pixel, allocator := c
// Can we allocate the return buffer?
buf := bytes.Buffer{}
bytes_wanted := compute_buffer_size(img.width, img.height, 4, 8)
if !resize(&buf.buf, bytes_wanted) {
if resize(&buf.buf, bytes_wanted) != nil {
delete(buf.buf)
return false
}
@@ -1147,7 +1147,7 @@ expand_grayscale :: proc(img: ^Image, allocator := context.allocator) -> (ok: bo
// Can we allocate the return buffer?
buf := bytes.Buffer{}
bytes_wanted := compute_buffer_size(img.width, img.height, img.channels + 2, img.depth)
if !resize(&buf.buf, bytes_wanted) {
if resize(&buf.buf, bytes_wanted) != nil {
delete(buf.buf)
return false
}

View File

@@ -1,6 +1,48 @@
package image
import "core:os"
import "core:mem"
import "core:bytes"
Loader_Proc :: #type proc(data: []byte, options: Options, allocator: mem.Allocator) -> (img: ^Image, err: Error)
Destroy_Proc :: #type proc(img: ^Image)
@(private)
_internal_loaders: [Which_File_Type]Loader_Proc
_internal_destroyers: [Which_File_Type]Destroy_Proc
register :: proc(kind: Which_File_Type, loader: Loader_Proc, destroyer: Destroy_Proc) {
assert(loader != nil)
assert(destroyer != nil)
assert(_internal_loaders[kind] == nil)
_internal_loaders[kind] = loader
assert(_internal_destroyers[kind] == nil)
_internal_destroyers[kind] = destroyer
}
load_from_bytes :: proc(data: []byte, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
loader := _internal_loaders[which(data)]
if loader == nil {
return nil, .Unsupported_Format
}
return loader(data, options, allocator)
}
destroy :: proc(img: ^Image, allocator := context.allocator) {
if img == nil {
return
}
context.allocator = allocator
destroyer := _internal_destroyers[img.which]
if destroyer != nil {
destroyer(img)
} else {
assert(img.metadata == nil)
bytes.buffer_destroy(&img.pixels)
free(img)
}
}
Which_File_Type :: enum {
Unknown,
@@ -28,11 +70,6 @@ Which_File_Type :: enum {
XBM, // X BitMap
}
which :: proc{
which_bytes,
which_file,
}
which_bytes :: proc(data: []byte) -> Which_File_Type {
test_tga :: proc(s: string) -> bool {
get8 :: #force_inline proc(s: ^string) -> u8 {
@@ -164,16 +201,3 @@ which_bytes :: proc(data: []byte) -> Which_File_Type {
}
return .Unknown
}
which_file :: proc(path: string) -> Which_File_Type {
f, err := os.open(path)
if err != 0 {
return .Unknown
}
header: [128]byte
os.read(f, header[:])
file_type := which_bytes(header[:])
os.close(f)
return file_type
}

View File

@@ -0,0 +1,10 @@
//+build js
package image
load :: proc{
load_from_bytes,
}
which :: proc{
which_bytes,
}

View File

@@ -1,61 +0,0 @@
package image
import "core:mem"
import "core:os"
import "core:bytes"
Loader_Proc :: #type proc(data: []byte, options: Options, allocator: mem.Allocator) -> (img: ^Image, err: Error)
Destroy_Proc :: #type proc(img: ^Image)
@(private)
_internal_loaders: [Which_File_Type]Loader_Proc
_internal_destroyers: [Which_File_Type]Destroy_Proc
register :: proc(kind: Which_File_Type, loader: Loader_Proc, destroyer: Destroy_Proc) {
assert(loader != nil)
assert(destroyer != nil)
assert(_internal_loaders[kind] == nil)
_internal_loaders[kind] = loader
assert(_internal_destroyers[kind] == nil)
_internal_destroyers[kind] = destroyer
}
load :: proc{
load_from_bytes,
load_from_file,
}
load_from_bytes :: proc(data: []byte, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
loader := _internal_loaders[which(data)]
if loader == nil {
return nil, .Unsupported_Format
}
return loader(data, options, allocator)
}
load_from_file :: proc(filename: string, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
data, ok := os.read_entire_file(filename, allocator)
defer delete(data, allocator)
if ok {
return load_from_bytes(data, options, allocator)
} else {
return nil, .Unable_To_Read_File
}
}
destroy :: proc(img: ^Image, allocator := context.allocator) {
if img == nil {
return
}
context.allocator = allocator
destroyer := _internal_destroyers[img.which]
if destroyer != nil {
destroyer(img)
} else {
assert(img.metadata == nil)
bytes.buffer_destroy(&img.pixels)
free(img)
}
}

View File

@@ -0,0 +1,38 @@
//+build !js
package image
import "core:os"
load :: proc{
load_from_bytes,
load_from_file,
}
load_from_file :: proc(filename: string, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
data, ok := os.read_entire_file(filename, allocator)
defer delete(data, allocator)
if ok {
return load_from_bytes(data, options, allocator)
} else {
return nil, .Unable_To_Read_File
}
}
which :: proc{
which_bytes,
which_file,
}
which_file :: proc(path: string) -> Which_File_Type {
f, err := os.open(path)
if err != 0 {
return .Unknown
}
header: [128]byte
os.read(f, header[:])
file_type := which_bytes(header[:])
os.close(f)
return file_type
}

View File

@@ -4,10 +4,10 @@ import "core:bytes"
import "core:fmt"
import "core:image"
import "core:mem"
import "core:os"
import "core:strconv"
import "core:strings"
import "core:unicode"
import "core:runtime"
Image :: image.Image
Format :: image.Netpbm_Format
@@ -26,23 +26,6 @@ PFM :: Formats{.Pf, .PF}
ASCII :: Formats{.P1, .P2, .P3}
BINARY :: Formats{.P4, .P5, .P6} + PAM + PFM
load :: proc {
load_from_file,
load_from_bytes,
}
load_from_file :: proc(filename: string, allocator := context.allocator) -> (img: ^Image, err: Error) {
context.allocator = allocator
data, ok := os.read_entire_file(filename); defer delete(data)
if !ok {
err = .Unable_To_Read_File
return
}
return load_from_bytes(data)
}
load_from_bytes :: proc(data: []byte, allocator := context.allocator) -> (img: ^Image, err: Error) {
context.allocator = allocator
@@ -66,24 +49,6 @@ load_from_bytes :: proc(data: []byte, allocator := context.allocator) -> (img: ^
return img, nil
}
save :: proc {
save_to_file,
save_to_buffer,
}
save_to_file :: proc(filename: string, img: ^Image, custom_info: Info = {}, allocator := context.allocator) -> (err: Error) {
context.allocator = allocator
data: []byte; defer delete(data)
data = save_to_buffer(img, custom_info) or_return
if ok := os.write_entire_file(filename, data); !ok {
return .Unable_To_Write_File
}
return Format_Error.None
}
save_to_buffer :: proc(img: ^Image, custom_info: Info = {}, allocator := context.allocator) -> (buffer: []byte, err: Error) {
context.allocator = allocator
@@ -407,6 +372,8 @@ _parse_header_pam :: proc(data: []byte, allocator := context.allocator) -> (head
}
length = header_end_index + len(HEADER_END)
runtime.DEFAULT_TEMP_ALLOCATOR_TEMP_GUARD(ignore = context.temp_allocator == allocator)
// string buffer for the tupltype
tupltype: strings.Builder
strings.builder_init(&tupltype, context.temp_allocator); defer strings.builder_destroy(&tupltype)

View File

@@ -0,0 +1,10 @@
//+build js
package netpbm
load :: proc {
load_from_bytes,
}
save :: proc {
save_to_buffer,
}

View File

@@ -0,0 +1,41 @@
//+build !js
package netpbm
import "core:os"
load :: proc {
load_from_file,
load_from_bytes,
}
load_from_file :: proc(filename: string, allocator := context.allocator) -> (img: ^Image, err: Error) {
context.allocator = allocator
data, ok := os.read_entire_file(filename); defer delete(data)
if !ok {
err = .Unable_To_Read_File
return
}
return load_from_bytes(data)
}
save :: proc {
save_to_file,
save_to_buffer,
}
save_to_file :: proc(filename: string, img: ^Image, custom_info: Info = {}, allocator := context.allocator) -> (err: Error) {
context.allocator = allocator
data: []byte; defer delete(data)
data = save_to_buffer(img, custom_info) or_return
if ok := os.write_entire_file(filename, data); !ok {
return .Unable_To_Write_File
}
return Format_Error.None
}

View File

@@ -16,6 +16,7 @@ import coretime "core:time"
import "core:strings"
import "core:bytes"
import "core:mem"
import "core:runtime"
/*
Cleanup of image-specific data.
@@ -91,12 +92,14 @@ core_time :: proc(c: image.PNG_Chunk) -> (t: coretime.Time, ok: bool) {
}
text :: proc(c: image.PNG_Chunk) -> (res: Text, ok: bool) {
runtime.DEFAULT_TEMP_ALLOCATOR_TEMP_GUARD(ignore = context.temp_allocator == context.allocator)
assert(len(c.data) == int(c.header.length))
#partial switch c.header.type {
case .tEXt:
ok = true
fields := bytes.split(s=c.data, sep=[]u8{0}, allocator=context.temp_allocator)
fields := bytes.split(c.data, sep=[]u8{0}, allocator=context.temp_allocator)
if len(fields) == 2 {
res.keyword = strings.clone(string(fields[0]))
res.text = strings.clone(string(fields[1]))
@@ -107,7 +110,7 @@ text :: proc(c: image.PNG_Chunk) -> (res: Text, ok: bool) {
case .zTXt:
ok = true
fields := bytes.split_n(s=c.data, sep=[]u8{0}, n=3, allocator=context.temp_allocator)
fields := bytes.split_n(c.data, sep=[]u8{0}, n=3, allocator=context.temp_allocator)
if len(fields) != 3 || len(fields[1]) != 0 {
// Compression method must be 0=Deflate, which thanks to the split above turns
// into an empty slice
@@ -194,18 +197,18 @@ text_destroy :: proc(text: Text) {
}
iccp :: proc(c: image.PNG_Chunk) -> (res: iCCP, ok: bool) {
ok = true
runtime.DEFAULT_TEMP_ALLOCATOR_TEMP_GUARD(ignore = context.temp_allocator == context.allocator)
fields := bytes.split_n(s=c.data, sep=[]u8{0}, n=3, allocator=context.temp_allocator)
fields := bytes.split_n(c.data, sep=[]u8{0}, n=3, allocator=context.temp_allocator)
if len(fields[0]) < 1 || len(fields[0]) > 79 {
// Invalid profile name
ok = false; return
return
}
if len(fields[1]) != 0 {
// Compression method should be a zero, which the split turned into an empty slice.
ok = false; return
return
}
// Set up ZLIB context and decompress iCCP payload
@@ -213,12 +216,12 @@ iccp :: proc(c: image.PNG_Chunk) -> (res: iCCP, ok: bool) {
zlib_error := zlib.inflate_from_byte_array(fields[2], &buf)
if zlib_error != nil {
bytes.buffer_destroy(&buf)
ok = false; return
return
}
res.name = strings.clone(string(fields[0]))
res.profile = bytes.buffer_to_bytes(&buf)
ok = true
return
}
@@ -256,18 +259,18 @@ plte :: proc(c: image.PNG_Chunk) -> (res: PLTE, ok: bool) {
splt :: proc(c: image.PNG_Chunk) -> (res: sPLT, ok: bool) {
if c.header.type != .sPLT {
return {}, false
return
}
ok = true
runtime.DEFAULT_TEMP_ALLOCATOR_TEMP_GUARD(ignore = context.temp_allocator == context.allocator)
fields := bytes.split_n(s=c.data, sep=[]u8{0}, n=2, allocator=context.temp_allocator)
fields := bytes.split_n(c.data, sep=[]u8{0}, n=2, allocator=context.temp_allocator)
if len(fields) != 2 {
return {}, false
return
}
res.depth = fields[1][0]
if res.depth != 8 && res.depth != 16 {
return {}, false
return
}
data := fields[1][1:]
@@ -275,21 +278,21 @@ splt :: proc(c: image.PNG_Chunk) -> (res: sPLT, ok: bool) {
if res.depth == 8 {
if len(data) % 6 != 0 {
return {}, false
return
}
count = len(data) / 6
if count > 256 {
return {}, false
return
}
res.entries = mem.slice_data_cast([][4]u8, data)
} else { // res.depth == 16
if len(data) % 10 != 0 {
return {}, false
return
}
count = len(data) / 10
if count > 256 {
return {}, false
return
}
res.entries = mem.slice_data_cast([][4]u16, data)
@@ -297,7 +300,7 @@ splt :: proc(c: image.PNG_Chunk) -> (res: sPLT, ok: bool) {
res.name = strings.clone(string(fields[0]))
res.used = u16(count)
ok = true
return
}

View File

@@ -17,12 +17,12 @@ import "core:compress"
import "core:compress/zlib"
import "core:image"
import "core:os"
import "core:hash"
import "core:bytes"
import "core:io"
import "core:mem"
import "core:intrinsics"
import "core:runtime"
// Limit chunk sizes.
// By default: IDAT = 8k x 8k x 16-bits + 8k filter bytes.
@@ -335,19 +335,6 @@ load_from_bytes :: proc(data: []byte, options := Options{}, allocator := context
return img, err
}
load_from_file :: proc(filename: string, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
context.allocator = allocator
data, ok := os.read_entire_file(filename)
defer delete(data)
if ok {
return load_from_bytes(data, options)
} else {
return nil, .Unable_To_Read_File
}
}
load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
context.allocator = allocator
options := options
@@ -744,7 +731,7 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
// We need to create a new image buffer
dest_raw_size := compute_buffer_size(int(header.width), int(header.height), out_image_channels, 8)
t := bytes.Buffer{}
if !resize(&t.buf, dest_raw_size) {
if resize(&t.buf, dest_raw_size) != nil {
return {}, .Unable_To_Allocate_Or_Resize
}
@@ -825,7 +812,7 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
// We need to create a new image buffer
dest_raw_size := compute_buffer_size(int(header.width), int(header.height), out_image_channels, 16)
t := bytes.Buffer{}
if !resize(&t.buf, dest_raw_size) {
if resize(&t.buf, dest_raw_size) != nil {
return {}, .Unable_To_Allocate_Or_Resize
}
@@ -1024,7 +1011,7 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
// We need to create a new image buffer
dest_raw_size := compute_buffer_size(int(header.width), int(header.height), out_image_channels, 8)
t := bytes.Buffer{}
if !resize(&t.buf, dest_raw_size) {
if resize(&t.buf, dest_raw_size) != nil {
return {}, .Unable_To_Allocate_Or_Resize
}
@@ -1247,6 +1234,8 @@ defilter_8 :: proc(params: ^Filter_Params) -> (ok: bool) {
// TODO: See about doing a Duff's #unroll where practicable
runtime.DEFAULT_TEMP_ALLOCATOR_TEMP_GUARD()
// Apron so we don't need to special case first rows.
up := make([]u8, row_stride, context.temp_allocator)
ok = true
@@ -1299,10 +1288,9 @@ defilter_8 :: proc(params: ^Filter_Params) -> (ok: bool) {
}
// @(optimization_mode="speed")
defilter_less_than_8 :: proc(params: ^Filter_Params) -> (ok: bool) #no_bounds_check {
defilter_less_than_8 :: proc(params: ^Filter_Params) -> bool #no_bounds_check {
using params
ok = true
row_stride_in := ((channels * width * depth) + 7) >> 3
row_stride_out := channels * width
@@ -1314,6 +1302,8 @@ defilter_less_than_8 :: proc(params: ^Filter_Params) -> (ok: bool) #no_bounds_ch
// TODO: See about doing a Duff's #unroll where practicable
runtime.DEFAULT_TEMP_ALLOCATOR_TEMP_GUARD()
// Apron so we don't need to special case first rows.
up := make([]u8, row_stride_out, context.temp_allocator)
@@ -1457,18 +1447,18 @@ defilter_less_than_8 :: proc(params: ^Filter_Params) -> (ok: bool) #no_bounds_ch
}
}
return
return true
}
// @(optimization_mode="speed")
defilter_16 :: proc(params: ^Filter_Params) -> (ok: bool) {
defilter_16 :: proc(params: ^Filter_Params) -> bool {
using params
ok = true
stride := channels * 2
row_stride := width * stride
runtime.DEFAULT_TEMP_ALLOCATOR_TEMP_GUARD()
// TODO: See about doing a Duff's #unroll where practicable
// Apron so we don't need to special case first rows.
up := make([]u8, row_stride, context.temp_allocator)
@@ -1518,7 +1508,7 @@ defilter_16 :: proc(params: ^Filter_Params) -> (ok: bool) {
dest = dest[row_stride:]
}
return
return true
}
defilter :: proc(img: ^Image, filter_bytes: ^bytes.Buffer, header: ^image.PNG_IHDR, options: Options) -> (err: Error) {
@@ -1532,7 +1522,7 @@ defilter :: proc(img: ^Image, filter_bytes: ^bytes.Buffer, header: ^image.PNG_IH
bytes_per_channel := depth == 16 ? 2 : 1
num_bytes := compute_buffer_size(width, height, channels, depth == 16 ? 16 : 8)
if !resize(&img.pixels.buf, num_bytes) {
if resize(&img.pixels.buf, num_bytes) != nil {
return .Unable_To_Allocate_Or_Resize
}
@@ -1574,7 +1564,7 @@ defilter :: proc(img: ^Image, filter_bytes: ^bytes.Buffer, header: ^image.PNG_IH
if x > 0 && y > 0 {
temp: bytes.Buffer
temp_len := compute_buffer_size(x, y, channels, depth == 16 ? 16 : 8)
if !resize(&temp.buf, temp_len) {
if resize(&temp.buf, temp_len) != nil {
return .Unable_To_Allocate_Or_Resize
}
@@ -1637,8 +1627,6 @@ defilter :: proc(img: ^Image, filter_bytes: ^bytes.Buffer, header: ^image.PNG_IH
return nil
}
load :: proc{load_from_file, load_from_bytes, load_from_context}
@(init, private)
_register :: proc() {

View File

@@ -0,0 +1,4 @@
//+build js
package png
load :: proc{load_from_bytes, load_from_context}

View File

@@ -0,0 +1,19 @@
//+build !js
package png
import "core:os"
load :: proc{load_from_file, load_from_bytes, load_from_context}
load_from_file :: proc(filename: string, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
context.allocator = allocator
data, ok := os.read_entire_file(filename)
defer delete(data)
if ok {
return load_from_bytes(data, options)
} else {
return nil, .Unable_To_Read_File
}
}

View File

@@ -15,7 +15,6 @@ package qoi
import "core:image"
import "core:compress"
import "core:bytes"
import "core:os"
Error :: image.Error
Image :: image.Image
@@ -24,7 +23,7 @@ Options :: image.Options
RGB_Pixel :: image.RGB_Pixel
RGBA_Pixel :: image.RGBA_Pixel
save_to_memory :: proc(output: ^bytes.Buffer, img: ^Image, options := Options{}, allocator := context.allocator) -> (err: Error) {
save_to_buffer :: proc(output: ^bytes.Buffer, img: ^Image, options := Options{}, allocator := context.allocator) -> (err: Error) {
context.allocator = allocator
if img == nil {
@@ -54,7 +53,7 @@ save_to_memory :: proc(output: ^bytes.Buffer, img: ^Image, options := Options{}
// Calculate and allocate maximum size. We'll reclaim space to actually written output at the end.
max_size := pixels * (img.channels + 1) + size_of(image.QOI_Header) + size_of(u64be)
if !resize(&output.buf, max_size) {
if resize(&output.buf, max_size) != nil {
return .Unable_To_Allocate_Or_Resize
}
@@ -166,20 +165,6 @@ save_to_memory :: proc(output: ^bytes.Buffer, img: ^Image, options := Options{}
return nil
}
save_to_file :: proc(output: string, img: ^Image, options := Options{}, allocator := context.allocator) -> (err: Error) {
context.allocator = allocator
out := &bytes.Buffer{}
defer bytes.buffer_destroy(out)
save_to_memory(out, img, options) or_return
write_ok := os.write_entire_file(output, out.buf[:])
return nil if write_ok else .Unable_To_Write_File
}
save :: proc{save_to_memory, save_to_file}
load_from_bytes :: proc(data: []byte, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
ctx := &compress.Context_Memory_Input{
input_data = data,
@@ -189,19 +174,6 @@ load_from_bytes :: proc(data: []byte, options := Options{}, allocator := context
return img, err
}
load_from_file :: proc(filename: string, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
context.allocator = allocator
data, ok := os.read_entire_file(filename)
defer delete(data)
if ok {
return load_from_bytes(data, options)
} else {
return nil, .Unable_To_Read_File
}
}
@(optimization_mode="speed")
load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
context.allocator = allocator
@@ -261,7 +233,7 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
bytes_needed := image.compute_buffer_size(int(header.width), int(header.height), img.channels, 8)
if !resize(&img.pixels.buf, bytes_needed) {
if resize(&img.pixels.buf, bytes_needed) != nil {
return img, .Unable_To_Allocate_Or_Resize
}
@@ -359,8 +331,6 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
return
}
load :: proc{load_from_file, load_from_bytes, load_from_context}
/*
Cleanup of image-specific data.
*/

View File

@@ -0,0 +1,6 @@
//+build js
package qoi
save :: proc{save_to_buffer}
load :: proc{load_from_bytes, load_from_context}

View File

@@ -0,0 +1,37 @@
//+build !js
package qoi
import "core:os"
import "core:bytes"
save :: proc{save_to_buffer, save_to_file}
save_to_file :: proc(output: string, img: ^Image, options := Options{}, allocator := context.allocator) -> (err: Error) {
context.allocator = allocator
out := &bytes.Buffer{}
defer bytes.buffer_destroy(out)
save_to_buffer(out, img, options) or_return
write_ok := os.write_entire_file(output, out.buf[:])
return nil if write_ok else .Unable_To_Write_File
}
load :: proc{load_from_file, load_from_bytes, load_from_context}
load_from_file :: proc(filename: string, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
context.allocator = allocator
data, ok := os.read_entire_file(filename)
defer delete(data)
if ok {
return load_from_bytes(data, options)
} else {
return nil, .Unable_To_Read_File
}
}

View File

@@ -14,7 +14,6 @@ package tga
import "core:mem"
import "core:image"
import "core:bytes"
import "core:os"
import "core:compress"
import "core:strings"
@@ -28,7 +27,7 @@ GA_Pixel :: image.GA_Pixel
RGB_Pixel :: image.RGB_Pixel
RGBA_Pixel :: image.RGBA_Pixel
save_to_memory :: proc(output: ^bytes.Buffer, img: ^Image, options := Options{}, allocator := context.allocator) -> (err: Error) {
save_to_buffer :: proc(output: ^bytes.Buffer, img: ^Image, options := Options{}, allocator := context.allocator) -> (err: Error) {
context.allocator = allocator
if img == nil {
@@ -58,7 +57,7 @@ save_to_memory :: proc(output: ^bytes.Buffer, img: ^Image, options := Options{}
// Calculate and allocate necessary space.
necessary := pixels * img.channels + size_of(image.TGA_Header)
if !resize(&output.buf, necessary) {
if resize(&output.buf, necessary) != nil {
return .Unable_To_Allocate_Or_Resize
}
@@ -92,20 +91,6 @@ save_to_memory :: proc(output: ^bytes.Buffer, img: ^Image, options := Options{}
return nil
}
save_to_file :: proc(output: string, img: ^Image, options := Options{}, allocator := context.allocator) -> (err: Error) {
context.allocator = allocator
out := &bytes.Buffer{}
defer bytes.buffer_destroy(out)
save_to_memory(out, img, options) or_return
write_ok := os.write_entire_file(output, out.buf[:])
return nil if write_ok else .Unable_To_Write_File
}
save :: proc{save_to_memory, save_to_file}
load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
context.allocator = allocator
options := options
@@ -307,7 +292,7 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
return img, nil
}
if !resize(&img.pixels.buf, dest_channels * img.width * img.height) {
if resize(&img.pixels.buf, dest_channels * img.width * img.height) != nil {
return img, .Unable_To_Allocate_Or_Resize
}
@@ -398,20 +383,6 @@ load_from_bytes :: proc(data: []byte, options := Options{}, allocator := context
return img, err
}
load_from_file :: proc(filename: string, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
context.allocator = allocator
data, ok := os.read_entire_file(filename)
defer delete(data)
if ok {
return load_from_bytes(data, options)
} else {
return nil, .Unable_To_Read_File
}
}
load :: proc{load_from_file, load_from_bytes, load_from_context}
destroy :: proc(img: ^Image) {
if img == nil || img.width == 0 || img.height == 0 {

View File

@@ -0,0 +1,5 @@
//+build js
package tga
save :: proc{save_to_buffer}
load :: proc{load_from_bytes, load_from_context}

View File

@@ -0,0 +1,34 @@
//+build !js
package tga
import "core:os"
import "core:bytes"
save :: proc{save_to_buffer, save_to_file}
save_to_file :: proc(output: string, img: ^Image, options := Options{}, allocator := context.allocator) -> (err: Error) {
context.allocator = allocator
out := &bytes.Buffer{}
defer bytes.buffer_destroy(out)
save_to_buffer(out, img, options) or_return
write_ok := os.write_entire_file(output, out.buf[:])
return nil if write_ok else .Unable_To_Write_File
}
load :: proc{load_from_file, load_from_bytes, load_from_context}
load_from_file :: proc(filename: string, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
context.allocator = allocator
data, ok := os.read_entire_file(filename)
defer delete(data)
if ok {
return load_from_bytes(data, options)
} else {
return nil, .Unable_To_Read_File
}
}

View File

@@ -192,6 +192,7 @@ type_map_info :: proc($T: typeid/map[$K]$V) -> ^runtime.Map_Info ---
type_map_cell_info :: proc($T: typeid) -> ^runtime.Map_Cell_Info ---
type_convert_variants_to_pointers :: proc($T: typeid) -> typeid where type_is_union(T) ---
type_merge :: proc($U, $V: typeid) -> typeid where type_is_union(U), type_is_union(V) ---
constant_utf16_cstring :: proc($literal: string) -> [^]u16 ---
@@ -283,7 +284,7 @@ wasm_memory_atomic_wait32 :: proc(ptr: ^u32, expected: u32, timeout_ns: i64) -
wasm_memory_atomic_notify32 :: proc(ptr: ^u32, waiters: u32) -> (waiters_woken_up: u32) ---
// x86 Targets (i386, amd64)
x86_cpuid :: proc(ax, cx: u32) -> (eax, ebc, ecx, edx: u32) ---
x86_cpuid :: proc(ax, cx: u32) -> (eax, ebx, ecx, edx: u32) ---
x86_xgetbv :: proc(cx: u32) -> (eax, edx: u32) ---
@@ -305,4 +306,4 @@ valgrind_client_request :: proc(default: uintptr, request: uintptr, a0, a1, a2,
// Internal compiler use only
__entry_point :: proc() ---
__entry_point :: proc() ---

View File

@@ -1,124 +1,80 @@
package io
to_reader :: proc(s: Stream) -> (r: Reader, ok: bool = true) #optional_ok {
r.stream = s
if s.stream_vtable == nil || s.impl_read == nil {
ok = false
}
r = s
ok = .Read in query(s)
return
}
to_writer :: proc(s: Stream) -> (w: Writer, ok: bool = true) #optional_ok {
w.stream = s
if s.stream_vtable == nil || s.impl_write == nil {
ok = false
}
w = s
ok = .Write in query(s)
return
}
to_closer :: proc(s: Stream) -> (c: Closer, ok: bool = true) #optional_ok {
c.stream = s
if s.stream_vtable == nil || s.impl_close == nil {
ok = false
}
c = s
ok = .Close in query(s)
return
}
to_flusher :: proc(s: Stream) -> (f: Flusher, ok: bool = true) #optional_ok {
f.stream = s
if s.stream_vtable == nil || s.impl_flush == nil {
ok = false
}
f = s
ok = .Flush in query(s)
return
}
to_seeker :: proc(s: Stream) -> (seeker: Seeker, ok: bool = true) #optional_ok {
seeker.stream = s
if s.stream_vtable == nil || s.impl_seek == nil {
ok = false
}
seeker = s
ok = .Seek in query(s)
return
}
to_read_writer :: proc(s: Stream) -> (r: Read_Writer, ok: bool = true) #optional_ok {
r.stream = s
if s.stream_vtable == nil || s.impl_read == nil || s.impl_write == nil {
ok = false
}
r = s
ok = query(s) >= {.Read, .Write}
return
}
to_read_closer :: proc(s: Stream) -> (r: Read_Closer, ok: bool = true) #optional_ok {
r.stream = s
if s.stream_vtable == nil || s.impl_read == nil || s.impl_close == nil {
ok = false
}
r = s
ok = query(s) >= {.Read, .Close}
return
}
to_read_write_closer :: proc(s: Stream) -> (r: Read_Write_Closer, ok: bool = true) #optional_ok {
r.stream = s
if s.stream_vtable == nil || s.impl_read == nil || s.impl_write == nil || s.impl_close == nil {
ok = false
}
r = s
ok = query(s) >= {.Read, .Write, .Close}
return
}
to_read_write_seeker :: proc(s: Stream) -> (r: Read_Write_Seeker, ok: bool = true) #optional_ok {
r.stream = s
if s.stream_vtable == nil || s.impl_read == nil || s.impl_write == nil || s.impl_seek == nil {
ok = false
}
r = s
ok = query(s) >= {.Read, .Write, .Seek}
return
}
to_write_flusher :: proc(s: Stream) -> (w: Write_Flusher, ok: bool = true) #optional_ok {
w.stream = s
if s.stream_vtable == nil || s.impl_write == nil || s.impl_flush == nil {
ok = false
}
w = s
ok = query(s) >= {.Write, .Flush}
return
}
to_write_flush_closer :: proc(s: Stream) -> (w: Write_Flush_Closer, ok: bool = true) #optional_ok {
w.stream = s
if s.stream_vtable == nil || s.impl_write == nil || s.impl_flush == nil || s.impl_close == nil {
ok = false
}
w = s
ok = query(s) >= {.Write, .Flush, .Close}
return
}
to_reader_at :: proc(s: Stream) -> (r: Reader_At, ok: bool = true) #optional_ok {
r.stream = s
if s.stream_vtable == nil || s.impl_read_at == nil {
ok = false
}
r = s
ok = query(s) >= {.Read_At}
return
}
to_writer_at :: proc(s: Stream) -> (w: Writer_At, ok: bool = true) #optional_ok {
w.stream = s
if s.stream_vtable == nil || s.impl_write_at == nil {
ok = false
}
return
}
to_reader_from :: proc(s: Stream) -> (r: Reader_From, ok: bool = true) #optional_ok {
r.stream = s
if s.stream_vtable == nil || s.impl_read_from == nil {
ok = false
}
return
}
to_writer_to :: proc(s: Stream) -> (w: Writer_To, ok: bool = true) #optional_ok {
w.stream = s
if s.stream_vtable == nil || s.impl_write_to == nil {
ok = false
}
w = s
ok = query(s) >= {.Write_At}
return
}
to_write_closer :: proc(s: Stream) -> (w: Write_Closer, ok: bool = true) #optional_ok {
w.stream = s
if s.stream_vtable == nil || s.impl_write == nil || s.impl_close == nil {
ok = false
}
w = s
ok = query(s) >= {.Write, .Close}
return
}
to_write_seeker :: proc(s: Stream) -> (w: Write_Seeker, ok: bool = true) #optional_ok {
w.stream = s
if s.stream_vtable == nil || s.impl_write == nil || s.impl_seek == nil {
ok = false
}
w = s
ok = query(s) >= {.Write, .Seek}
return
}

View File

@@ -53,137 +53,106 @@ Error :: enum i32 {
Empty = -1,
}
Close_Proc :: proc(using s: Stream) -> Error
Flush_Proc :: proc(using s: Stream) -> Error
Seek_Proc :: proc(using s: Stream, offset: i64, whence: Seek_From) -> (n: i64, err: Error)
Size_Proc :: proc(using s: Stream) -> i64
Read_Proc :: proc(using s: Stream, p: []byte) -> (n: int, err: Error)
Read_At_Proc :: proc(using s: Stream, p: []byte, off: i64) -> (n: int, err: Error)
Read_From_Proc :: proc(using s: Stream, r: Reader) -> (n: i64, err: Error)
Read_Byte_Proc :: proc(using s: Stream) -> (byte, Error)
Read_Rune_Proc :: proc(using s: Stream) -> (ch: rune, size: int, err: Error)
Unread_Byte_Proc :: proc(using s: Stream) -> Error
Unread_Rune_Proc :: proc(using s: Stream) -> Error
Write_Proc :: proc(using s: Stream, p: []byte) -> (n: int, err: Error)
Write_At_Proc :: proc(using s: Stream, p: []byte, off: i64) -> (n: int, err: Error)
Write_To_Proc :: proc(using s: Stream, w: Writer) -> (n: i64, err: Error)
Write_Byte_Proc :: proc(using s: Stream, c: byte) -> Error
Write_Rune_Proc :: proc(using s: Stream, r: rune) -> (size: int, err: Error)
Destroy_Proc :: proc(using s: Stream) -> Error
Stream_Mode :: enum {
Close,
Flush,
Read,
Read_At,
Write,
Write_At,
Seek,
Size,
Destroy,
Query, // query what modes are available
}
Stream_Mode_Set :: distinct bit_set[Stream_Mode; i64]
Stream_Proc :: #type proc(stream_data: rawptr, mode: Stream_Mode, p: []byte, offset: i64, whence: Seek_From) -> (n: i64, err: Error)
Stream :: struct {
using stream_vtable: ^Stream_VTable,
stream_data: rawptr,
}
Stream_VTable :: struct {
impl_close: Close_Proc,
impl_flush: Flush_Proc,
impl_seek: Seek_Proc,
impl_size: Size_Proc,
impl_read: Read_Proc,
impl_read_at: Read_At_Proc,
impl_read_byte: Read_Byte_Proc,
impl_read_rune: Read_Rune_Proc,
impl_write_to: Write_To_Proc,
impl_write: Write_Proc,
impl_write_at: Write_At_Proc,
impl_write_byte: Write_Byte_Proc,
impl_write_rune: Write_Rune_Proc,
impl_read_from: Read_From_Proc,
impl_unread_byte: Unread_Byte_Proc,
impl_unread_rune: Unread_Rune_Proc,
impl_destroy: Destroy_Proc,
procedure: Stream_Proc,
data: rawptr,
}
Reader :: Stream
Writer :: Stream
Closer :: Stream
Flusher :: Stream
Seeker :: Stream
Reader :: struct {using stream: Stream}
Writer :: struct {using stream: Stream}
Closer :: struct {using stream: Stream}
Flusher :: struct {using stream: Stream}
Seeker :: struct {using stream: Stream}
Read_Writer :: Stream
Read_Closer :: Stream
Read_Write_Closer :: Stream
Read_Write_Seeker :: Stream
Read_Writer :: struct {using stream: Stream}
Read_Closer :: struct {using stream: Stream}
Read_Write_Closer :: struct {using stream: Stream}
Read_Write_Seeker :: struct {using stream: Stream}
Write_Closer :: Stream
Write_Seeker :: Stream
Write_Flusher :: Stream
Write_Flush_Closer :: Stream
Write_Closer :: struct {using stream: Stream}
Write_Seeker :: struct {using stream: Stream}
Write_Flusher :: struct {using stream: Stream}
Write_Flush_Closer :: struct {using stream: Stream}
Reader_At :: struct {using stream: Stream}
Writer_At :: struct {using stream: Stream}
Reader_From :: struct {using stream: Stream}
Writer_To :: struct {using stream: Stream}
Reader_At :: Stream
Writer_At :: Stream
destroy :: proc(s: Stream) -> Error {
close_err := close({s})
if s.stream_vtable != nil && s.impl_destroy != nil {
return s->impl_destroy()
destroy :: proc(s: Stream) -> (err: Error) {
_ = flush(s)
_ = close(s)
if s.procedure != nil {
_, err = s.procedure(s.data, .Destroy, nil, 0, nil)
} else {
err = .Empty
}
if close_err != .None {
return close_err
}
return .Empty
return
}
query :: proc(s: Stream) -> (set: Stream_Mode_Set) {
if s.procedure != nil {
n, _ := s.procedure(s.data, .Query, nil, 0, nil)
set = transmute(Stream_Mode_Set)n
if set != nil {
set += {.Query}
}
}
return
}
query_utility :: #force_inline proc "contextless" (set: Stream_Mode_Set) -> (n: i64, err: Error) {
return transmute(i64)set, nil
}
_i64_err :: #force_inline proc "contextless" (n: int, err: Error) -> (i64, Error) {
return i64(n), err
}
// read reads up to len(p) bytes into s. It returns the number of bytes read and any error if occurred.
//
// When read encounters an .EOF or error after successfully reading n > 0 bytes, it returns the number of
// bytes read along with the error.
read :: proc(s: Reader, p: []byte, n_read: ^int = nil) -> (n: int, err: Error) {
if s.stream_vtable != nil {
if s.impl_read != nil {
n, err = s->impl_read(p)
if n_read != nil {
n_read^ += n
}
return
} else if s.impl_read_byte != nil {
bytes_read := 0
defer if n_read != nil {
n_read^ += bytes_read
}
for _, i in p {
p[i] = s->impl_read_byte() or_return
bytes_read += 1
}
return
}
if s.procedure != nil {
n64: i64
n64, err = s.procedure(s.data, .Read, p, 0, nil)
n = int(n64)
if n_read != nil { n_read^ += n }
} else {
err = .Empty
}
return 0, .Empty
return
}
// write writes up to len(p) bytes into s. It returns the number of bytes written and any error if occurred.
write :: proc(s: Writer, p: []byte, n_written: ^int = nil) -> (n: int, err: Error) {
if s.stream_vtable != nil {
if s.impl_write != nil {
n, err = s->impl_write(p)
if n_written != nil {
n_written^ += n
}
return
} else if s.impl_write_byte != nil {
bytes_written := 0
defer if n_written != nil {
n_written^ += bytes_written
}
for c in p {
s->impl_write_byte(c) or_return
bytes_written += 1
}
return
}
if s.procedure != nil {
n64: i64
n64, err = s.procedure(s.data, .Write, p, 0, nil)
n = int(n64)
if n_written != nil { n_written^ += n }
} else {
err = .Empty
}
return 0, .Empty
return
}
// seek sets the offset of the next read or write to offset.
@@ -194,57 +163,45 @@ write :: proc(s: Writer, p: []byte, n_written: ^int = nil) -> (n: int, err: Erro
//
// seek returns the new offset to the start of the file/stream, and any error if occurred.
seek :: proc(s: Seeker, offset: i64, whence: Seek_From) -> (n: i64, err: Error) {
if s.stream_vtable != nil && s.impl_seek != nil {
return s->impl_seek(offset, whence)
if s.procedure != nil {
n, err = s.procedure(s.data, .Seek, nil, offset, whence)
} else {
err = .Empty
}
return 0, .Empty
return
}
// The behaviour of close after the first call is stream implementation defined.
// Different streams may document their own behaviour.
close :: proc(s: Closer) -> Error {
if s.stream_vtable != nil && s.impl_close != nil {
return s->impl_close()
close :: proc(s: Closer) -> (err: Error) {
if s.procedure != nil {
_, err = s.procedure(s.data, .Close, nil, 0, nil)
}
// Instead of .Empty, .None is fine in this case
return .None
return
}
flush :: proc(s: Flusher) -> Error {
if s.stream_vtable != nil && s.impl_flush != nil {
return s->impl_flush()
flush :: proc(s: Flusher) -> (err: Error) {
if s.procedure != nil {
_, err = s.procedure(s.data, .Flush, nil, 0, nil)
}
// Instead of .Empty, .None is fine in this case
return .None
return
}
// size returns the size of the stream. If the stream does not support querying its size, 0 will be returned.
size :: proc(s: Stream) -> i64 {
if s.stream_vtable == nil {
return 0
size :: proc(s: Stream) -> (n: i64, err: Error) {
if s.procedure != nil {
n, err = s.procedure(s.data, .Size, nil, 0, nil)
if err == .Empty {
n = 0
curr := seek(s, 0, .Current) or_return
end := seek(s, 0, .End) or_return
seek(s, curr, .Start) or_return
n = end
}
} else {
err = .Empty
}
if s.impl_size != nil {
return s->impl_size()
}
if s.impl_seek == nil {
return 0
}
curr, end: i64
err: Error
if curr, err = s->impl_seek(0, .Current); err != nil {
return 0
}
if end, err = s->impl_seek(0, .End); err != nil {
return 0
}
if _, err = s->impl_seek(curr, .Start); err != nil {
return 0
}
return end
return
}
@@ -256,29 +213,24 @@ size :: proc(s: Stream) -> i64 {
//
// If n == len(p), err may be either nil or .EOF
read_at :: proc(r: Reader_At, p: []byte, offset: i64, n_read: ^int = nil) -> (n: int, err: Error) {
defer if n_read != nil {
n_read^ += n
}
if r.stream_vtable == nil {
return 0, .Empty
}
if r.impl_read_at != nil {
return r->impl_read_at(p, offset)
}
if r.impl_seek == nil || r.impl_read == nil {
return 0, .Empty
}
curr_offset := r->impl_seek(offset, .Current) or_return
n, err = r->impl_read(p)
_, err1 := r->impl_seek(curr_offset, .Start)
if err1 != nil && err == nil {
err = err1
if r.procedure != nil {
n64: i64
n64, err = r.procedure(r.data, .Read_At, p, offset, nil)
if err != .Empty {
n = int(n64)
} else {
curr := seek(r, offset, .Current) or_return
n, err = read(r, p)
_, err1 := seek(r, curr, .Start)
if err1 != nil && err == nil {
err = err1
}
}
if n_read != nil { n_read^ += n }
} else {
err = .Empty
}
return
}
// write_at writes len(p) bytes into p starting with the provided offset in the underlying Writer_At stream w.
@@ -287,97 +239,39 @@ read_at :: proc(r: Reader_At, p: []byte, offset: i64, n_read: ^int = nil) -> (n:
// If write_at is writing to a Writer_At which has a seek offset, then write_at should not affect the underlying
// seek offset.
write_at :: proc(w: Writer_At, p: []byte, offset: i64, n_written: ^int = nil) -> (n: int, err: Error) {
defer if n_written != nil {
n_written^ += n
}
if w.stream_vtable == nil {
return 0, .Empty
}
if w.impl_write_at != nil {
return w->impl_write_at(p, offset)
}
if w.impl_seek == nil || w.impl_write == nil {
return 0, .Empty
}
curr_offset: i64
curr_offset, err = w->impl_seek(offset, .Current)
if err != nil {
return 0, err
}
n, err = w->impl_write(p)
_, err1 := w->impl_seek(curr_offset, .Start)
if err1 != nil && err == nil {
err = err1
if w.procedure != nil {
n64: i64
n64, err = w.procedure(w.data, .Write_At, p, offset, nil)
if err != .Empty {
n = int(n64)
} else {
curr := seek(w, offset, .Current) or_return
n, err = write(w, p)
_, err1 := seek(w, curr, .Start)
if err1 != nil && err == nil {
err = err1
}
}
if n_written != nil { n_written^ += n }
} else {
err = .Empty
}
return
}
write_to :: proc(r: Writer_To, w: Writer) -> (n: i64, err: Error) {
if r.stream_vtable == nil || w.stream_vtable == nil {
return 0, .Empty
}
if r.impl_write_to != nil {
return r->impl_write_to(w)
}
return 0, .Empty
}
read_from :: proc(w: Reader_From, r: Reader) -> (n: i64, err: Error) {
if r.stream_vtable == nil || w.stream_vtable == nil {
return 0, .Empty
}
if r.impl_read_from != nil {
return w->impl_read_from(r)
}
return 0, .Empty
}
// read_byte reads and returns the next byte from r.
read_byte :: proc(r: Reader, n_read: ^int = nil) -> (b: byte, err: Error) {
defer if err == nil && n_read != nil {
n_read^ += 1
}
if r.stream_vtable == nil {
return 0, .Empty
}
if r.impl_read_byte != nil {
return r->impl_read_byte()
}
if r.impl_read == nil {
return 0, .Empty
}
buf: [1]byte
_, err = r->impl_read(buf[:])
return buf[0], err
_, err = read(r, buf[:], n_read)
b = buf[0]
return
}
write_byte :: proc(w: Writer, c: byte, n_written: ^int = nil) -> Error {
return _write_byte(auto_cast w, c, n_written)
}
@(private)
_write_byte :: proc(w: Writer, c: byte, n_written: ^int = nil) -> (err: Error) {
defer if err == nil && n_written != nil {
n_written^ += 1
}
if w.stream_vtable == nil {
return .Empty
}
if w.impl_write_byte != nil {
return w->impl_write_byte(c)
}
if w.impl_write == nil {
return .Empty
}
b := [1]byte{c}
_, err = w->impl_write(b[:])
return err
buf: [1]byte
buf[0] = c
write(w, buf[:], n_written) or_return
return nil
}
// read_rune reads a single UTF-8 encoded Unicode codepoint and returns the rune and its size in bytes.
@@ -385,19 +279,9 @@ read_rune :: proc(br: Reader, n_read: ^int = nil) -> (ch: rune, size: int, err:
defer if err == nil && n_read != nil {
n_read^ += size
}
if br.stream_vtable == nil {
return 0, 0, .Empty
}
if br.impl_read_rune != nil {
return br->impl_read_rune()
}
if br.impl_read == nil {
return 0, 0, .Empty
}
b: [utf8.UTF_MAX]byte
_, err = br->impl_read(b[:1])
_, err = read(br, b[:1])
s0 := b[0]
ch = rune(s0)
@@ -415,7 +299,7 @@ read_rune :: proc(br: Reader, n_read: ^int = nil) -> (ch: rune, size: int, err:
return
}
sz := int(x&7)
size, err = br->impl_read(b[1:sz])
size, err = read(br, b[1:sz])
if err != nil || size+1 < sz {
ch = utf8.RUNE_ERROR
return
@@ -425,28 +309,6 @@ read_rune :: proc(br: Reader, n_read: ^int = nil) -> (ch: rune, size: int, err:
return
}
unread_byte :: proc(s: Stream) -> Error {
if s.stream_vtable == nil {
return .Empty
}
if s.impl_unread_byte != nil {
return s->impl_unread_byte()
}
if s.impl_seek != nil {
_, err := s->impl_seek(-1, .Current)
return err
}
return .Empty
}
unread_rune :: proc(s: Writer) -> Error {
if s.stream_vtable != nil && s.impl_unread_rune != nil {
return s->impl_unread_rune()
}
return .Empty
}
// write_string writes the contents of the string s to w.
write_string :: proc(s: Writer, str: string, n_written: ^int = nil) -> (n: int, err: Error) {
return write(s, transmute([]byte)str, n_written)
@@ -457,14 +319,6 @@ write_rune :: proc(s: Writer, r: rune, n_written: ^int = nil) -> (size: int, err
defer if err == nil && n_written != nil {
n_written^ += size
}
if s.stream_vtable == nil {
return 0, .Empty
}
if s.impl_write_rune != nil {
return s->impl_write_rune(r)
}
if r < utf8.RUNE_SELF {
err = write_byte(s, byte(r))
if err == nil {
@@ -542,21 +396,15 @@ copy_n :: proc(dst: Writer, src: Reader, n: i64) -> (written: i64, err: Error) {
@(private)
_copy_buffer :: proc(dst: Writer, src: Reader, buf: []byte) -> (written: i64, err: Error) {
if dst.stream_vtable == nil || src.stream_vtable == nil {
if dst.procedure == nil || src.procedure == nil {
return 0, .Empty
}
if src.impl_write_to != nil {
return src->impl_write_to(dst)
}
if src.impl_read_from != nil {
return dst->impl_read_from(src)
}
buf := buf
if buf == nil {
DEFAULT_SIZE :: 4 * 1024
size := DEFAULT_SIZE
if src.stream_vtable == _limited_reader_vtable {
l := (^Limited_Reader)(src.stream_data)
if src.procedure == _limited_reader_proc {
l := (^Limited_Reader)(src.data)
if i64(size) > l.n {
if l.n < 1 {
size = 1

View File

@@ -5,33 +5,37 @@ Multi_Reader :: struct {
}
@(private)
_multi_reader_vtable := &Stream_VTable{
impl_read = proc(s: Stream, p: []byte) -> (n: int, err: Error) {
mr := (^Multi_Reader)(s.stream_data)
for len(mr.readers) > 0 {
r := mr.readers[0]
n, err = read(r, p)
if err == .EOF {
ordered_remove(&mr.readers, 0)
}
if n > 0 || err != .EOF {
if err == .EOF && len(mr.readers) > 0 {
// Don't return EOF yet, more readers remain
err = nil
}
return
}
_multi_reader_proc :: proc(stream_data: rawptr, mode: Stream_Mode, p: []byte, offset: i64, whence: Seek_From) -> (n: i64, err: Error) {
if mode == .Query {
return query_utility({.Read, .Query})
} else if mode != .Read {
return 0, .Empty
}
mr := (^Multi_Reader)(stream_data)
for len(mr.readers) > 0 {
r := mr.readers[0]
n, err = _i64_err(read(r, p))
if err == .EOF {
ordered_remove(&mr.readers, 0)
}
return 0, .EOF
},
if n > 0 || err != .EOF {
if err == .EOF && len(mr.readers) > 0 {
// Don't return EOF yet, more readers remain
err = nil
}
return
}
}
return 0, .EOF
}
multi_reader_init :: proc(mr: ^Multi_Reader, readers: ..Reader, allocator := context.allocator) -> (r: Reader) {
all_readers := make([dynamic]Reader, 0, len(readers), allocator)
for w in readers {
if w.stream_vtable == _multi_reader_vtable {
other := (^Multi_Reader)(w.stream_data)
if w.procedure == _multi_reader_proc {
other := (^Multi_Reader)(w.data)
append(&all_readers, ..other.readers[:])
} else {
append(&all_readers, w)
@@ -40,8 +44,8 @@ multi_reader_init :: proc(mr: ^Multi_Reader, readers: ..Reader, allocator := con
mr.readers = all_readers
r.stream_vtable = _multi_reader_vtable
r.stream_data = mr
r.procedure = _multi_reader_proc
r.data = mr
return
}
@@ -55,38 +59,42 @@ Multi_Writer :: struct {
}
@(private)
_multi_writer_vtable := &Stream_VTable{
impl_write = proc(s: Stream, p: []byte) -> (n: int, err: Error) {
mw := (^Multi_Writer)(s.stream_data)
for w in mw.writers {
n, err = write(w, p)
if err != nil {
return
}
if n != len(p) {
err = .Short_Write
return
}
_multi_writer_proc :: proc(stream_data: rawptr, mode: Stream_Mode, p: []byte, offset: i64, whence: Seek_From) -> (n: i64, err: Error) {
if mode == .Query {
return query_utility({.Write, .Query})
} else if mode != .Write {
return 0, .Empty
}
mw := (^Multi_Writer)(stream_data)
for w in mw.writers {
n, err = _i64_err(write(w, p))
if err != nil {
return
}
if n != i64(len(p)) {
err = .Short_Write
return
}
}
return len(p), nil
},
return i64(len(p)), nil
}
multi_writer_init :: proc(mw: ^Multi_Writer, writers: ..Writer, allocator := context.allocator) -> (out: Writer) {
mw.writers = make([dynamic]Writer, 0, len(writers), allocator)
for w in writers {
if w.stream_vtable == _multi_writer_vtable {
other := (^Multi_Writer)(w.stream_data)
if w.procedure == _multi_writer_proc {
other := (^Multi_Writer)(w.data)
append(&mw.writers, ..other.writers[:])
} else {
append(&mw.writers, w)
}
}
out.stream_vtable = _multi_writer_vtable
out.stream_data = mw
out.procedure = _multi_writer_proc
out.data = mw
return
}

View File

@@ -2,6 +2,7 @@ package io
import "core:strconv"
import "core:unicode/utf8"
import "core:unicode/utf16"
read_ptr :: proc(r: Reader, p: rawptr, byte_size: int, n_read: ^int = nil) -> (n: int, err: Error) {
return read(r, ([^]byte)(p)[:byte_size], n_read)
@@ -146,7 +147,7 @@ write_encoded_rune :: proc(w: Writer, r: rune, write_quote := true, n_written: ^
return
}
write_escaped_rune :: proc(w: Writer, r: rune, quote: byte, html_safe := false, n_written: ^int = nil) -> (n: int, err: Error) {
write_escaped_rune :: proc(w: Writer, r: rune, quote: byte, html_safe := false, n_written: ^int = nil, for_json := false) -> (n: int, err: Error) {
is_printable :: proc(r: rune) -> bool {
if r <= 0xff {
switch r {
@@ -163,7 +164,7 @@ write_escaped_rune :: proc(w: Writer, r: rune, quote: byte, html_safe := false,
defer if n_written != nil {
n_written^ += n
}
if html_safe {
switch r {
case '<', '>', '&':
@@ -211,17 +212,29 @@ write_escaped_rune :: proc(w: Writer, r: rune, quote: byte, html_safe := false,
write_byte(w, DIGITS_LOWER[c>>uint(s) & 0xf], &n) or_return
}
case:
write_byte(w, '\\', &n) or_return
write_byte(w, 'U', &n) or_return
for s := 28; s >= 0; s -= 4 {
write_byte(w, DIGITS_LOWER[c>>uint(s) & 0xf], &n) or_return
if for_json {
buf: [2]u16
utf16.encode(buf[:], []rune{c})
for bc in buf {
write_byte(w, '\\', &n) or_return
write_byte(w, 'u', &n) or_return
for s := 12; s >= 0; s -= 4 {
write_byte(w, DIGITS_LOWER[bc>>uint(s) & 0xf], &n) or_return
}
}
} else {
write_byte(w, '\\', &n) or_return
write_byte(w, 'U', &n) or_return
for s := 24; s >= 0; s -= 4 {
write_byte(w, DIGITS_LOWER[c>>uint(s) & 0xf], &n) or_return
}
}
}
}
return
}
write_quoted_string :: proc(w: Writer, str: string, quote: byte = '"', n_written: ^int = nil) -> (n: int, err: Error) {
write_quoted_string :: proc(w: Writer, str: string, quote: byte = '"', n_written: ^int = nil, for_json := false) -> (n: int, err: Error) {
defer if n_written != nil {
n_written^ += n
}
@@ -240,7 +253,7 @@ write_quoted_string :: proc(w: Writer, str: string, quote: byte = '"', n_written
continue
}
n_wrapper(write_escaped_rune(w, r, quote), &n) or_return
n_wrapper(write_escaped_rune(w, r, quote, false, nil, for_json), &n) or_return
}
write_byte(w, quote, &n) or_return
@@ -279,17 +292,21 @@ Tee_Reader :: struct {
}
@(private)
_tee_reader_vtable := &Stream_VTable{
impl_read = proc(s: Stream, p: []byte) -> (n: int, err: Error) {
t := (^Tee_Reader)(s.stream_data)
n, err = read(t.r, p)
_tee_reader_proc :: proc(stream_data: rawptr, mode: Stream_Mode, p: []byte, offset: i64, whence: Seek_From) -> (n: i64, err: Error) {
t := (^Tee_Reader)(stream_data)
#partial switch mode {
case .Read:
n, err = _i64_err(read(t.r, p))
if n > 0 {
if wn, werr := write(t.w, p[:n]); werr != nil {
return wn, werr
return i64(wn), werr
}
}
return
},
case .Query:
return query_utility({.Read, .Query})
}
return 0, .Empty
}
// tee_reader_init returns a Reader that writes to 'w' what it reads from 'r'
@@ -304,8 +321,8 @@ tee_reader_init :: proc(t: ^Tee_Reader, r: Reader, w: Writer, allocator := conte
}
tee_reader_to_reader :: proc(t: ^Tee_Reader) -> (r: Reader) {
r.stream_data = t
r.stream_vtable = _tee_reader_vtable
r.data = t
r.procedure = _tee_reader_proc
return
}
@@ -319,9 +336,10 @@ Limited_Reader :: struct {
}
@(private)
_limited_reader_vtable := &Stream_VTable{
impl_read = proc(s: Stream, p: []byte) -> (n: int, err: Error) {
l := (^Limited_Reader)(s.stream_data)
_limited_reader_proc :: proc(stream_data: rawptr, mode: Stream_Mode, p: []byte, offset: i64, whence: Seek_From) -> (n: i64, err: Error) {
l := (^Limited_Reader)(stream_data)
#partial switch mode {
case .Read:
if l.n <= 0 {
return 0, .EOF
}
@@ -329,10 +347,13 @@ _limited_reader_vtable := &Stream_VTable{
if i64(len(p)) > l.n {
p = p[0:l.n]
}
n, err = read(l.r, p)
n, err = _i64_err(read(l.r, p))
l.n -= i64(n)
return
},
case .Query:
return query_utility({.Read, .Query})
}
return 0, .Empty
}
limited_reader_init :: proc(l: ^Limited_Reader, r: Reader, n: i64) -> Reader {
@@ -342,8 +363,8 @@ limited_reader_init :: proc(l: ^Limited_Reader, r: Reader, n: i64) -> Reader {
}
limited_reader_to_reader :: proc(l: ^Limited_Reader) -> (r: Reader) {
r.stream_vtable = _limited_reader_vtable
r.stream_data = l
r.procedure = _limited_reader_proc
r.data = l
return
}
@@ -362,15 +383,16 @@ section_reader_init :: proc(s: ^Section_Reader, r: Reader_At, off: i64, n: i64)
return
}
section_reader_to_stream :: proc(s: ^Section_Reader) -> (out: Stream) {
out.stream_data = s
out.stream_vtable = _section_reader_vtable
out.data = s
out.procedure = _section_reader_proc
return
}
@(private)
_section_reader_vtable := &Stream_VTable{
impl_read = proc(stream: Stream, p: []byte) -> (n: int, err: Error) {
s := (^Section_Reader)(stream.stream_data)
_section_reader_proc :: proc(stream_data: rawptr, mode: Stream_Mode, p: []byte, offset: i64, whence: Seek_From) -> (n: i64, err: Error) {
s := (^Section_Reader)(stream_data)
#partial switch mode {
case .Read:
if s.off >= s.limit {
return 0, .EOF
}
@@ -378,13 +400,11 @@ _section_reader_vtable := &Stream_VTable{
if max := s.limit - s.off; i64(len(p)) > max {
p = p[0:max]
}
n, err = read_at(s.r, p, s.off)
n, err = _i64_err(read_at(s.r, p, s.off))
s.off += i64(n)
return
},
impl_read_at = proc(stream: Stream, p: []byte, off: i64) -> (n: int, err: Error) {
s := (^Section_Reader)(stream.stream_data)
p, off := p, off
case .Read_At:
p, off := p, offset
if off < 0 || off >= s.limit - s.base {
return 0, .EOF
@@ -392,17 +412,15 @@ _section_reader_vtable := &Stream_VTable{
off += s.base
if max := s.limit - off; i64(len(p)) > max {
p = p[0:max]
n, err = read_at(s.r, p, off)
n, err = _i64_err(read_at(s.r, p, off))
if err == nil {
err = .EOF
}
return
}
return read_at(s.r, p, off)
},
impl_seek = proc(stream: Stream, offset: i64, whence: Seek_From) -> (n: i64, err: Error) {
s := (^Section_Reader)(stream.stream_data)
return _i64_err(read_at(s.r, p, off))
case .Seek:
offset := offset
switch whence {
case:
@@ -420,10 +438,12 @@ _section_reader_vtable := &Stream_VTable{
s.off = offset
n = offset - s.base
return
},
impl_size = proc(stream: Stream) -> i64 {
s := (^Section_Reader)(stream.stream_data)
return s.limit - s.base
},
}
case .Size:
n = s.limit - s.base
return
case .Query:
return query_utility({.Read, .Read_At, .Seek, .Size, .Query})
}
return 0, nil
}

View File

@@ -76,43 +76,43 @@ nil_logger :: proc() -> Logger {
}
debugf :: proc(fmt_str: string, args: ..any, location := #caller_location) {
logf(level=.Debug, fmt_str=fmt_str, args=args, location=location)
logf(.Debug, fmt_str, ..args, location=location)
}
infof :: proc(fmt_str: string, args: ..any, location := #caller_location) {
logf(level=.Info, fmt_str=fmt_str, args=args, location=location)
logf(.Info, fmt_str, ..args, location=location)
}
warnf :: proc(fmt_str: string, args: ..any, location := #caller_location) {
logf(level=.Warning, fmt_str=fmt_str, args=args, location=location)
logf(.Warning, fmt_str, ..args, location=location)
}
errorf :: proc(fmt_str: string, args: ..any, location := #caller_location) {
logf(level=.Error, fmt_str=fmt_str, args=args, location=location)
logf(.Error, fmt_str, ..args, location=location)
}
fatalf :: proc(fmt_str: string, args: ..any, location := #caller_location) {
logf(level=.Fatal, fmt_str=fmt_str, args=args, location=location)
logf(.Fatal, fmt_str, ..args, location=location)
}
debug :: proc(args: ..any, sep := " ", location := #caller_location) {
log(level=.Debug, args=args, sep=sep, location=location)
log(.Debug, ..args, sep=sep, location=location)
}
info :: proc(args: ..any, sep := " ", location := #caller_location) {
log(level=.Info, args=args, sep=sep, location=location)
log(.Info, ..args, sep=sep, location=location)
}
warn :: proc(args: ..any, sep := " ", location := #caller_location) {
log(level=.Warning, args=args, sep=sep, location=location)
log(.Warning, ..args, sep=sep, location=location)
}
error :: proc(args: ..any, sep := " ", location := #caller_location) {
log(level=.Error, args=args, sep=sep, location=location)
log(.Error, ..args, sep=sep, location=location)
}
fatal :: proc(args: ..any, sep := " ", location := #caller_location) {
log(level=.Fatal, args=args, sep=sep, location=location)
log(.Fatal, ..args, sep=sep, location=location)
}
panic :: proc(args: ..any, location := #caller_location) -> ! {
log(level=.Fatal, args=args, location=location)
log(.Fatal, ..args, location=location)
runtime.panic("log.panic", location)
}
panicf :: proc(fmt_str: string, args: ..any, location := #caller_location) -> ! {
logf(level=.Fatal, fmt_str=fmt_str, args=args, location=location)
logf(.Fatal, fmt_str, ..args, location=location)
runtime.panic("log.panicf", location)
}
@@ -127,7 +127,7 @@ log :: proc(level: Level, args: ..any, sep := " ", location := #caller_location)
if level < logger.lowest_level {
return
}
str := fmt.tprint(args=args, sep=sep) //NOTE(Hoej): While tprint isn't thread-safe, no logging is.
str := fmt.tprint(..args, sep=sep) //NOTE(Hoej): While tprint isn't thread-safe, no logging is.
logger.procedure(logger.data, level, str, logger.options, location)
}

View File

@@ -38,60 +38,60 @@ log_allocator_proc :: proc(allocator_data: rawptr, mode: runtime.Allocator_Mode,
switch mode {
case .Alloc:
logf(
level=la.level,
fmt_str = "%s%s>>> ALLOCATOR(mode=.Alloc, size=%d, alignment=%d)",
args = {la.prefix, padding, size, alignment},
la.level,
"%s%s>>> ALLOCATOR(mode=.Alloc, size=%d, alignment=%d)",
la.prefix, padding, size, alignment,
location = location,
)
case .Alloc_Non_Zeroed:
logf(
level=la.level,
fmt_str = "%s%s>>> ALLOCATOR(mode=.Alloc_Non_Zeroed, size=%d, alignment=%d)",
args = {la.prefix, padding, size, alignment},
la.level,
"%s%s>>> ALLOCATOR(mode=.Alloc_Non_Zeroed, size=%d, alignment=%d)",
la.prefix, padding, size, alignment,
location = location,
)
case .Free:
if old_size != 0 {
logf(
level=la.level,
fmt_str = "%s%s<<< ALLOCATOR(mode=.Free, ptr=%p, size=%d)",
args = {la.prefix, padding, old_memory, old_size},
la.level,
"%s%s<<< ALLOCATOR(mode=.Free, ptr=%p, size=%d)",
la.prefix, padding, old_memory, old_size,
location = location,
)
} else {
logf(
level=la.level,
fmt_str = "%s%s<<< ALLOCATOR(mode=.Free, ptr=%p)",
args = {la.prefix, padding, old_memory},
la.level,
"%s%s<<< ALLOCATOR(mode=.Free, ptr=%p)",
la.prefix, padding, old_memory,
location = location,
)
}
case .Free_All:
logf(
level=la.level,
fmt_str = "%s%s<<< ALLOCATOR(mode=.Free_All)",
args = {la.prefix, padding},
la.level,
"%s%s<<< ALLOCATOR(mode=.Free_All)",
la.prefix, padding,
location = location,
)
case .Resize:
logf(
level=la.level,
fmt_str = "%s%s>>> ALLOCATOR(mode=.Resize, ptr=%p, old_size=%d, size=%d, alignment=%d)",
args = {la.prefix, padding, old_memory, old_size, size, alignment},
la.level,
"%s%s>>> ALLOCATOR(mode=.Resize, ptr=%p, old_size=%d, size=%d, alignment=%d)",
la.prefix, padding, old_memory, old_size, size, alignment,
location = location,
)
case .Query_Features:
logf(
level=la.level,
fmt_str = "%s%ALLOCATOR(mode=.Query_Features)",
args = {la.prefix, padding},
la.level,
"%s%ALLOCATOR(mode=.Query_Features)",
la.prefix, padding,
location = location,
)
case .Query_Info:
logf(
level=la.level,
fmt_str = "%s%ALLOCATOR(mode=.Query_Info)",
args = {la.prefix, padding},
la.level,
"%s%ALLOCATOR(mode=.Query_Info)",
la.prefix, padding,
location = location,
)
}
@@ -103,9 +103,9 @@ log_allocator_proc :: proc(allocator_data: rawptr, mode: runtime.Allocator_Mode,
defer la.locked = false
if err != nil {
logf(
level=la.level,
fmt_str = "%s%ALLOCATOR ERROR=%v",
args = {la.prefix, padding, error},
la.level,
"%s%ALLOCATOR ERROR=%v",
la.prefix, padding, error,
location = location,
)
}

View File

@@ -353,14 +353,14 @@ internal_int_is_prime :: proc(a: ^Int, miller_rabin_trials := int(-1), miller_ra
// Run the Miller-Rabin test with base 2 for the BPSW test.
internal_set(b, 2) or_return
if !internal_int_prime_miller_rabin(a, b) or_return { return }
if !(internal_int_prime_miller_rabin(a, b) or_return) { return }
// Rumours have it that Mathematica does a second M-R test with base 3.
// Other rumours have it that their strong L-S test is slightly different.
// It does not hurt, though, beside a bit of extra runtime.
b.digit[0] += 1
if !internal_int_prime_miller_rabin(a, b) or_return { return }
if !(internal_int_prime_miller_rabin(a, b) or_return) { return }
// Both, the Frobenius-Underwood test and the the Lucas-Selfridge test are quite
// slow so if speed is an issue, set `USE_MILLER_RABIN_ONLY` to use M-R tests with
@@ -369,9 +369,9 @@ internal_int_is_prime :: proc(a: ^Int, miller_rabin_trials := int(-1), miller_ra
if !miller_rabin_only {
if miller_rabin_trials >= 0 {
when MATH_BIG_USE_FROBENIUS_TEST {
if !internal_int_prime_frobenius_underwood(a) or_return { return }
if !(internal_int_prime_frobenius_underwood(a) or_return) { return }
} else {
if !internal_int_prime_strong_lucas_selfridge(a) or_return { return }
if !(internal_int_prime_strong_lucas_selfridge(a) or_return) { return }
}
}
}
@@ -410,7 +410,7 @@ internal_int_is_prime :: proc(a: ^Int, miller_rabin_trials := int(-1), miller_ra
// We did bases 2 and 3 already, skip them
for ix := 2; ix < p_max; ix += 1 {
internal_set(b, _private_prime_table[ix])
if !internal_int_prime_miller_rabin(a, b) or_return { return }
if !(internal_int_prime_miller_rabin(a, b) or_return) { return }
}
} else if miller_rabin_trials > 0 {
// Perform `miller_rabin_trials` M-R tests with random bases between 3 and "a".
@@ -490,7 +490,7 @@ internal_int_is_prime :: proc(a: ^Int, miller_rabin_trials := int(-1), miller_ra
ix -= 1
continue
}
if !internal_int_prime_miller_rabin(a, b) or_return { return }
if !(internal_int_prime_miller_rabin(a, b) or_return) { return }
}
}

View File

@@ -429,7 +429,7 @@ internal_int_write_to_ascii_file :: proc(a: ^Int, filename: string, radix := i8(
len = l,
}
ok := os.write_entire_file(name=filename, data=data, truncate=true)
ok := os.write_entire_file(filename, data, truncate=true)
return nil if ok else .Cannot_Write_File
}

View File

@@ -37,68 +37,96 @@ overflowing_sub :: intrinsics.overflow_sub
overflowing_mul :: intrinsics.overflow_mul
log2 :: proc(x: $T) -> T where intrinsics.type_is_integer(T), intrinsics.type_is_unsigned(T) {
@(require_results)
log2 :: proc "contextless" (x: $T) -> T where intrinsics.type_is_integer(T), intrinsics.type_is_unsigned(T) {
return (8*size_of(T)-1) - count_leading_zeros(x)
}
rotate_left8 :: proc(x: u8, k: int) -> u8 {
@(require_results)
rotate_left8 :: proc "contextless" (x: u8, k: int) -> u8 {
n :: 8
s := uint(k) & (n-1)
return x <<s | x>>(n-s)
}
rotate_left16 :: proc(x: u16, k: int) -> u16 {
@(require_results)
rotate_left16 :: proc "contextless" (x: u16, k: int) -> u16 {
n :: 16
s := uint(k) & (n-1)
return x <<s | x>>(n-s)
}
rotate_left32 :: proc(x: u32, k: int) -> u32 {
@(require_results)
rotate_left32 :: proc "contextless" (x: u32, k: int) -> u32 {
n :: 32
s := uint(k) & (n-1)
return x <<s | x>>(n-s)
}
rotate_left64 :: proc(x: u64, k: int) -> u64 {
@(require_results)
rotate_left64 :: proc "contextless" (x: u64, k: int) -> u64 {
n :: 64
s := uint(k) & (n-1)
return x <<s | x>>(n-s)
}
rotate_left :: proc(x: uint, k: int) -> uint {
@(require_results)
rotate_left :: proc "contextless" (x: uint, k: int) -> uint {
n :: 8*size_of(uint)
s := uint(k) & (n-1)
return x <<s | x>>(n-s)
}
from_be_u8 :: proc(i: u8) -> u8 { return i }
from_be_u16 :: proc(i: u16) -> u16 { when ODIN_ENDIAN == .Big { return i } else { return byte_swap(i) } }
from_be_u32 :: proc(i: u32) -> u32 { when ODIN_ENDIAN == .Big { return i } else { return byte_swap(i) } }
from_be_u64 :: proc(i: u64) -> u64 { when ODIN_ENDIAN == .Big { return i } else { return byte_swap(i) } }
from_be_uint :: proc(i: uint) -> uint { when ODIN_ENDIAN == .Big { return i } else { return byte_swap(i) } }
@(require_results)
from_be_u8 :: proc "contextless" (i: u8) -> u8 { return i }
@(require_results)
from_be_u16 :: proc "contextless" (i: u16) -> u16 { when ODIN_ENDIAN == .Big { return i } else { return byte_swap(i) } }
@(require_results)
from_be_u32 :: proc "contextless" (i: u32) -> u32 { when ODIN_ENDIAN == .Big { return i } else { return byte_swap(i) } }
@(require_results)
from_be_u64 :: proc "contextless" (i: u64) -> u64 { when ODIN_ENDIAN == .Big { return i } else { return byte_swap(i) } }
@(require_results)
from_be_uint :: proc "contextless" (i: uint) -> uint { when ODIN_ENDIAN == .Big { return i } else { return byte_swap(i) } }
from_le_u8 :: proc(i: u8) -> u8 { return i }
from_le_u16 :: proc(i: u16) -> u16 { when ODIN_ENDIAN == .Little { return i } else { return byte_swap(i) } }
from_le_u32 :: proc(i: u32) -> u32 { when ODIN_ENDIAN == .Little { return i } else { return byte_swap(i) } }
from_le_u64 :: proc(i: u64) -> u64 { when ODIN_ENDIAN == .Little { return i } else { return byte_swap(i) } }
from_le_uint :: proc(i: uint) -> uint { when ODIN_ENDIAN == .Little { return i } else { return byte_swap(i) } }
@(require_results)
from_le_u8 :: proc "contextless" (i: u8) -> u8 { return i }
@(require_results)
from_le_u16 :: proc "contextless" (i: u16) -> u16 { when ODIN_ENDIAN == .Little { return i } else { return byte_swap(i) } }
@(require_results)
from_le_u32 :: proc "contextless" (i: u32) -> u32 { when ODIN_ENDIAN == .Little { return i } else { return byte_swap(i) } }
@(require_results)
from_le_u64 :: proc "contextless" (i: u64) -> u64 { when ODIN_ENDIAN == .Little { return i } else { return byte_swap(i) } }
@(require_results)
from_le_uint :: proc "contextless" (i: uint) -> uint { when ODIN_ENDIAN == .Little { return i } else { return byte_swap(i) } }
to_be_u8 :: proc(i: u8) -> u8 { return i }
to_be_u16 :: proc(i: u16) -> u16 { when ODIN_ENDIAN == .Big { return i } else { return byte_swap(i) } }
to_be_u32 :: proc(i: u32) -> u32 { when ODIN_ENDIAN == .Big { return i } else { return byte_swap(i) } }
to_be_u64 :: proc(i: u64) -> u64 { when ODIN_ENDIAN == .Big { return i } else { return byte_swap(i) } }
to_be_uint :: proc(i: uint) -> uint { when ODIN_ENDIAN == .Big { return i } else { return byte_swap(i) } }
@(require_results)
to_be_u8 :: proc "contextless" (i: u8) -> u8 { return i }
@(require_results)
to_be_u16 :: proc "contextless" (i: u16) -> u16 { when ODIN_ENDIAN == .Big { return i } else { return byte_swap(i) } }
@(require_results)
to_be_u32 :: proc "contextless" (i: u32) -> u32 { when ODIN_ENDIAN == .Big { return i } else { return byte_swap(i) } }
@(require_results)
to_be_u64 :: proc "contextless" (i: u64) -> u64 { when ODIN_ENDIAN == .Big { return i } else { return byte_swap(i) } }
@(require_results)
to_be_uint :: proc "contextless" (i: uint) -> uint { when ODIN_ENDIAN == .Big { return i } else { return byte_swap(i) } }
to_le_u8 :: proc(i: u8) -> u8 { return i }
to_le_u16 :: proc(i: u16) -> u16 { when ODIN_ENDIAN == .Little { return i } else { return byte_swap(i) } }
to_le_u32 :: proc(i: u32) -> u32 { when ODIN_ENDIAN == .Little { return i } else { return byte_swap(i) } }
to_le_u64 :: proc(i: u64) -> u64 { when ODIN_ENDIAN == .Little { return i } else { return byte_swap(i) } }
to_le_uint :: proc(i: uint) -> uint { when ODIN_ENDIAN == .Little { return i } else { return byte_swap(i) } }
@(require_results)
to_le_u8 :: proc "contextless" (i: u8) -> u8 { return i }
@(require_results)
to_le_u16 :: proc "contextless" (i: u16) -> u16 { when ODIN_ENDIAN == .Little { return i } else { return byte_swap(i) } }
@(require_results)
to_le_u32 :: proc "contextless" (i: u32) -> u32 { when ODIN_ENDIAN == .Little { return i } else { return byte_swap(i) } }
@(require_results)
to_le_u64 :: proc "contextless" (i: u64) -> u64 { when ODIN_ENDIAN == .Little { return i } else { return byte_swap(i) } }
@(require_results)
to_le_uint :: proc "contextless" (i: uint) -> uint { when ODIN_ENDIAN == .Little { return i } else { return byte_swap(i) } }
len_u8 :: proc(x: u8) -> int {
@(require_results)
len_u8 :: proc "contextless" (x: u8) -> int {
return int(len_u8_table[x])
}
len_u16 :: proc(x: u16) -> (n: int) {
@(require_results)
len_u16 :: proc "contextless" (x: u16) -> (n: int) {
x := x
if x >= 1<<8 {
x >>= 8
@@ -106,7 +134,8 @@ len_u16 :: proc(x: u16) -> (n: int) {
}
return n + int(len_u8_table[x])
}
len_u32 :: proc(x: u32) -> (n: int) {
@(require_results)
len_u32 :: proc "contextless" (x: u32) -> (n: int) {
x := x
if x >= 1<<16 {
x >>= 16
@@ -118,7 +147,8 @@ len_u32 :: proc(x: u32) -> (n: int) {
}
return n + int(len_u8_table[x])
}
len_u64 :: proc(x: u64) -> (n: int) {
@(require_results)
len_u64 :: proc "contextless" (x: u64) -> (n: int) {
x := x
if x >= 1<<32 {
x >>= 32
@@ -134,7 +164,8 @@ len_u64 :: proc(x: u64) -> (n: int) {
}
return n + int(len_u8_table[x])
}
len_uint :: proc(x: uint) -> (n: int) {
@(require_results)
len_uint :: proc "contextless" (x: uint) -> (n: int) {
when size_of(uint) == size_of(u64) {
return len_u64(u64(x))
} else {
@@ -146,21 +177,24 @@ len_uint :: proc(x: uint) -> (n: int) {
len :: proc{len_u8, len_u16, len_u32, len_u64, len_uint}
add_u32 :: proc(x, y, carry: u32) -> (sum, carry_out: u32) {
@(require_results)
add_u32 :: proc "contextless" (x, y, carry: u32) -> (sum, carry_out: u32) {
tmp_carry, tmp_carry2: bool
sum, tmp_carry = intrinsics.overflow_add(x, y)
sum, tmp_carry2 = intrinsics.overflow_add(sum, carry)
carry_out = u32(tmp_carry | tmp_carry2)
return
}
add_u64 :: proc(x, y, carry: u64) -> (sum, carry_out: u64) {
@(require_results)
add_u64 :: proc "contextless" (x, y, carry: u64) -> (sum, carry_out: u64) {
tmp_carry, tmp_carry2: bool
sum, tmp_carry = intrinsics.overflow_add(x, y)
sum, tmp_carry2 = intrinsics.overflow_add(sum, carry)
carry_out = u64(tmp_carry | tmp_carry2)
return
}
add_uint :: proc(x, y, carry: uint) -> (sum, carry_out: uint) {
@(require_results)
add_uint :: proc "contextless" (x, y, carry: uint) -> (sum, carry_out: uint) {
when size_of(uint) == size_of(u64) {
a, b := add_u64(u64(x), u64(y), u64(carry))
} else {
@@ -172,21 +206,24 @@ add_uint :: proc(x, y, carry: uint) -> (sum, carry_out: uint) {
add :: proc{add_u32, add_u64, add_uint}
sub_u32 :: proc(x, y, borrow: u32) -> (diff, borrow_out: u32) {
@(require_results)
sub_u32 :: proc "contextless" (x, y, borrow: u32) -> (diff, borrow_out: u32) {
tmp_borrow, tmp_borrow2: bool
diff, tmp_borrow = intrinsics.overflow_sub(x, y)
diff, tmp_borrow2 = intrinsics.overflow_sub(diff, borrow)
borrow_out = u32(tmp_borrow | tmp_borrow2)
return
}
sub_u64 :: proc(x, y, borrow: u64) -> (diff, borrow_out: u64) {
@(require_results)
sub_u64 :: proc "contextless" (x, y, borrow: u64) -> (diff, borrow_out: u64) {
tmp_borrow, tmp_borrow2: bool
diff, tmp_borrow = intrinsics.overflow_sub(x, y)
diff, tmp_borrow2 = intrinsics.overflow_sub(diff, borrow)
borrow_out = u64(tmp_borrow | tmp_borrow2)
return
}
sub_uint :: proc(x, y, borrow: uint) -> (diff, borrow_out: uint) {
@(require_results)
sub_uint :: proc "contextless" (x, y, borrow: uint) -> (diff, borrow_out: uint) {
when size_of(uint) == size_of(u64) {
a, b := sub_u64(u64(x), u64(y), u64(borrow))
} else {
@@ -198,18 +235,21 @@ sub_uint :: proc(x, y, borrow: uint) -> (diff, borrow_out: uint) {
sub :: proc{sub_u32, sub_u64, sub_uint}
mul_u32 :: proc(x, y: u32) -> (hi, lo: u32) {
@(require_results)
mul_u32 :: proc "contextless" (x, y: u32) -> (hi, lo: u32) {
z := u64(x) * u64(y)
hi, lo = u32(z>>32), u32(z)
return
}
mul_u64 :: proc(x, y: u64) -> (hi, lo: u64) {
@(require_results)
mul_u64 :: proc "contextless" (x, y: u64) -> (hi, lo: u64) {
prod_wide := u128(x) * u128(y)
hi, lo = u64(prod_wide>>64), u64(prod_wide)
return
}
mul_uint :: proc(x, y: uint) -> (hi, lo: uint) {
@(require_results)
mul_uint :: proc "contextless" (x, y: uint) -> (hi, lo: uint) {
when size_of(uint) == size_of(u32) {
a, b := mul_u32(u32(x), u32(y))
} else {
@@ -222,13 +262,15 @@ mul_uint :: proc(x, y: uint) -> (hi, lo: uint) {
mul :: proc{mul_u32, mul_u64, mul_uint}
div_u32 :: proc(hi, lo, y: u32) -> (quo, rem: u32) {
@(require_results)
div_u32 :: proc "odin" (hi, lo, y: u32) -> (quo, rem: u32) {
assert(y != 0 && y <= hi)
z := u64(hi)<<32 | u64(lo)
quo, rem = u32(z/u64(y)), u32(z%u64(y))
return
}
div_u64 :: proc(hi, lo, y: u64) -> (quo, rem: u64) {
@(require_results)
div_u64 :: proc "odin" (hi, lo, y: u64) -> (quo, rem: u64) {
y := y
two32 :: 1 << 32
mask32 :: two32 - 1
@@ -273,7 +315,8 @@ div_u64 :: proc(hi, lo, y: u64) -> (quo, rem: u64) {
return q1*two32 + q0, (un21*two32 + un0 - q0*y) >> s
}
div_uint :: proc(hi, lo, y: uint) -> (quo, rem: uint) {
@(require_results)
div_uint :: proc "odin" (hi, lo, y: uint) -> (quo, rem: uint) {
when size_of(uint) == size_of(u32) {
a, b := div_u32(u32(hi), u32(lo), u32(y))
} else {
@@ -286,16 +329,26 @@ div :: proc{div_u32, div_u64, div_uint}
is_power_of_two_u8 :: proc(i: u8) -> bool { return i > 0 && (i & (i-1)) == 0 }
is_power_of_two_i8 :: proc(i: i8) -> bool { return i > 0 && (i & (i-1)) == 0 }
is_power_of_two_u16 :: proc(i: u16) -> bool { return i > 0 && (i & (i-1)) == 0 }
is_power_of_two_i16 :: proc(i: i16) -> bool { return i > 0 && (i & (i-1)) == 0 }
is_power_of_two_u32 :: proc(i: u32) -> bool { return i > 0 && (i & (i-1)) == 0 }
is_power_of_two_i32 :: proc(i: i32) -> bool { return i > 0 && (i & (i-1)) == 0 }
is_power_of_two_u64 :: proc(i: u64) -> bool { return i > 0 && (i & (i-1)) == 0 }
is_power_of_two_i64 :: proc(i: i64) -> bool { return i > 0 && (i & (i-1)) == 0 }
is_power_of_two_uint :: proc(i: uint) -> bool { return i > 0 && (i & (i-1)) == 0 }
is_power_of_two_int :: proc(i: int) -> bool { return i > 0 && (i & (i-1)) == 0 }
@(require_results)
is_power_of_two_u8 :: proc "contextless" (i: u8) -> bool { return i > 0 && (i & (i-1)) == 0 }
@(require_results)
is_power_of_two_i8 :: proc "contextless" (i: i8) -> bool { return i > 0 && (i & (i-1)) == 0 }
@(require_results)
is_power_of_two_u16 :: proc "contextless" (i: u16) -> bool { return i > 0 && (i & (i-1)) == 0 }
@(require_results)
is_power_of_two_i16 :: proc "contextless" (i: i16) -> bool { return i > 0 && (i & (i-1)) == 0 }
@(require_results)
is_power_of_two_u32 :: proc "contextless" (i: u32) -> bool { return i > 0 && (i & (i-1)) == 0 }
@(require_results)
is_power_of_two_i32 :: proc "contextless" (i: i32) -> bool { return i > 0 && (i & (i-1)) == 0 }
@(require_results)
is_power_of_two_u64 :: proc "contextless" (i: u64) -> bool { return i > 0 && (i & (i-1)) == 0 }
@(require_results)
is_power_of_two_i64 :: proc "contextless" (i: i64) -> bool { return i > 0 && (i & (i-1)) == 0 }
@(require_results)
is_power_of_two_uint :: proc "contextless" (i: uint) -> bool { return i > 0 && (i & (i-1)) == 0 }
@(require_results)
is_power_of_two_int :: proc "contextless" (i: int) -> bool { return i > 0 && (i & (i-1)) == 0 }
is_power_of_two :: proc{
is_power_of_two_u8, is_power_of_two_i8,
@@ -320,44 +373,56 @@ len_u8_table := [256]u8{
}
bitfield_extract_u8 :: proc(value: u8, offset, bits: uint) -> u8 { return (value >> offset) & u8(1<<bits - 1) }
bitfield_extract_u16 :: proc(value: u16, offset, bits: uint) -> u16 { return (value >> offset) & u16(1<<bits - 1) }
bitfield_extract_u32 :: proc(value: u32, offset, bits: uint) -> u32 { return (value >> offset) & u32(1<<bits - 1) }
bitfield_extract_u64 :: proc(value: u64, offset, bits: uint) -> u64 { return (value >> offset) & u64(1<<bits - 1) }
bitfield_extract_u128 :: proc(value: u128, offset, bits: uint) -> u128 { return (value >> offset) & u128(1<<bits - 1) }
bitfield_extract_uint :: proc(value: uint, offset, bits: uint) -> uint { return (value >> offset) & uint(1<<bits - 1) }
@(require_results)
bitfield_extract_u8 :: proc "contextless" (value: u8, offset, bits: uint) -> u8 { return (value >> offset) & u8(1<<bits - 1) }
@(require_results)
bitfield_extract_u16 :: proc "contextless" (value: u16, offset, bits: uint) -> u16 { return (value >> offset) & u16(1<<bits - 1) }
@(require_results)
bitfield_extract_u32 :: proc "contextless" (value: u32, offset, bits: uint) -> u32 { return (value >> offset) & u32(1<<bits - 1) }
@(require_results)
bitfield_extract_u64 :: proc "contextless" (value: u64, offset, bits: uint) -> u64 { return (value >> offset) & u64(1<<bits - 1) }
@(require_results)
bitfield_extract_u128 :: proc "contextless" (value: u128, offset, bits: uint) -> u128 { return (value >> offset) & u128(1<<bits - 1) }
@(require_results)
bitfield_extract_uint :: proc "contextless" (value: uint, offset, bits: uint) -> uint { return (value >> offset) & uint(1<<bits - 1) }
bitfield_extract_i8 :: proc(value: i8, offset, bits: uint) -> i8 {
@(require_results)
bitfield_extract_i8 :: proc "contextless" (value: i8, offset, bits: uint) -> i8 {
v := (u8(value) >> offset) & u8(1<<bits - 1)
m := u8(1<<(bits-1))
r := (v~m) - m
return i8(r)
}
bitfield_extract_i16 :: proc(value: i16, offset, bits: uint) -> i16 {
@(require_results)
bitfield_extract_i16 :: proc "contextless" (value: i16, offset, bits: uint) -> i16 {
v := (u16(value) >> offset) & u16(1<<bits - 1)
m := u16(1<<(bits-1))
r := (v~m) - m
return i16(r)
}
bitfield_extract_i32 :: proc(value: i32, offset, bits: uint) -> i32 {
@(require_results)
bitfield_extract_i32 :: proc "contextless" (value: i32, offset, bits: uint) -> i32 {
v := (u32(value) >> offset) & u32(1<<bits - 1)
m := u32(1<<(bits-1))
r := (v~m) - m
return i32(r)
}
bitfield_extract_i64 :: proc(value: i64, offset, bits: uint) -> i64 {
@(require_results)
bitfield_extract_i64 :: proc "contextless" (value: i64, offset, bits: uint) -> i64 {
v := (u64(value) >> offset) & u64(1<<bits - 1)
m := u64(1<<(bits-1))
r := (v~m) - m
return i64(r)
}
bitfield_extract_i128 :: proc(value: i128, offset, bits: uint) -> i128 {
@(require_results)
bitfield_extract_i128 :: proc "contextless" (value: i128, offset, bits: uint) -> i128 {
v := (u128(value) >> offset) & u128(1<<bits - 1)
m := u128(1<<(bits-1))
r := (v~m) - m
return i128(r)
}
bitfield_extract_int :: proc(value: int, offset, bits: uint) -> int {
@(require_results)
bitfield_extract_int :: proc "contextless" (value: int, offset, bits: uint) -> int {
v := (uint(value) >> offset) & uint(1<<bits - 1)
m := uint(1<<(bits-1))
r := (v~m) - m
@@ -381,52 +446,64 @@ bitfield_extract :: proc{
}
bitfield_insert_u8 :: proc(base, insert: u8, offset, bits: uint) -> u8 {
@(require_results)
bitfield_insert_u8 :: proc "contextless" (base, insert: u8, offset, bits: uint) -> u8 {
mask := u8(1<<bits - 1)
return (base &~ (mask<<offset)) | ((insert&mask) << offset)
}
bitfield_insert_u16 :: proc(base, insert: u16, offset, bits: uint) -> u16 {
@(require_results)
bitfield_insert_u16 :: proc "contextless" (base, insert: u16, offset, bits: uint) -> u16 {
mask := u16(1<<bits - 1)
return (base &~ (mask<<offset)) | ((insert&mask) << offset)
}
bitfield_insert_u32 :: proc(base, insert: u32, offset, bits: uint) -> u32 {
@(require_results)
bitfield_insert_u32 :: proc "contextless" (base, insert: u32, offset, bits: uint) -> u32 {
mask := u32(1<<bits - 1)
return (base &~ (mask<<offset)) | ((insert&mask) << offset)
}
bitfield_insert_u64 :: proc(base, insert: u64, offset, bits: uint) -> u64 {
@(require_results)
bitfield_insert_u64 :: proc "contextless" (base, insert: u64, offset, bits: uint) -> u64 {
mask := u64(1<<bits - 1)
return (base &~ (mask<<offset)) | ((insert&mask) << offset)
}
bitfield_insert_u128 :: proc(base, insert: u128, offset, bits: uint) -> u128 {
@(require_results)
bitfield_insert_u128 :: proc "contextless" (base, insert: u128, offset, bits: uint) -> u128 {
mask := u128(1<<bits - 1)
return (base &~ (mask<<offset)) | ((insert&mask) << offset)
}
bitfield_insert_uint :: proc(base, insert: uint, offset, bits: uint) -> uint {
@(require_results)
bitfield_insert_uint :: proc "contextless" (base, insert: uint, offset, bits: uint) -> uint {
mask := uint(1<<bits - 1)
return (base &~ (mask<<offset)) | ((insert&mask) << offset)
}
bitfield_insert_i8 :: proc(base, insert: i8, offset, bits: uint) -> i8 {
@(require_results)
bitfield_insert_i8 :: proc "contextless" (base, insert: i8, offset, bits: uint) -> i8 {
mask := i8(1<<bits - 1)
return (base &~ (mask<<offset)) | ((insert&mask) << offset)
}
bitfield_insert_i16 :: proc(base, insert: i16, offset, bits: uint) -> i16 {
@(require_results)
bitfield_insert_i16 :: proc "contextless" (base, insert: i16, offset, bits: uint) -> i16 {
mask := i16(1<<bits - 1)
return (base &~ (mask<<offset)) | ((insert&mask) << offset)
}
bitfield_insert_i32 :: proc(base, insert: i32, offset, bits: uint) -> i32 {
// Returns `base` with the `bits`-wide field at bit position `offset`
// replaced by the low `bits` bits of `insert`.
@(require_results)
bitfield_insert_i32 :: proc "contextless" (base, insert: i32, offset, bits: uint) -> i32 {
	mask    := i32(1<<bits - 1)
	field   := (insert & mask) << offset
	cleared := base &~ (mask << offset)
	return cleared | field
}
bitfield_insert_i64 :: proc(base, insert: i64, offset, bits: uint) -> i64 {
// Returns `base` with the `bits`-wide field at bit position `offset`
// replaced by the low `bits` bits of `insert`.
@(require_results)
bitfield_insert_i64 :: proc "contextless" (base, insert: i64, offset, bits: uint) -> i64 {
	mask    := i64(1<<bits - 1)
	field   := (insert & mask) << offset
	cleared := base &~ (mask << offset)
	return cleared | field
}
bitfield_insert_i128 :: proc(base, insert: i128, offset, bits: uint) -> i128 {
// Returns `base` with the `bits`-wide field at bit position `offset`
// replaced by the low `bits` bits of `insert`.
@(require_results)
bitfield_insert_i128 :: proc "contextless" (base, insert: i128, offset, bits: uint) -> i128 {
	mask    := i128(1<<bits - 1)
	field   := (insert & mask) << offset
	cleared := base &~ (mask << offset)
	return cleared | field
}
bitfield_insert_int :: proc(base, insert: int, offset, bits: uint) -> int {
// Returns `base` with the `bits`-wide field at bit position `offset`
// replaced by the low `bits` bits of `insert`.
@(require_results)
bitfield_insert_int :: proc "contextless" (base, insert: int, offset, bits: uint) -> int {
	mask    := int(1<<bits - 1)
	field   := (insert & mask) << offset
	cleared := base &~ (mask << offset)
	return cleared | field
}

View File

@@ -11,11 +11,13 @@ import "core:time"
// with additional enum based call
// Quadratic ease-in, modeled after the parabola y = x^2.
@(require_results)
quadratic_in :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
	squared := p * p
	return squared
}
// Quadratic ease-out, modeled after the parabola y = -x^2 + 2x.
@(require_results)
quadratic_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
	f := p * (p - 2)
	return -f
}
@@ -23,6 +25,7 @@ quadratic_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(
// Modeled after the piecewise quadratic
// y = (1/2)((2x)^2) ; [0, 0.5)
// y = -(1/2)((2x-1)*(2x-3) - 1) ; [0.5, 1]
@(require_results)
quadratic_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
if p < 0.5 {
return 2 * p * p
@@ -32,11 +35,13 @@ quadratic_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_flo
}
// Cubic ease-in, modeled after y = x^3.
@(require_results)
cubic_in :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
	squared := p * p
	return squared * p
}
// Modeled after the cubic y = (x - 1)^3 + 1
@(require_results)
cubic_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
f := p - 1
return f * f * f + 1
@@ -45,6 +50,7 @@ cubic_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
// Modeled after the piecewise cubic
// y = (1/2)((2x)^3) ; [0, 0.5)
// y = (1/2)((2x-2)^3 + 2) ; [0.5, 1]
@(require_results)
cubic_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
if p < 0.5 {
return 4 * p * p * p
@@ -55,11 +61,13 @@ cubic_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T
}
// Quartic ease-in, modeled after y = x^4.
@(require_results)
quartic_in :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
	squared := p * p
	return squared * p * p
}
// Modeled after the quartic y = 1 - (x - 1)^4
@(require_results)
quartic_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
f := p - 1
return f * f * f * (1 - p) + 1
@@ -68,6 +76,7 @@ quartic_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T)
// Modeled after the piecewise quartic
// y = (1/2)((2x)^4) ; [0, 0.5)
// y = -(1/2)((2x-2)^4 - 2) ; [0.5, 1]
@(require_results)
quartic_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
if p < 0.5 {
return 8 * p * p * p * p
@@ -78,11 +87,13 @@ quartic_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float
}
// Quintic ease-in, modeled after y = x^5.
@(require_results)
quintic_in :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
	squared := p * p
	return squared * p * p * p
}
// Modeled after the quintic y = (x - 1)^5 + 1
@(require_results)
quintic_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
f := p - 1
return f * f * f * f * f + 1
@@ -91,6 +102,7 @@ quintic_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T)
// Modeled after the piecewise quintic
// y = (1/2)((2x)^5) ; [0, 0.5)
// y = (1/2)((2x-2)^5 + 2) ; [0.5, 1]
@(require_results)
quintic_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
if p < 0.5 {
return 16 * p * p * p * p * p
@@ -101,26 +113,31 @@ quintic_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float
}
// Sine ease-in, modeled after a quarter-cycle of a sine wave.
@(require_results)
sine_in :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
	phase := (p - 1) * PI_2
	return 1 + math.sin(phase)
}
// Sine ease-out, modeled after a quarter-cycle of a sine wave (shifted phase).
@(require_results)
sine_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
	angle := p * PI_2
	return math.sin(angle)
}
// Sine ease-in-out, modeled after half a sine wave.
@(require_results)
sine_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
	cosine := math.cos(p * math.PI)
	return 0.5 * (1 - cosine)
}
// Circular ease-in, modeled after shifted quadrant IV of the unit circle.
@(require_results)
circular_in :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
	root := math.sqrt(1 - (p * p))
	return 1 - root
}
// Circular ease-out, modeled after shifted quadrant II of the unit circle.
@(require_results)
circular_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
	product := (2 - p) * p
	return math.sqrt(product)
}
@@ -128,6 +145,7 @@ circular_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T
// Modeled after the piecewise circular function
// y = (1/2)(1 - sqrt(1 - 4x^2)) ; [0, 0.5)
// y = (1/2)(sqrt(-(2x - 3)*(2x - 1)) + 1) ; [0.5, 1]
@(require_results)
circular_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
if p < 0.5 {
return 0.5 * (1 - math.sqrt(1 - 4 * (p * p)))
@@ -137,11 +155,13 @@ circular_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_floa
}
// Exponential ease-in, modeled after y = 2^(10(x - 1)).
// Returns p unchanged at p == 0 so the curve starts exactly at zero.
@(require_results)
exponential_in :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
	if p == 0.0 {
		return p
	}
	return math.pow(2, 10 * (p - 1))
}
// Exponential ease-out, modeled after y = -2^(-10x) + 1.
// Returns p unchanged at p == 1 so the curve ends exactly at one.
@(require_results)
exponential_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
	if p == 1.0 {
		return p
	}
	return 1 - math.pow(2, -10 * p)
}
@@ -149,6 +169,7 @@ exponential_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_floa
// Modeled after the piecewise exponential
// y = (1/2)2^(10(2x - 1)) ; [0,0.5)
// y = -(1/2)*2^(-10(2x - 1))) + 1 ; [0.5,1]
@(require_results)
exponential_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
if p == 0.0 || p == 1.0 {
return p
@@ -162,11 +183,13 @@ exponential_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_f
}
// Elastic ease-in, modeled after the damped sine wave
// y = sin(13pi/2*x) * pow(2, 10*(x - 1)).
@(require_results)
elastic_in :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
	oscillation := math.sin(13 * PI_2 * p)
	damping     := math.pow(2, 10 * (p - 1))
	return oscillation * damping
}
// Elastic ease-out, modeled after the damped sine wave
// y = sin(-13pi/2*(x + 1)) * pow(2, -10x) + 1.
@(require_results)
elastic_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
	oscillation := math.sin(-13 * PI_2 * (p + 1))
	damping     := math.pow(2, -10 * p)
	return oscillation * damping + 1
}
@@ -174,6 +197,7 @@ elastic_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T)
// Modeled after the piecewise exponentially-damped sine wave:
// y = (1/2)*sin(13pi/2*(2*x))*pow(2, 10 * ((2*x) - 1)) ; [0,0.5)
// y = (1/2)*(sin(-13pi/2*((2x-1)+1))*pow(2,-10(2*x-1)) + 2) ; [0.5, 1]
@(require_results)
elastic_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
if p < 0.5 {
return 0.5 * math.sin(13 * PI_2 * (2 * p)) * math.pow(2, 10 * ((2 * p) - 1))
@@ -183,11 +207,13 @@ elastic_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float
}
// Back ease-in, modeled after the overshooting cubic y = x^3 - x*sin(x*pi).
@(require_results)
back_in :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
	cube      := p * p * p
	overshoot := p * math.sin(p * math.PI)
	return cube - overshoot
}
// Modeled after overshooting cubic y = 1-((1-x)^3-(1-x)*sin((1-x)*pi))
@(require_results)
back_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
f := 1 - p
return 1 - (f * f * f - f * math.sin(f * math.PI))
@@ -196,6 +222,7 @@ back_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
// Modeled after the piecewise overshooting cubic function:
// y = (1/2)*((2x)^3-(2x)*sin(2*x*pi)) ; [0, 0.5)
// y = (1/2)*(1-((1-x)^3-(1-x)*sin((1-x)*pi))+1) ; [0.5, 1]
@(require_results)
back_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
if p < 0.5 {
f := 2 * p
@@ -206,10 +233,12 @@ back_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T)
}
}
// Bounce ease-in: the time-reversed mirror of bounce_out.
@(require_results)
bounce_in :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
	inverted := 1 - p
	return 1 - bounce_out(inverted)
}
@(require_results)
bounce_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
if p < 4/11.0 {
return (121 * p * p)/16.0
@@ -222,6 +251,7 @@ bounce_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T)
}
}
@(require_results)
bounce_in_out :: proc "contextless" (p: $T) -> T where intrinsics.type_is_float(T) {
if p < 0.5 {
return 0.5 * bounce_in(p*2)
@@ -276,50 +306,51 @@ Ease :: enum {
Bounce_In_Out,
}
@(require_results)
ease :: proc "contextless" (type: Ease, p: $T) -> T
where intrinsics.type_is_float(T) {
switch type {
case .Linear: return p
case .Quadratic_In: return quadratic_in(p)
case .Quadratic_Out: return quadratic_out(p)
case .Quadratic_In_Out: return quadratic_in_out(p)
case .Linear: return p
case .Cubic_In: return cubic_in(p)
case .Cubic_Out: return cubic_out(p)
case .Cubic_In_Out: return cubic_in_out(p)
case .Quadratic_In: return quadratic_in(p)
case .Quadratic_Out: return quadratic_out(p)
case .Quadratic_In_Out: return quadratic_in_out(p)
case .Quartic_In: return quartic_in(p)
case .Quartic_Out: return quartic_out(p)
case .Quartic_In_Out: return quartic_in_out(p)
case .Cubic_In: return cubic_in(p)
case .Cubic_Out: return cubic_out(p)
case .Cubic_In_Out: return cubic_in_out(p)
case .Quintic_In: return quintic_in(p)
case .Quintic_Out: return quintic_out(p)
case .Quintic_In_Out: return quintic_in_out(p)
case .Quartic_In: return quartic_in(p)
case .Quartic_Out: return quartic_out(p)
case .Quartic_In_Out: return quartic_in_out(p)
case .Sine_In: return sine_in(p)
case .Sine_Out: return sine_out(p)
case .Sine_In_Out: return sine_in_out(p)
case .Quintic_In: return quintic_in(p)
case .Quintic_Out: return quintic_out(p)
case .Quintic_In_Out: return quintic_in_out(p)
case .Circular_In: return circular_in(p)
case .Circular_Out: return circular_out(p)
case .Circular_In_Out: return circular_in_out(p)
case .Sine_In: return sine_in(p)
case .Sine_Out: return sine_out(p)
case .Sine_In_Out: return sine_in_out(p)
case .Exponential_In: return exponential_in(p)
case .Exponential_Out: return exponential_out(p)
case .Exponential_In_Out: return exponential_in_out(p)
case .Circular_In: return circular_in(p)
case .Circular_Out: return circular_out(p)
case .Circular_In_Out: return circular_in_out(p)
case .Elastic_In: return elastic_in(p)
case .Elastic_Out: return elastic_out(p)
case .Elastic_In_Out: return elastic_in_out(p)
case .Exponential_In: return exponential_in(p)
case .Exponential_Out: return exponential_out(p)
case .Exponential_In_Out: return exponential_in_out(p)
case .Back_In: return back_in(p)
case .Back_Out: return back_out(p)
case .Back_In_Out: return back_in_out(p)
case .Elastic_In: return elastic_in(p)
case .Elastic_Out: return elastic_out(p)
case .Elastic_In_Out: return elastic_in_out(p)
case .Bounce_In: return bounce_in(p)
case .Bounce_Out: return bounce_out(p)
case .Bounce_In_Out: return bounce_in_out(p)
case .Back_In: return back_in(p)
case .Back_Out: return back_out(p)
case .Back_In_Out: return back_in_out(p)
case .Bounce_In: return bounce_in(p)
case .Bounce_Out: return bounce_out(p)
case .Bounce_In_Out: return bounce_in_out(p)
}
// in case type was invalid
@@ -353,6 +384,7 @@ Flux_Tween :: struct($T: typeid) {
}
// init flux map to a float type and a wanted cap
@(require_results)
flux_init :: proc($T: typeid, value_capacity := 8) -> Flux_Map(T) where intrinsics.type_is_float(T) {
return {
values = make(map[^T]Flux_Tween(T), value_capacity),
@@ -374,6 +406,7 @@ flux_clear :: proc(flux: ^Flux_Map($T)) where intrinsics.type_is_float(T) {
// append / overwrite existing tween value to parameters
// rest is initialized in flux_tween_init, inside update
// return value can be used to set callbacks
@(require_results)
flux_to :: proc(
flux: ^Flux_Map($T),
value: ^T,
@@ -475,6 +508,7 @@ flux_update :: proc(flux: ^Flux_Map($T), dt: f64) where intrinsics.type_is_float
// stop a specific key inside the map
// returns true when it successfully removed the key
@(require_results)
flux_stop :: proc(flux: ^Flux_Map($T), key: ^T) -> bool where intrinsics.type_is_float(T) {
if key in flux.values {
delete_key(&flux.values, key)
@@ -486,6 +520,7 @@ flux_stop :: proc(flux: ^Flux_Map($T), key: ^T) -> bool where intrinsics.type_is
// returns the amount of time left for the tween animation, if the key exists in the map
// returns 0 if the tween doesnt exist on the map
@(require_results)
flux_tween_time_left :: proc(flux: Flux_Map($T), key: ^T) -> f64 {
if tween, ok := flux.values[key]; ok {
return ((1 - tween.progress) * tween.rate) + tween.delay

Some files were not shown because too many files have changed in this diff Show More