merge from upstream and convert to ^File types

This commit is contained in:
jason
2022-05-16 13:49:57 -04:00
276 changed files with 30251 additions and 7935 deletions

View File

@@ -39,7 +39,9 @@ jobs:
make
timeout-minutes: 10
- name: Odin issues tests
run: tests/issues/run.sh
run: |
cd tests/issues
./run.sh
timeout-minutes: 10
- name: Odin check examples/all for Linux i386
run: ./odin check examples/all -vet -strict-style -target:linux_i386
@@ -91,7 +93,9 @@ jobs:
make
timeout-minutes: 10
- name: Odin issues tests
run: tests/issues/run.sh
run: |
cd tests/issues
./run.sh
timeout-minutes: 10
- name: Odin check examples/all for Darwin arm64
run: ./odin check examples/all -vet -strict-style -target:darwin_arm64
@@ -163,7 +167,8 @@ jobs:
shell: cmd
run: |
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat"
call tests\issues\run.bat
cd tests\issues
call run.bat
timeout-minutes: 10
- name: Odin check examples/all for Windows 32bits
shell: cmd

2
.gitignore vendored
View File

@@ -269,6 +269,8 @@ bin/
# - Linux/MacOS
odin
odin.dSYM
*.bin
demo.bin
# shared collection
shared/

View File

@@ -1,4 +1,4 @@
Copyright (c) 2016-2021 Ginger Bill. All rights reserved.
Copyright (c) 2016-2022 Ginger Bill. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

View File

@@ -1,7 +1,7 @@
all: debug demo
all: debug
demo:
./odin run examples/demo/demo.odin
./odin run examples/demo/demo.odin -file
report:
./odin report

View File

@@ -1,90 +1,90 @@
// This is purely for documentation
package builtin
nil :: nil;
false :: 0!=0;
true :: 0==0;
nil :: nil
false :: 0!=0
true :: 0==0
ODIN_OS :: ODIN_OS;
ODIN_ARCH :: ODIN_ARCH;
ODIN_ENDIAN :: ODIN_ENDIAN;
ODIN_VENDOR :: ODIN_VENDOR;
ODIN_VERSION :: ODIN_VERSION;
ODIN_ROOT :: ODIN_ROOT;
ODIN_DEBUG :: ODIN_DEBUG;
ODIN_OS :: ODIN_OS
ODIN_ARCH :: ODIN_ARCH
ODIN_ENDIAN :: ODIN_ENDIAN
ODIN_VENDOR :: ODIN_VENDOR
ODIN_VERSION :: ODIN_VERSION
ODIN_ROOT :: ODIN_ROOT
ODIN_DEBUG :: ODIN_DEBUG
byte :: u8; // alias
byte :: u8 // alias
bool :: bool;
b8 :: b8;
b16 :: b16;
b32 :: b32;
b64 :: b64;
bool :: bool
b8 :: b8
b16 :: b16
b32 :: b32
b64 :: b64
i8 :: i8;
u8 :: u8;
i16 :: i16;
u16 :: u16;
i32 :: i32;
u32 :: u32;
i64 :: i64;
u64 :: u64;
i8 :: i8
u8 :: u8
i16 :: i16
u16 :: u16
i32 :: i32
u32 :: u32
i64 :: i64
u64 :: u64
i128 :: i128;
u128 :: u128;
i128 :: i128
u128 :: u128
rune :: rune;
rune :: rune
f16 :: f16;
f32 :: f32;
f64 :: f64;
f16 :: f16
f32 :: f32
f64 :: f64
complex32 :: complex32;
complex64 :: complex64;
complex128 :: complex128;
complex32 :: complex32
complex64 :: complex64
complex128 :: complex128
quaternion64 :: quaternion64;
quaternion128 :: quaternion128;
quaternion256 :: quaternion256;
quaternion64 :: quaternion64
quaternion128 :: quaternion128
quaternion256 :: quaternion256
int :: int;
uint :: uint;
uintptr :: uintptr;
int :: int
uint :: uint
uintptr :: uintptr
rawptr :: rawptr;
string :: string;
cstring :: cstring;
any :: any;
rawptr :: rawptr
string :: string
cstring :: cstring
any :: any
typeid :: typeid;
typeid :: typeid
// Endian Specific Types
i16le :: i16le;
u16le :: u16le;
i32le :: i32le;
u32le :: u32le;
i64le :: i64le;
u64le :: u64le;
i128le :: i128le;
u128le :: u128le;
i16le :: i16le
u16le :: u16le
i32le :: i32le
u32le :: u32le
i64le :: i64le
u64le :: u64le
i128le :: i128le
u128le :: u128le
i16be :: i16be;
u16be :: u16be;
i32be :: i32be;
u32be :: u32be;
i64be :: i64be;
u64be :: u64be;
i128be :: i128be;
u128be :: u128be;
i16be :: i16be
u16be :: u16be
i32be :: i32be
u32be :: u32be
i64be :: i64be
u64be :: u64be
i128be :: i128be
u128be :: u128be
f16le :: f16le;
f32le :: f32le;
f64le :: f64le;
f16le :: f16le
f32le :: f32le
f64le :: f64le
f16be :: f16be;
f32be :: f32be;
f64be :: f64be;
f16be :: f16be
f32be :: f32be
f64be :: f64be

View File

@@ -10,7 +10,14 @@ clone :: proc(s: []byte, allocator := context.allocator, loc := #caller_location
return c[:len(s)]
}
ptr_from_slice :: proc(str: []byte) -> ^byte {
// Allocator-error-aware variant of `clone`: copies `s` into a fresh slice
// allocated on `allocator`, returning an `mem.Allocator_Error` instead of
// failing when allocation is not possible.
clone_safe :: proc(s: []byte, allocator := context.allocator, loc := #caller_location) -> (data: []byte, err: mem.Allocator_Error) {
	c := make([]byte, len(s), allocator, loc) or_return
	copy(c, s)
	return c[:len(s)], nil
}
// Backwards-compatible alias for the renamed procedure.
ptr_from_slice :: ptr_from_bytes

// Returns the data pointer of `str`.
ptr_from_bytes :: proc(str: []byte) -> ^byte {
	// Relies on []byte sharing the (data, len) layout of mem.Raw_String.
	d := transmute(mem.Raw_String)str
	return d.data
}
@@ -134,6 +141,25 @@ join :: proc(a: [][]byte, sep: []byte, allocator := context.allocator) -> []byte
return b
}
// Allocator-error-aware variant of `join`: concatenates the slices in `a`,
// inserting `sep` between consecutive elements, into one slice allocated on
// `allocator`. Returns (nil, nil) when `a` is empty.
join_safe :: proc(a: [][]byte, sep: []byte, allocator := context.allocator) -> (data: []byte, err: mem.Allocator_Error) {
	if len(a) == 0 {
		return nil, nil
	}

	// Total size: one separator between each pair plus every element.
	n := len(sep) * (len(a) - 1)
	for s in a {
		n += len(s)
	}
	b := make([]byte, n, allocator) or_return
	i := copy(b, a[0])
	for s in a[1:] {
		i += copy(b[i:], sep)
		i += copy(b[i:], s)
	}
	return b, nil
}
concatenate :: proc(a: [][]byte, allocator := context.allocator) -> []byte {
if len(a) == 0 {
return nil
@@ -151,6 +177,24 @@ concatenate :: proc(a: [][]byte, allocator := context.allocator) -> []byte {
return b
}
// Allocator-error-aware variant of `concatenate`: joins the slices in `a`
// back-to-back (no separator) into one slice allocated on `allocator`.
// Returns (nil, nil) when `a` is empty.
concatenate_safe :: proc(a: [][]byte, allocator := context.allocator) -> (data: []byte, err: mem.Allocator_Error) {
	if len(a) == 0 {
		return nil, nil
	}

	// Sum the element lengths so we allocate exactly once.
	n := 0
	for s in a {
		n += len(s)
	}
	b := make([]byte, n, allocator) or_return
	i := 0
	for s in a {
		i += copy(b[i:], s)
	}
	return b, nil
}
@private
_split :: proc(s, sep: []byte, sep_save, n: int, allocator := context.allocator) -> [][]byte {
s, n := s, n

View File

@@ -128,7 +128,6 @@ Deflate_Error :: enum {
BType_3,
}
// General I/O context for ZLIB, LZW, etc.
Context_Memory_Input :: struct #packed {
input_data: []u8,
@@ -151,7 +150,6 @@ when size_of(rawptr) == 8 {
#assert(size_of(Context_Memory_Input) == 52)
}
Context_Stream_Input :: struct #packed {
input_data: []u8,
input: io.Stream,
@@ -185,8 +183,6 @@ Context_Stream_Input :: struct #packed {
This simplifies end-of-stream handling where bits may be left in the bit buffer.
*/
// TODO: Make these return compress.Error errors.
input_size_from_memory :: proc(z: ^Context_Memory_Input) -> (res: i64, err: Error) {
return i64(len(z.input_data)), nil
}

View File

@@ -45,7 +45,7 @@ main :: proc() {
if len(args) < 2 {
stderr("No input file specified.\n")
err := load(slice=TEST, buf=&buf, known_gzip_size=len(TEST))
err := load(data=TEST, buf=&buf, known_gzip_size=len(TEST))
if err == nil {
stdout("Displaying test vector: ")
stdout(bytes.buffer_to_string(&buf))

View File

@@ -102,7 +102,7 @@ E_Deflate :: compress.Deflate_Error
GZIP_MAX_PAYLOAD_SIZE :: i64(max(u32le))
load :: proc{load_from_slice, load_from_file, load_from_context}
load :: proc{load_from_bytes, load_from_file, load_from_context}
load_from_file :: proc(filename: string, buf: ^bytes.Buffer, expected_output_size := -1, allocator := context.allocator) -> (err: Error) {
context.allocator = allocator
@@ -112,16 +112,16 @@ load_from_file :: proc(filename: string, buf: ^bytes.Buffer, expected_output_siz
err = E_General.File_Not_Found
if ok {
err = load_from_slice(data, buf, len(data), expected_output_size)
err = load_from_bytes(data, buf, len(data), expected_output_size)
}
return
}
load_from_slice :: proc(slice: []u8, buf: ^bytes.Buffer, known_gzip_size := -1, expected_output_size := -1, allocator := context.allocator) -> (err: Error) {
load_from_bytes :: proc(data: []byte, buf: ^bytes.Buffer, known_gzip_size := -1, expected_output_size := -1, allocator := context.allocator) -> (err: Error) {
buf := buf
z := &compress.Context_Memory_Input{
input_data = slice,
input_data = data,
output = buf,
}
return load_from_context(z, buf, known_gzip_size, expected_output_size, allocator)

View File

@@ -0,0 +1,148 @@
/*
This file was generated, so don't edit this by hand.
Transliterated from https://github.com/Ed-von-Schleck/shoco/blob/master/shoco_model.h,
which is an English word model.
*/
// package shoco is an implementation of the shoco short string compressor
package shoco
DEFAULT_MODEL :: Shoco_Model {
min_char = 39,
max_char = 122,
characters_by_id = {
'e', 'a', 'i', 'o', 't', 'h', 'n', 'r', 's', 'l', 'u', 'c', 'w', 'm', 'd', 'b', 'p', 'f', 'g', 'v', 'y', 'k', '-', 'H', 'M', 'T', '\'', 'B', 'x', 'I', 'W', 'L',
},
ids_by_character = {
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 26, -1, -1, -1, -1, -1, 22, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 27, -1, -1, -1, -1, -1, 23, 29, -1, -1, 31, 24, -1, -1, -1, -1, -1, -1, 25, -1, -1, 30, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1, 15, 11, 14, 0, 17, 18, 5, 2, -1, 21, 9, 13, 6, 3, 16, -1, 7, 8, 4, 10, 19, 12, 28, 20, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
},
successors_by_bigram = {
7, 4, 12, -1, 6, -1, 1, 0, 3, 5, -1, 9, -1, 8, 2, -1, 15, 14, -1, 10, 11, -1, -1, -1, -1, -1, -1, -1, 13, -1, -1, -1,
1, -1, 6, -1, 1, -1, 0, 3, 2, 4, 15, 11, -1, 9, 5, 10, 13, -1, 12, 8, 7, 14, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
9, 11, -1, 4, 2, -1, 0, 8, 1, 5, -1, 6, -1, 3, 7, 15, -1, 12, 10, 13, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, 14, 7, 5, -1, 1, 2, 8, 9, 0, 15, 6, 4, 11, -1, 12, 3, -1, 10, -1, 13, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
2, 4, 3, 1, 5, 0, -1, 6, 10, 9, 7, 12, 11, -1, -1, -1, -1, 13, -1, -1, 8, -1, 15, -1, -1, -1, 14, -1, -1, -1, -1, -1,
0, 1, 2, 3, 4, -1, -1, 5, 9, 10, 6, -1, -1, 8, 15, 11, -1, 14, -1, -1, 7, -1, 13, -1, -1, -1, 12, -1, -1, -1, -1, -1,
2, 8, 7, 4, 3, -1, 9, -1, 6, 11, -1, 5, -1, -1, 0, -1, -1, 14, 1, 15, 10, 12, -1, -1, -1, -1, 13, -1, -1, -1, -1, -1,
0, 3, 1, 2, 6, -1, 9, 8, 4, 12, 13, 10, -1, 11, 7, -1, -1, 15, 14, -1, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
0, 6, 3, 4, 1, 2, -1, -1, 5, 10, 7, 9, 11, 12, -1, -1, 8, 14, -1, -1, 15, 13, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
0, 6, 2, 5, 9, -1, -1, -1, 10, 1, 8, -1, 12, 14, 4, -1, 15, 7, -1, 13, 3, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
8, 10, 9, 15, 1, -1, 4, 0, 3, 2, -1, 6, -1, 12, 11, 13, 7, 14, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
1, 3, 6, 0, 4, 2, -1, 7, 13, 8, 9, 11, -1, -1, 15, -1, -1, -1, -1, -1, 10, 5, 14, -1, -1, -1, -1, -1, -1, -1, -1, -1,
3, 0, 1, 4, -1, 2, 5, 6, 7, 8, -1, 14, -1, -1, 9, 15, -1, 12, -1, -1, -1, 10, 11, -1, -1, -1, 13, -1, -1, -1, -1, -1,
0, 1, 3, 2, 15, -1, 12, -1, 7, 14, 4, -1, -1, 9, -1, 8, 5, 10, -1, -1, 6, -1, 13, -1, -1, -1, 11, -1, -1, -1, -1, -1,
0, 3, 1, 2, -1, -1, 12, 6, 4, 9, 7, -1, -1, 14, 8, -1, -1, 15, 11, 13, 5, -1, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1,
0, 5, 7, 2, 10, 13, -1, 6, 8, 1, 3, -1, -1, 14, 15, 11, -1, -1, -1, 12, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
0, 2, 6, 3, 7, 10, -1, 1, 9, 4, 8, -1, -1, 15, -1, 12, 5, -1, -1, -1, 11, -1, 13, -1, -1, -1, 14, -1, -1, -1, -1, -1,
1, 3, 4, 0, 7, -1, 12, 2, 11, 8, 6, 13, -1, -1, -1, -1, -1, 5, -1, -1, 10, 15, 9, -1, -1, -1, 14, -1, -1, -1, -1, -1,
1, 3, 5, 2, 13, 0, 9, 4, 7, 6, 8, -1, -1, 15, -1, 11, -1, -1, 10, -1, 14, -1, 12, -1, -1, -1, -1, -1, -1, -1, -1, -1,
0, 2, 1, 3, -1, -1, -1, 6, -1, -1, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
1, 11, 4, 0, 3, -1, 13, 12, 2, 7, -1, -1, 15, 10, 5, 8, 14, -1, -1, -1, -1, -1, 9, -1, -1, -1, 6, -1, -1, -1, -1, -1,
0, 9, 2, 14, 15, 4, 1, 13, 3, 5, -1, -1, 10, -1, -1, -1, -1, 6, 12, -1, 7, -1, 8, -1, -1, -1, 11, -1, -1, -1, -1, -1,
-1, 2, 14, -1, 1, 5, 8, 7, 4, 12, -1, 6, 9, 11, 13, 3, 10, 15, -1, -1, -1, -1, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1,
0, 1, 3, 2, -1, -1, -1, -1, -1, -1, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
4, 3, 1, 5, -1, -1, -1, 0, -1, -1, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
2, 8, 4, 1, -1, 0, -1, 6, -1, -1, 5, -1, 7, -1, -1, -1, -1, -1, -1, -1, 10, -1, -1, 9, -1, -1, -1, -1, -1, -1, -1, -1,
12, 5, -1, -1, 1, -1, -1, 7, 0, 3, -1, 2, -1, 4, 6, -1, -1, -1, -1, 8, -1, -1, 15, -1, 13, 9, -1, -1, -1, -1, -1, 11,
1, 3, 2, 4, -1, -1, -1, 5, -1, 7, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6, -1, -1, -1, -1, -1, -1, -1, -1, 8, -1, -1,
5, 3, 4, 12, 1, 6, -1, -1, -1, -1, 8, 2, -1, -1, -1, -1, 0, 9, -1, -1, 11, -1, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, 0, -1, 1, 12, 3, -1, -1, -1, -1, 5, -1, -1, -1, 2, -1, -1, -1, -1, -1, -1, -1, -1, 4, -1, -1, 6, -1, 10,
2, 3, 1, 4, -1, 0, -1, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7, -1, -1, -1, -1, -1, -1, -1, -1, 6, -1, -1,
5, 1, 3, 0, -1, -1, -1, -1, -1, -1, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2, -1, -1, -1, -1, -1, 9, -1, -1, 6, -1, 7,
},
successors_reversed = {
's', 't', 'c', 'l', 'm', 'a', 'd', 'r', 'v', 'T', 'A', 'L', 'e', 'M', 'Y', '-',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'-', 't', 'a', 'b', 's', 'h', 'c', 'r', 'n', 'w', 'p', 'm', 'l', 'd', 'i', 'f',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'u', 'e', 'i', 'a', 'o', 'r', 'y', 'l', 'I', 'E', 'R', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'e', 'a', 'o', 'i', 'u', 'A', 'y', 'E', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
't', 'n', 'f', 's', '\'', 'm', 'I', 'N', 'A', 'E', 'L', 'Z', 'r', 'V', 'R', 'C',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'o', 'a', 'y', 'i', 'u', 'e', 'I', 'L', 'D', '\'', 'E', 'Y', '\x00', '\x00', '\x00', '\x00',
'r', 'i', 'y', 'a', 'e', 'o', 'u', 'Y', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'h', 'o', 'e', 'E', 'i', 'u', 'r', 'w', 'a', 'H', 'y', 'R', 'Z', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'h', 'i', 'e', 'a', 'o', 'r', 'I', 'y', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'n', 't', 's', 'r', 'l', 'd', 'i', 'y', 'v', 'm', 'b', 'c', 'g', 'p', 'k', 'u',
'e', 'l', 'o', 'u', 'y', 'a', 'r', 'i', 's', 'j', 't', 'b', 'v', 'h', 'm', 'd',
'o', 'e', 'h', 'a', 't', 'k', 'i', 'r', 'l', 'u', 'y', 'c', 'q', 's', '-', 'd',
'e', 'i', 'o', 'a', 's', 'y', 'r', 'u', 'd', 'l', '-', 'g', 'n', 'v', 'm', 'f',
'r', 'n', 'd', 's', 'a', 'l', 't', 'e', 'm', 'c', 'v', 'y', 'i', 'x', 'f', 'p',
'o', 'e', 'r', 'a', 'i', 'f', 'u', 't', 'l', '-', 'y', 's', 'n', 'c', '\'', 'k',
'h', 'e', 'o', 'a', 'r', 'i', 'l', 's', 'u', 'n', 'g', 'b', '-', 't', 'y', 'm',
'e', 'a', 'i', 'o', 't', 'r', 'u', 'y', 'm', 's', 'l', 'b', '\'', '-', 'f', 'd',
'n', 's', 't', 'm', 'o', 'l', 'c', 'd', 'r', 'e', 'g', 'a', 'f', 'v', 'z', 'b',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'e', 'n', 'i', 's', 'h', 'l', 'f', 'y', '-', 'a', 'w', '\'', 'g', 'r', 'o', 't',
'e', 'l', 'i', 'y', 'd', 'o', 'a', 'f', 'u', 't', 's', 'k', 'w', 'v', 'm', 'p',
'e', 'a', 'o', 'i', 'u', 'p', 'y', 's', 'b', 'm', 'f', '\'', 'n', '-', 'l', 't',
'd', 'g', 'e', 't', 'o', 'c', 's', 'i', 'a', 'n', 'y', 'l', 'k', '\'', 'f', 'v',
'u', 'n', 'r', 'f', 'm', 't', 'w', 'o', 's', 'l', 'v', 'd', 'p', 'k', 'i', 'c',
'e', 'r', 'a', 'o', 'l', 'p', 'i', 't', 'u', 's', 'h', 'y', 'b', '-', '\'', 'm',
'\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'e', 'i', 'o', 'a', 's', 'y', 't', 'd', 'r', 'n', 'c', 'm', 'l', 'u', 'g', 'f',
'e', 't', 'h', 'i', 'o', 's', 'a', 'u', 'p', 'c', 'l', 'w', 'm', 'k', 'f', 'y',
'h', 'o', 'e', 'i', 'a', 't', 'r', 'u', 'y', 'l', 's', 'w', 'c', 'f', '\'', '-',
'r', 't', 'l', 's', 'n', 'g', 'c', 'p', 'e', 'i', 'a', 'd', 'm', 'b', 'f', 'o',
'e', 'i', 'a', 'o', 'y', 'u', 'r', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00', '\x00',
'a', 'i', 'h', 'e', 'o', 'n', 'r', 's', 'l', 'd', 'k', '-', 'f', '\'', 'c', 'b',
'p', 't', 'c', 'a', 'i', 'e', 'h', 'q', 'u', 'f', '-', 'y', 'o', '\x00', '\x00', '\x00',
'o', 'e', 's', 't', 'i', 'd', '\'', 'l', 'b', '-', 'm', 'a', 'r', 'n', 'p', 'w',
},
character_count = 32,
successor_count = 16,
max_successor_n = 7,
packs = {
{ 0x80000000, 1, 2, { 26, 24, 24, 24, 24, 24, 24, 24 }, { 15, 3, 0, 0, 0, 0, 0, 0 }, 0xc0, 0x80 },
{ 0xc0000000, 2, 4, { 25, 22, 19, 16, 16, 16, 16, 16 }, { 15, 7, 7, 7, 0, 0, 0, 0 }, 0xe0, 0xc0 },
{ 0xe0000000, 4, 8, { 23, 19, 15, 11, 8, 5, 2, 0 }, { 31, 15, 15, 15, 7, 7, 7, 3 }, 0xf0, 0xe0 },
},
}

View File

@@ -0,0 +1,318 @@
/*
Copyright 2022 Jeroen van Rijn <nom@duclavier.com>.
Made available under Odin's BSD-3 license.
List of contributors:
Jeroen van Rijn: Initial implementation.
An implementation of [shoco](https://github.com/Ed-von-Schleck/shoco) by Christian Schramm.
*/
// package shoco is an implementation of the shoco short string compressor
package shoco
import "core:intrinsics"
import "core:compress"
// One pack describes a fixed-width bit layout that encodes several character
// indices into a small machine word (e.g. for the default model: 2 chars into
// 1 byte, 4 into 2, 8 into 4).
Shoco_Pack :: struct {
	word:           u32,    // base code word, presumably with the header bits pre-set — see `packs` in the model
	bytes_packed:   i8,     // encoded size in bytes
	bytes_unpacked: i8,     // number of characters represented by one code word
	offsets:        [8]u16, // bit offset of each character slot within `word`
	masks:          [8]i16, // largest index storable in each slot (used as a bit mask)
	header_mask:    u8,     // mask selecting the header bits in the first encoded byte
	header:         u8,     // header bit pattern identifying this pack
}
// A shoco compression model: frequency-derived character/bigram tables plus
// the pack layouts used to encode runs of characters.
// See `validate_model` for the size relationships the tables must satisfy.
Shoco_Model :: struct {
	min_char:             u8,        // lowest character covered by `successors_reversed`
	max_char:             u8,        // one past the highest covered character
	characters_by_id:     []u8,      // id -> character (len == character_count)
	ids_by_character:     [256]i16,  // character -> id, -1 when not in the model
	successors_by_bigram: []i8,      // [char_id * character_count + next_id] -> successor index, -1 if unknown
	successors_reversed:  []u8,      // [(chr - min_char) * successor_count + successor_index] -> character
	character_count:      u8,        // number of modeled characters
	successor_count:      u8,        // successors stored per character
	max_successor_n:      i8,        // longest successor chain the encoder may follow
	packs:                []Shoco_Pack, // available encodings, smallest first (in the default model)
}
// Upper bound on the encoded size for an input of `uncompressed_size` bytes.
//
// The worst case is a stream of non-ASCII bytes (0x80..0xFF): each one is
// emitted as a 0x00 sentinel followed by the byte itself, doubling the size.
compress_bound :: proc(uncompressed_size: int) -> (worst_case_compressed_size: int) {
	worst_case_compressed_size = uncompressed_size * 2
	return
}
// Upper bound on the decoded size of `compressed_size` bytes of shoco data.
//
// The pack with the best expansion ratio (bytes_unpacked per byte_packed)
// determines the maximum possible growth.
decompress_bound :: proc(compressed_size: int, model := DEFAULT_MODEL) -> (maximum_decompressed_size: int) {
	most: f64
	for pack in model.packs {
		candidate := f64(compressed_size) / f64(pack.bytes_packed) * f64(pack.bytes_unpacked)
		if candidate > most {
			most = candidate
		}
	}
	return int(most)
}
// Returns the index into `model.packs` of the best pack able to encode the
// first `n_consecutive` entries of `indices`, or -1 when no pack fits.
//
// `indices[0]` is a character id; the following entries are successor indices.
// A pack fits when it unpacks at least `n_consecutive` bytes and every index
// fits within the corresponding slot mask.
find_best_encoding :: proc(indices: []i16, n_consecutive: i8, model := DEFAULT_MODEL) -> (res: int) {
	// Scan packs from last to first — in the default model the later packs
	// consume more input, so this prefers the densest encoding.
	for p := len(model.packs); p > 0; p -= 1 {
		pack := model.packs[p - 1]
		if n_consecutive >= pack.bytes_unpacked {
			have_index := true
			for i := 0; i < int(pack.bytes_unpacked); i += 1 {
				// An index larger than the slot's mask can't be stored here.
				if indices[i] > pack.masks[i] {
					have_index = false
					break
				}
			}
			if have_index {
				return p - 1
			}
		}
	}
	return -1
}
// Sanity-checks that the model's tables are mutually consistent in size.
// Returns .Unknown_Compression_Method on any mismatch, nil otherwise.
validate_model :: proc(model: Shoco_Model) -> (int, compress.Error) {
	chars      := int(model.character_count)
	successors := int(model.successor_count)
	char_span  := int(model.max_char - model.min_char)

	switch {
	case len(model.characters_by_id) != chars:
		return 0, .Unknown_Compression_Method
	case len(model.successors_by_bigram) != chars * chars:
		return 0, .Unknown_Compression_Method
	case len(model.successors_reversed) != successors * char_span:
		return 0, .Unknown_Compression_Method
	}

	// Model seems legit.
	return 0, nil
}
// Decompresses into provided buffer.
decompress_slice_to_output_buffer :: proc(input: []u8, output: []u8, model := DEFAULT_MODEL) -> (size: int, err: compress.Error) {
inp, inp_end := 0, len(input)
out, out_end := 0, len(output)
validate_model(model) or_return
for inp < inp_end {
val := transmute(i8)input[inp]
mark := int(-1)
for val < 0 {
val <<= 1
mark += 1
}
if mark > len(model.packs) {
return out, .Unknown_Compression_Method
}
if mark < 0 {
if out >= out_end {
return out, .Output_Too_Short
}
// Ignore the sentinel value for non-ASCII chars
if input[inp] == 0x00 {
inp += 1
if inp >= inp_end {
return out, .Stream_Too_Short
}
}
output[out] = input[inp]
inp, out = inp + 1, out + 1
} else {
pack := model.packs[mark]
if out + int(pack.bytes_unpacked) > out_end {
return out, .Output_Too_Short
} else if inp + int(pack.bytes_packed) > inp_end {
return out, .Stream_Too_Short
}
code := intrinsics.unaligned_load((^u32)(&input[inp]))
when ODIN_ENDIAN == .Little {
code = intrinsics.byte_swap(code)
}
// Unpack the leading char
offset := pack.offsets[0]
mask := pack.masks[0]
last_chr := model.characters_by_id[(code >> offset) & u32(mask)]
output[out] = last_chr
// Unpack the successor chars
for i := 1; i < int(pack.bytes_unpacked); i += 1 {
offset = pack.offsets[i]
mask = pack.masks[i]
index_major := u32(last_chr - model.min_char) * u32(model.successor_count)
index_minor := (code >> offset) & u32(mask)
last_chr = model.successors_reversed[index_major + index_minor]
output[out + i] = last_chr
}
out += int(pack.bytes_unpacked)
inp += int(pack.bytes_packed)
}
}
return out, nil
}
// Decompresses shoco-compressed `input` into a newly allocated string.
//
// Returns .Stream_Too_Short for empty input and .Out_Of_Memory when the
// scratch buffer cannot be allocated; other errors come from
// `decompress_slice_to_output_buffer`.
decompress_slice_to_string :: proc(input: []u8, model := DEFAULT_MODEL, allocator := context.allocator) -> (res: string, err: compress.Error) {
	context.allocator = allocator

	if len(input) == 0 {
		return "", .Stream_Too_Short
	}

	max_output_size := decompress_bound(len(input), model)

	buf: [dynamic]u8
	if !resize(&buf, max_output_size) {
		return "", .Out_Of_Memory
	}

	// Forward `model`: previously the default model was always used here,
	// silently ignoring a caller-supplied model.
	length, result := decompress_slice_to_output_buffer(input, buf[:], model)
	resize(&buf, length)
	return string(buf[:]), result
}
decompress :: proc{decompress_slice_to_output_buffer, decompress_slice_to_string}
// Compresses `input` into the caller-provided `output` buffer using `model`.
//
// Returns the number of bytes written and .Output_Too_Short when `output`
// cannot hold the encoded result.
compress_string_to_buffer :: proc(input: string, output: []u8, model := DEFAULT_MODEL, allocator := context.allocator) -> (size: int, err: compress.Error) {
	// Install the caller's allocator (used for the scratch `indices` buffer).
	// Previously the parameter was accepted but never applied, so the ambient
	// context allocator was always used; this matches the sibling procs.
	context.allocator = allocator

	inp, inp_end := 0, len(input)
	out, out_end := 0, len(output)
	output := output

	validate_model(model) or_return

	indices := make([]i16, model.max_successor_n + 1)
	defer delete(indices)

	last_resort := false

	encode: for inp < inp_end {
		if last_resort {
			// Literal fallback: emit the current byte as-is.
			last_resort = false

			if input[inp] & 0x80 == 0x80 {
				// Non-ASCII byte: prefix a 0x00 sentinel so the decoder does
				// not mistake the high bit for a pack header.
				if out + 2 > out_end {
					return out, .Output_Too_Short
				}
				output[out] = 0x00
				out += 1
			} else {
				// An ASCII byte
				if out + 1 > out_end {
					return out, .Output_Too_Short
				}
			}
			output[out] = input[inp]
			out, inp = out + 1, inp + 1
		} else {
			// Find the longest string of known successors
			indices[0] = model.ids_by_character[input[inp]]

			last_chr_index := indices[0]
			if last_chr_index < 0 {
				// Character not in the model; restart in literal mode.
				last_resort = true
				continue encode
			}

			rest := inp_end - inp
			n_consecutive: i8 = 1
			for ; n_consecutive <= model.max_successor_n; n_consecutive += 1 {
				// Stop at the end of the input (`inp_end > 0` is always true
				// inside this loop; kept for parity with upstream).
				if inp_end > 0 && int(n_consecutive) == rest {
					break
				}

				current_index := model.ids_by_character[input[inp + int(n_consecutive)]]
				if current_index < 0 { // '\0' is always -1
					break
				}

				successor_index := model.successors_by_bigram[last_chr_index * i16(model.character_count) + current_index]
				if successor_index < 0 {
					break
				}
				indices[n_consecutive] = i16(successor_index)
				last_chr_index = current_index
			}
			if n_consecutive < 2 {
				// A single character can't be packed; emit it as a literal.
				last_resort = true
				continue encode
			}

			pack_n := find_best_encoding(indices, n_consecutive)
			if pack_n >= 0 {
				if out + int(model.packs[pack_n].bytes_packed) > out_end {
					return out, .Output_Too_Short
				}
				pack := model.packs[pack_n]

				// Fold each index into its slot of the code word.
				code := pack.word
				for i := 0; i < int(pack.bytes_unpacked); i += 1 {
					code |= u32(indices[i]) << pack.offsets[i]
				}

				// In the little-endian world, we need to swap what's in the register to match the memory representation.
				when ODIN_ENDIAN == .Little {
					code = intrinsics.byte_swap(code)
				}

				out_ptr := raw_data(output[out:])
				switch pack.bytes_packed {
				case 4:
					intrinsics.unaligned_store(transmute(^u32)out_ptr, code)
				case 2:
					intrinsics.unaligned_store(transmute(^u16)out_ptr, u16(code))
				case 1:
					intrinsics.unaligned_store(transmute(^u8)out_ptr, u8(code))
				case:
					return out, .Unknown_Compression_Method
				}

				out += int(pack.bytes_packed)
				inp += int(pack.bytes_unpacked)
			} else {
				last_resort = true
				continue encode
			}
		}
	}
	return out, nil
}
// Compresses `input` into a newly allocated byte slice.
//
// Returns .Stream_Too_Short for empty input and .Out_Of_Memory when the
// output buffer cannot be allocated; other errors come from
// `compress_string_to_buffer`.
compress_string :: proc(input: string, model := DEFAULT_MODEL, allocator := context.allocator) -> (output: []u8, err: compress.Error) {
	context.allocator = allocator

	if len(input) == 0 {
		return {}, .Stream_Too_Short
	}

	max_output_size := compress_bound(len(input))

	buf: [dynamic]u8
	if !resize(&buf, max_output_size) {
		return {}, .Out_Of_Memory
	}

	// Forward `model`: previously the default model was always used here,
	// silently ignoring a caller-supplied model.
	length, result := compress_string_to_buffer(input, buf[:], model)
	resize(&buf, length)
	return buf[:length], result
}
compress :: proc{compress_string_to_buffer, compress_string}

View File

@@ -0,0 +1,173 @@
package container_intrusive_list
import "core:intrinsics"
// An intrusive doubly-linked list
//
// As this is an intrusive container, a `Node` must be embedded in your own
// structure which is conventionally called a "link". The use of `push_front`
// and `push_back` take the address of this node. Retrieving the data
// associated with the node requires finding the relative offset of the node
// of the parent structure. The parent type and field name are given to
// `iterator_*` procedures, or to the built-in `container_of` procedure.
//
// This data structure is two-pointers in size:
// 8 bytes on 32-bit platforms and 16 bytes on 64-bit platforms
// The list itself: two pointers, both nil when the list is empty.
List :: struct {
	head: ^Node, // first element, or nil
	tail: ^Node, // last element, or nil
}
// The link to embed in a user structure; see the package header comment.
Node :: struct {
	next, prev: ^Node, // neighbouring links; nil at the respective end of the list
}
// Inserts `node` at the head of `list`.
push_front :: proc(list: ^List, node: ^Node) {
	old_head := list.head
	if old_head == nil {
		// Empty list: the node becomes both head and tail.
		list.head, list.tail = node, node
		node.prev, node.next = nil, nil
		return
	}
	old_head.prev = node
	node.prev, node.next = nil, old_head
	list.head = node
}
// Appends `node` at the tail of `list`.
push_back :: proc(list: ^List, node: ^Node) {
	old_tail := list.tail
	if old_tail == nil {
		// Empty list: the node becomes both head and tail.
		list.head, list.tail = node, node
		node.prev, node.next = nil, nil
		return
	}
	old_tail.next = node
	node.prev, node.next = old_tail, nil
	list.tail = node
}
// Unlinks `node` from `list`. Safe to call with a nil node.
// Note: the removed node's own next/prev pointers are left untouched.
remove :: proc(list: ^List, node: ^Node) {
	if node != nil {
		if node.next != nil {
			node.next.prev = node.prev
		}
		if node.prev != nil {
			node.prev.next = node.next
		}
		// Fix up the list ends if the node was head and/or tail.
		if list.head == node {
			list.head = node.next
		}
		if list.tail == node {
			list.tail = node.prev
		}
	}
}
// Removes every node for which `to_erase` returns true.
//
// The successor is captured before the callback runs, so removing the
// current node during iteration is safe.
remove_by_proc :: proc(list: ^List, to_erase: proc(^Node) -> bool) {
	for node := list.head; node != nil; {
		next := node.next
		if to_erase(node) {
			// Delegate to `remove` instead of duplicating the unlink logic.
			remove(list, node)
		}
		node = next
	}
}
// Reports whether `list` contains no nodes.
is_empty :: proc(list: ^List) -> bool {
	return list.head == nil
}
// Detaches and returns the first node, or nil when the list is empty.
// The returned node's next/prev pointers are not cleared.
pop_front :: proc(list: ^List) -> ^Node {
	link := list.head
	if link == nil {
		return nil
	}
	// Delegate to `remove`, which performs exactly the unlink steps this
	// proc previously duplicated inline.
	remove(list, link)
	return link
}
// Detaches and returns the last node, or nil when the list is empty.
// The returned node's next/prev pointers are not cleared.
pop_back :: proc(list: ^List) -> ^Node {
	link := list.tail
	if link == nil {
		return nil
	}
	// Delegate to `remove`, which performs exactly the unlink steps this
	// proc previously duplicated inline.
	remove(list, link)
	return link
}
// Iterator over the parent structures (of type T) that embed the list nodes.
Iterator :: struct($T: typeid) {
	curr:   ^Node,   // node returned by the next iterate_* call; nil = exhausted
	offset: uintptr, // byte offset of the embedded Node field within T
}
// Creates an iterator positioned at the head of `list`.
// `T` is the parent structure type; `field_name` is its embedded Node field.
iterator_head :: proc(list: List, $T: typeid, $field_name: string) -> Iterator(T)
	where intrinsics.type_has_field(T, field_name),
	      intrinsics.type_field_type(T, field_name) == Node {
	return {list.head, offset_of_by_string(T, field_name)}
}
// Creates an iterator positioned at the tail of `list`.
// `T` is the parent structure type; `field_name` is its embedded Node field.
iterator_tail :: proc(list: List, $T: typeid, $field_name: string) -> Iterator(T)
	where intrinsics.type_has_field(T, field_name),
	      intrinsics.type_field_type(T, field_name) == Node {
	return {list.tail, offset_of_by_string(T, field_name)}
}
// Creates an iterator positioned at an arbitrary `node` within a list.
// `T` is the parent structure type; `field_name` is its embedded Node field.
iterator_from_node :: proc(node: ^Node, $T: typeid, $field_name: string) -> Iterator(T)
	where intrinsics.type_has_field(T, field_name),
	      intrinsics.type_field_type(T, field_name) == Node {
	return {node, offset_of_by_string(T, field_name)}
}
// Returns the current element and advances towards the tail.
// Returns (nil, false) once the iteration is exhausted.
iterate_next :: proc(it: ^Iterator($T)) -> (ptr: ^T, ok: bool) {
	node := it.curr
	if node == nil {
		return nil, false
	}
	it.curr = node.next
	// Recover the parent structure from the embedded node's address.
	return (^T)(uintptr(node) - it.offset), true
}
// Returns the current element and advances towards the head.
// Returns (nil, false) once the iteration is exhausted.
iterate_prev :: proc(it: ^Iterator($T)) -> (ptr: ^T, ok: bool) {
	node := it.curr
	if node == nil {
		return nil, false
	}
	it.curr = node.prev
	// Recover the parent structure from the embedded node's address.
	return (^T)(uintptr(node) - it.offset), true
}

View File

@@ -60,19 +60,27 @@ clear :: proc(c: ^$C/Cache($Key, $Value), call_on_remove: bool) {
set :: proc(c: ^$C/Cache($Key, $Value), key: Key, value: Value) -> runtime.Allocator_Error {
if e, ok := c.entries[key]; ok {
e.value = value
_pop_node(c, e)
_push_front_node(c, e)
return nil
}
e := new(Node(Key, Value), c.node_allocator) or_return
e.key = key
e.value = value
_push_front_node(c, e)
if c.count > c.capacity {
_remove_node(c, c.tail)
e : ^Node(Key, Value) = nil
assert(c.count <= c.capacity)
if c.count == c.capacity {
e = c.tail
_remove_node(c, e)
}
else {
c.count += 1
e = new(Node(Key, Value), c.node_allocator) or_return
}
e.key = key
e.value = value
_push_front_node(c, e)
c.entries[key] = e
return nil
}
@@ -122,6 +130,8 @@ remove :: proc(c: ^$C/Cache($Key, $Value), key: Key) -> bool {
return false
}
_remove_node(c, e)
free(node, c.node_allocator)
c.count -= 1
return true
}
@@ -143,14 +153,9 @@ _remove_node :: proc(c: ^$C/Cache($Key, $Value), node: ^Node(Key, Value)) {
node.prev = nil
node.next = nil
c.count -= 1
delete_key(&c.entries, node.key)
_call_on_remove(c, node)
free(node, c.node_allocator)
}
@(private)
@@ -171,8 +176,6 @@ _push_front_node :: proc(c: ^$C/Cache($Key, $Value), e: ^Node(Key, Value)) {
c.tail = e
}
e.prev = nil
c.count += 1
}
@(private)
@@ -180,6 +183,12 @@ _pop_node :: proc(c: ^$C/Cache($Key, $Value), e: ^Node(Key, Value)) {
if e == nil {
return
}
if c.head == e {
c.head = e.next
}
if c.tail == e {
c.tail = e.prev
}
if e.prev != nil {
e.prev.next = e.next
}

View File

@@ -86,7 +86,7 @@ pop_back_safe :: proc(a: ^$A/Small_Array($N, $T)) -> (item: T, ok: bool) {
return
}
pop_front_safe :: proc(a: ^$A/Small_Array($N, $T)) -> (T, bool) {
pop_front_safe :: proc(a: ^$A/Small_Array($N, $T)) -> (item: T, ok: bool) {
if N > 0 && a.len > 0 {
item = a.data[0]
s := slice(a)
@@ -114,4 +114,4 @@ push_back_elems :: proc(a: ^$A/Small_Array($N, $T), items: ..T) {
append_elem :: push_back
append_elems :: push_back_elems
push :: proc{push_back, push_back_elems}
append :: proc{push_back, push_back_elems}
append :: proc{push_back, push_back_elems}

View File

@@ -419,8 +419,10 @@ update :: proc(ctx: ^$T, data: []byte) {
sha2_transf(ctx, shifted_message, block_nb)
rem_len = new_len % CURR_BLOCK_SIZE
when T == Sha256_Context {copy(ctx.block[:], shifted_message[block_nb << 6:rem_len])}
else when T == Sha512_Context {copy(ctx.block[:], shifted_message[block_nb << 7:rem_len])}
if rem_len > 0 {
when T == Sha256_Context {copy(ctx.block[:], shifted_message[block_nb << 6:rem_len])}
else when T == Sha512_Context {copy(ctx.block[:], shifted_message[block_nb << 7:rem_len])}
}
ctx.length = rem_len
when T == Sha256_Context {ctx.tot_len += (block_nb + 1) << 6}

View File

@@ -34,6 +34,10 @@ Reader :: struct {
// If lazy_quotes is true, a quote may appear in an unquoted field and a non-doubled quote may appear in a quoted field
lazy_quotes: bool,
// multiline_fields, when set to true, will treat a field starting with a " as a multiline string
// therefore, instead of reading until the next \n, it'll read until the next "
multiline_fields: bool,
// reuse_record controls whether calls to 'read' may return a slice using the backing buffer
// for performance
// By default, each call to 'read' returns a newly allocated slice
@@ -194,32 +198,72 @@ is_valid_delim :: proc(r: rune) -> bool {
@private
_read_record :: proc(r: ^Reader, dst: ^[dynamic]string, allocator := context.allocator) -> ([]string, Error) {
read_line :: proc(r: ^Reader) -> ([]byte, io.Error) {
line, err := bufio.reader_read_slice(&r.r, '\n')
if err == .Buffer_Full {
clear(&r.raw_buffer)
append(&r.raw_buffer, ..line)
for err == .Buffer_Full {
line, err = bufio.reader_read_slice(&r.r, '\n')
if !r.multiline_fields {
line, err := bufio.reader_read_slice(&r.r, '\n')
if err == .Buffer_Full {
clear(&r.raw_buffer)
append(&r.raw_buffer, ..line)
for err == .Buffer_Full {
line, err = bufio.reader_read_slice(&r.r, '\n')
append(&r.raw_buffer, ..line)
}
line = r.raw_buffer[:]
}
line = r.raw_buffer[:]
}
if len(line) > 0 && err == .EOF {
err = nil
if line[len(line)-1] == '\r' {
line = line[:len(line)-1]
if len(line) > 0 && err == .EOF {
err = nil
if line[len(line)-1] == '\r' {
line = line[:len(line)-1]
}
}
}
r.line_count += 1
r.line_count += 1
// normalize \r\n to \n
n := len(line)
for n >= 2 && string(line[n-2:]) == "\r\n" {
line[n-2] = '\n'
line = line[:n-1]
}
// normalize \r\n to \n
n := len(line)
for n >= 2 && string(line[n-2:]) == "\r\n" {
line[n-2] = '\n'
line = line[:n-1]
}
return line, err
return line, err
} else {
// Reading a "line" that can possibly contain multiline fields.
// Unfortunately, this means we need to read a character at a time.
err: io.Error
cur: rune
is_quoted: bool
field_length := 0
clear(&r.raw_buffer)
read_loop: for err == .None {
cur, _, err = bufio.reader_read_rune(&r.r)
if err != .None { break read_loop }
switch cur {
case '"':
is_quoted = field_length == 0
field_length += 1
case '\n', '\r':
if !is_quoted { break read_loop }
case r.comma:
field_length = 0
case:
field_length += 1
}
rune_buf, rune_len := utf8.encode_rune(cur)
append(&r.raw_buffer, ..rune_buf[:rune_len])
}
return r.raw_buffer[:], err
}
unreachable()
}
length_newline :: proc(b: []byte) -> int {

View File

@@ -0,0 +1,23 @@
/*
Package endian implements a simple translation between bytes and numbers with
specific endian encodings.
buf: [100]u8
put_u16(buf[:], .Little, 16) or_return
You may ask yourself, why isn't `byte_order` platform Endianness by default, so we can write:
put_u16(buf[:], 16) or_return
The answer is that very few file formats are written in native/platform endianness. Most of them specify the endianness of
each of their fields, or use a header field which specifies it for the entire file.
e.g. a file which specifies it at the top for all fields could do this:
file_order := .Little if buf[0] == 0 else .Big
field := get_u16(buf[1:], file_order) or_return
If on the other hand a field is *always* Big-Endian, you're wise to explicitly state it for the benefit of the reader,
be that your future self or someone else.
field := get_u16(buf[:], .Big) or_return
*/
package encoding_endian

View File

@@ -0,0 +1,153 @@
package encoding_endian
// Byte_Order selects the endianness used when reading or writing a byte slice.
Byte_Order :: enum u8 {
	Little,
	Big,
}

// The byte order of the platform we are compiling for.
PLATFORM_BYTE_ORDER :: Byte_Order.Little when ODIN_ENDIAN == .Little else Byte_Order.Big

// Reads a u16 from the first 2 bytes of b using the given byte order.
// ok is false when b holds fewer than 2 bytes.
get_u16 :: proc(b: []byte, order: Byte_Order) -> (v: u16, ok: bool) {
	if len(b) < 2 {
		return 0, false
	}
	#no_bounds_check {
		lo, hi := b[0], b[1]
		if order == .Big {
			lo, hi = hi, lo
		}
		v = u16(lo) | u16(hi)<<8
	}
	return v, true
}
// Reads a u32 from the first 4 bytes of b using the given byte order.
// ok is false when b holds fewer than 4 bytes.
get_u32 :: proc(b: []byte, order: Byte_Order) -> (v: u32, ok: bool) {
	if len(b) < 4 {
		return 0, false
	}
	#no_bounds_check {
		if order == .Little {
			for i in 0..<4 {
				v |= u32(b[i]) << uint(8*i)
			}
		} else {
			for i in 0..<4 {
				v = v<<8 | u32(b[i])
			}
		}
	}
	return v, true
}
// Reads a u64 from the first 8 bytes of b using the given byte order.
// ok is false when b holds fewer than 8 bytes.
get_u64 :: proc(b: []byte, order: Byte_Order) -> (v: u64, ok: bool) {
	if len(b) < 8 {
		return 0, false
	}
	#no_bounds_check {
		if order == .Little {
			for i in 0..<8 {
				v |= u64(b[i]) << uint(8*i)
			}
		} else {
			for i in 0..<8 {
				v = v<<8 | u64(b[i])
			}
		}
	}
	return v, true
}
// Reads an i16; the bytes are fetched as a u16 and reinterpreted as signed.
get_i16 :: proc(b: []byte, order: Byte_Order) -> (i16, bool) {
	v, ok := get_u16(b, order)
	return i16(v), ok
}

// Reads an i32; the bytes are fetched as a u32 and reinterpreted as signed.
get_i32 :: proc(b: []byte, order: Byte_Order) -> (i32, bool) {
	v, ok := get_u32(b, order)
	return i32(v), ok
}

// Reads an i64; the bytes are fetched as a u64 and reinterpreted as signed.
get_i64 :: proc(b: []byte, order: Byte_Order) -> (i64, bool) {
	v, ok := get_u64(b, order)
	return i64(v), ok
}

// Reads an f16; the bytes are fetched as a u16 and bit-cast to float.
get_f16 :: proc(b: []byte, order: Byte_Order) -> (f16, bool) {
	v, ok := get_u16(b, order)
	return transmute(f16)v, ok
}

// Reads an f32; the bytes are fetched as a u32 and bit-cast to float.
get_f32 :: proc(b: []byte, order: Byte_Order) -> (f32, bool) {
	v, ok := get_u32(b, order)
	return transmute(f32)v, ok
}

// Reads an f64; the bytes are fetched as a u64 and bit-cast to float.
get_f64 :: proc(b: []byte, order: Byte_Order) -> (f64, bool) {
	v, ok := get_u64(b, order)
	return transmute(f64)v, ok
}
// Writes v into the first 2 bytes of b using the given byte order.
// Returns false when b holds fewer than 2 bytes.
put_u16 :: proc(b: []byte, order: Byte_Order, v: u16) -> bool {
	if len(b) < 2 {
		return false
	}
	lo, hi := byte(v), byte(v >> 8)
	#no_bounds_check {
		if order == .Little {
			b[0], b[1] = lo, hi
		} else {
			b[0], b[1] = hi, lo
		}
	}
	return true
}
// Writes v into the first 4 bytes of b using the given byte order.
// Returns false when b holds fewer than 4 bytes.
put_u32 :: proc(b: []byte, order: Byte_Order, v: u32) -> bool {
	if len(b) < 4 {
		return false
	}
	#no_bounds_check {
		if order == .Little {
			for i in 0..<4 {
				b[i] = byte(v >> uint(8*i))
			}
		} else {
			for i in 0..<4 {
				b[3-i] = byte(v >> uint(8*i))
			}
		}
	}
	return true
}
// Writes v into the first 8 bytes of b using the given byte order.
// Returns false when b holds fewer than 8 bytes.
put_u64 :: proc(b: []byte, order: Byte_Order, v: u64) -> bool {
	if len(b) < 8 {
		return false
	}
	#no_bounds_check {
		if order == .Little {
			for i in 0..<8 {
				b[i] = byte(v >> uint(8*i))
			}
		} else {
			for i in 0..<8 {
				b[7-i] = byte(v >> uint(8*i))
			}
		}
	}
	return true
}
// Writes an i16; the value is reinterpreted as u16 and stored.
put_i16 :: proc(b: []byte, order: Byte_Order, v: i16) -> bool {
	return put_u16(b, order, u16(v))
}

// Writes an i32; the value is reinterpreted as u32 and stored.
put_i32 :: proc(b: []byte, order: Byte_Order, v: i32) -> bool {
	return put_u32(b, order, u32(v))
}

// Writes an i64; the value is reinterpreted as u64 and stored.
put_i64 :: proc(b: []byte, order: Byte_Order, v: i64) -> bool {
	return put_u64(b, order, u64(v))
}

// Writes an f16; the value is bit-cast to u16 and stored.
put_f16 :: proc(b: []byte, order: Byte_Order, v: f16) -> bool {
	return put_u16(b, order, transmute(u16)v)
}

// Writes an f32; the value is bit-cast to u32 and stored.
put_f32 :: proc(b: []byte, order: Byte_Order, v: f32) -> bool {
	return put_u32(b, order, transmute(u32)v)
}

// Writes an f64; the value is bit-cast to u64 and stored.
put_f64 :: proc(b: []byte, order: Byte_Order, v: f64) -> bool {
	return put_u64(b, order, transmute(u64)v)
}

View File

@@ -0,0 +1,21 @@
# License
By obtaining, using and/or copying this work, you (the licensee) agree that you have read, understood, and will comply with the following terms and conditions.
Permission to copy, modify, and distribute this software and its documentation, with or without modification, for any purpose and without fee or royalty is hereby granted, provided that you include the following on ALL copies of the software and documentation or portions thereof, including modifications:
The full text of this NOTICE in a location viewable to users of the redistributed or derivative work.
Any pre-existing intellectual property disclaimers, notices, or terms and conditions. If none exist, the W3C Software Short Notice should be included (hypertext is preferred, text is permitted) within the body of any redistributed or derivative code.
Notice of any changes or modifications to the files, including the date changes were made. (We recommend you provide URIs to the location from which the code is derived.)
# Disclaimers
THIS SOFTWARE AND DOCUMENTATION IS PROVIDED "AS IS," AND COPYRIGHT HOLDERS MAKE NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO, WARRANTIES OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE OR DOCUMENTATION WILL NOT INFRINGE ANY THIRD PARTY PATENTS, COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS.
COPYRIGHT HOLDERS WILL NOT BE LIABLE FOR ANY DIRECT, INDIRECT, SPECIAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF ANY USE OF THE SOFTWARE OR DOCUMENTATION.
The name and trademarks of copyright holders may NOT be used in advertising or publicity pertaining to the software without specific, written prior permission. Title to copyright in this software and any associated documentation will at all times remain with copyright holders.
# Notes
This version: http://www.w3.org/Consortium/Legal/2002/copyright-software-20021231

View File

@@ -0,0 +1,374 @@
package unicode_entity
/*
A unicode entity encoder/decoder
Copyright 2021 Jeroen van Rijn <nom@duclavier.com>.
Made available under Odin's BSD-3 license.
This code has several procedures to map unicode runes to/from different textual encodings.
- SGML/XML/HTML entity
-- &#<decimal>;
-- &#x<hexadecimal>;
-- &<entity name>; (If the lookup tables are compiled in).
Reference: https://www.w3.org/2003/entities/2007xml/unicode.xml
- URL encode / decode %hex entity
Reference: https://datatracker.ietf.org/doc/html/rfc3986/#section-2.1
List of contributors:
Jeroen van Rijn: Initial implementation.
*/
import "core:unicode/utf8"
import "core:unicode"
import "core:strings"
MAX_RUNE_CODEPOINT :: int(unicode.MAX_RUNE)
write_rune :: strings.write_rune_builder
write_string :: strings.write_string_builder
// Errors produced while scanning input and decoding entities.
Error :: enum u8 {
	None = 0,
	Tokenizer_Is_Nil,        // a nil ^Tokenizer was passed to advance
	Illegal_NUL_Character,   // a 0x00 byte in the input
	Illegal_UTF_Encoding,    // malformed UTF-8 sequence
	Illegal_BOM,             // a BOM anywhere but the very start of the input
	CDATA_Not_Terminated,    // input ended inside <![CDATA[ ... ]]>
	Comment_Not_Terminated,  // input ended inside <!-- ... -->
	Invalid_Entity_Encoding, // & without a terminating ;
}
// Tokenizer is a minimal rune-at-a-time scanner over an input string.
Tokenizer :: struct {
	r: rune,          // current rune; -1 once the input is exhausted
	w: int,           // byte width of the current rune
	src: string,      // input being scanned
	offset: int,      // byte offset of the current rune
	read_offset: int, // byte offset of the next rune to read
}
CDATA_START :: "<![CDATA["
CDATA_END :: "]]>"
COMMENT_START :: "<!--"
COMMENT_END :: "-->"
/*
Default: CDATA and comments are passed through unchanged.
*/
XML_Decode_Option :: enum u8 {
/*
Do not decode & entities. It decodes by default.
If given, overrides `Decode_CDATA`.
*/
No_Entity_Decode,
/*
CDATA is unboxed.
*/
Unbox_CDATA,
/*
Unboxed CDATA is decoded as well.
Ignored if `.Unbox_CDATA` is not given.
*/
Decode_CDATA,
/*
Comments are stripped.
*/
Comment_Strip,
}
XML_Decode_Options :: bit_set[XML_Decode_Option; u8]
/*
	Decode a string that may include SGML/XML/HTML entities, e.g. `&amp;`,
	`&#169;`, `&#xA9;`. CDATA sections and comments are handled according to
	`options` (passed through by default).
	The caller has to free the result.
*/
decode_xml :: proc(input: string, options := XML_Decode_Options{}, allocator := context.allocator) -> (decoded: string, err: Error) {
	context.allocator = allocator

	l := len(input)
	if l == 0 { return "", .None }

	builder := strings.make_builder()
	defer strings.destroy_builder(&builder)

	t := Tokenizer{src=input}
	// True while we're inside an unboxed CDATA section that is being decoded.
	in_data := false

	loop: for {
		advance(&t) or_return
		if t.r < 0 { break loop }

		/*
			Below here we're never inside a CDATA tag.
			At most we'll see the start of one, but that doesn't affect the logic.
		*/
		switch t.r {
		case '<':
			/*
				Might be the start of a CDATA tag or comment.
				We don't need to check if we need to write a `<`, because if it isn't CDATA or a comment,
				it couldn't have been part of an XML tag body to be decoded here.

				Keep in mind that we could already *be* inside a CDATA tag.
				If so, write `<` as a literal and continue.
			*/
			if in_data {
				write_rune(&builder, '<')
				continue
			}
			in_data = _handle_xml_special(&t, &builder, options) or_return

		case ']':
			/*
				If we're unboxing _and_ decoding CDATA, we'll have to check for the end tag.
			*/
			if in_data {
				// NOTE(review): `<` here (and in _handle_xml_special) means an end
				// tag flush against the end of input may be missed — confirm
				// whether `<=` was intended.
				if t.read_offset + len(CDATA_END) < len(t.src) {
					if string(t.src[t.offset:][:len(CDATA_END)]) == CDATA_END {
						in_data = false
						t.read_offset += len(CDATA_END) - 1
					}
				}
				continue
			} else {
				write_rune(&builder, ']')
			}

		case:
			if in_data && .Decode_CDATA not_in options {
				/*
					Unboxed, but undecoded.
				*/
				write_rune(&builder, t.r)
				continue
			}

			if t.r == '&' {
				if entity, entity_err := _extract_xml_entity(&t); entity_err != .None {
					/*
						We read to the end of the string without closing the entity.
						Pass through as-is.
					*/
					write_string(&builder, entity)
				} else {
					if .No_Entity_Decode not_in options {
						if decoded, ok := xml_decode_entity(entity); ok {
							write_rune(&builder, decoded)
							continue
						}
					}
					/*
						Literal passthrough because the decode failed or we want entities not decoded.
					*/
					write_string(&builder, "&")
					write_string(&builder, entity)
					write_string(&builder, ";")
				}
			} else {
				write_rune(&builder, t.r)
			}
		}
	}
	return strings.clone(strings.to_string(builder), allocator), err
}
// Advances the tokenizer by one rune.
// On success `t.r`/`t.w` hold the decoded rune and its byte width, and
// `t.offset`/`t.read_offset` move forward. At end of input `t.r` is set to -1.
// Errors on NUL bytes, malformed UTF-8, and a BOM after the first rune.
advance :: proc(t: ^Tokenizer) -> (err: Error) {
	if t == nil { return .Tokenizer_Is_Nil }
	using t

	#no_bounds_check {
		if read_offset < len(src) {
			offset = read_offset
			// Assume a single-byte (ASCII) rune first; fix up below if not.
			r, w = rune(src[read_offset]), 1
			switch {
			case r == 0:
				return .Illegal_NUL_Character
			case r >= utf8.RUNE_SELF:
				// Multi-byte rune: decode it properly.
				r, w = utf8.decode_rune_in_string(src[read_offset:])
				if r == utf8.RUNE_ERROR && w == 1 {
					return .Illegal_UTF_Encoding
				} else if r == utf8.RUNE_BOM && offset > 0 {
					// A BOM is only legal as the very first rune of the input.
					return .Illegal_BOM
				}
			}
			read_offset += w
			return .None
		} else {
			// Exhausted: flag end-of-input with a negative rune.
			offset = len(src)
			r = -1
			return
		}
	}
}
// Decodes a single entity body (the text between `&` and `;`) into a rune.
// Numeric entities (`#<decimal>` and `#x<hexadecimal>`) are decoded here;
// named entities (e.g. `amp`) are delegated to `named_xml_entity_to_rune`.
// ok is false for an empty, malformed, or out-of-range entity.
xml_decode_entity :: proc(entity: string) -> (decoded: rune, ok: bool) {
	entity := entity
	if len(entity) == 0 { return -1, false }

	switch entity[0] {
	case '#':
		base := 10
		val := 0

		entity = entity[1:]
		if len(entity) == 0 { return -1, false }

		if entity[0] == 'x' || entity[0] == 'X' {
			base = 16
			entity = entity[1:]
			// Bug fix: a bare `&#x;` with no digits previously fell through the
			// digit loop and decoded to rune(0) with ok = true.
			if len(entity) == 0 { return -1, false }
		}

		for len(entity) > 0 {
			r := entity[0]
			switch r {
			case '0'..'9':
				val *= base
				val += int(r - '0')
			case 'a'..'f':
				// Hex digits are invalid in a decimal entity.
				if base == 10 { return -1, false }
				val *= base
				val += int(r - 'a' + 10)
			case 'A'..'F':
				if base == 10 { return -1, false }
				val *= base
				val += int(r - 'A' + 10)
			case:
				return -1, false
			}
			// Bail out as soon as the value exceeds the largest valid codepoint.
			if val > MAX_RUNE_CODEPOINT { return -1, false }
			entity = entity[1:]
		}
		return rune(val), true

	case:
		/*
			Named entity.
		*/
		return named_xml_entity_to_rune(entity)
	}
}
/*
Private XML helper to extract `&<stuff>;` entity.
*/
// Extracts the body of a `&<stuff>;` entity; the tokenizer must sit on the `&`.
// On success the returned string excludes both delimiters. If no terminating
// `;` is found before the end of input, the raw remainder (including `&`) is
// returned with `.Invalid_Entity_Encoding`.
@(private="file")
_extract_xml_entity :: proc(t: ^Tokenizer) -> (entity: string, err: Error) {
	assert(t != nil && t.r == '&')

	/*
		All of these would be in the ASCII range.
		Even if one is not, it doesn't matter. All characters we need to compare to extract are.
	*/
	using t

	length := len(t.src)
	found := false

	#no_bounds_check {
		for read_offset < length {
			if src[read_offset] == ';' {
				found = true
				read_offset += 1
				break
			}
			read_offset += 1
		}
	}

	if found {
		// Strip the leading `&` and the trailing `;`.
		return string(src[offset + 1 : read_offset - 1]), .None
	}
	return string(src[offset : read_offset]), .Invalid_Entity_Encoding
}
/*
Private XML helper for CDATA and comments.
*/
// Handles `<![CDATA[ ... ]]>` and `<!-- ... -->` when the tokenizer sits on `<`.
// Depending on `options`, CDATA is passed through verbatim, unboxed, or left
// to the caller to decode; comments are passed through or stripped.
// Returns in_data = true when the caller should keep decoding inside an
// unboxed CDATA section.
@(private="file")
_handle_xml_special :: proc(t: ^Tokenizer, builder: ^strings.Builder, options: XML_Decode_Options) -> (in_data: bool, err: Error) {
	assert(t != nil && t.r == '<')

	// Not enough input left for a CDATA opener; this bound also covers the
	// shorter comment opener checked below.
	if t.read_offset + len(CDATA_START) >= len(t.src) { return false, .None }

	if string(t.src[t.offset:][:len(CDATA_START)]) == CDATA_START {
		t.read_offset += len(CDATA_START) - 1

		if .Unbox_CDATA in options && .Decode_CDATA in options {
			/*
				We're unboxing _and_ decoding CDATA
			*/
			return true, .None
		}

		/*
			CDATA is passed through.
		*/
		offset := t.offset

		/*
			Scan until end of CDATA.
		*/
		for {
			advance(t) or_return
			if t.r < 0 { return true, .CDATA_Not_Terminated }

			if t.read_offset + len(CDATA_END) < len(t.src) {
				if string(t.src[t.offset:][:len(CDATA_END)]) == CDATA_END {
					t.read_offset += len(CDATA_END) - 1

					cdata := string(t.src[offset : t.read_offset])
					if .Unbox_CDATA in options {
						// Strip the `<![CDATA[` / `]]>` delimiters.
						cdata = cdata[len(CDATA_START):]
						cdata = cdata[:len(cdata) - len(CDATA_END)]
					}
					write_string(builder, cdata)
					return false, .None
				}
			}
		}
	} else if string(t.src[t.offset:][:len(COMMENT_START)]) == COMMENT_START {
		// NOTE(review): the CDATA branch advances by len - 1 while this one
		// advances by the full length — confirm the asymmetry is intentional.
		t.read_offset += len(COMMENT_START)

		/*
			Comment is passed through by default.
		*/
		offset := t.offset

		/*
			Scan until end of Comment.
		*/
		for {
			advance(t) or_return
			if t.r < 0 { return true, .Comment_Not_Terminated }

			if t.read_offset + len(COMMENT_END) < len(t.src) {
				if string(t.src[t.offset:][:len(COMMENT_END)]) == COMMENT_END {
					t.read_offset += len(COMMENT_END) - 1

					if .Comment_Strip not_in options {
						comment := string(t.src[offset : t.read_offset])
						write_string(builder, comment)
					}
					return false, .None
				}
			}
		}
	}
	return false, .None
}

View File

@@ -0,0 +1,76 @@
package unicode_entity_example
import "core:encoding/xml"
import "core:strings"
import "core:mem"
import "core:fmt"
import "core:time"
// Renders the document tree to stdout via the debug printer.
doc_print :: proc(doc: ^xml.Document) {
	sb: strings.Builder
	defer strings.destroy_builder(&sb)

	xml.print(strings.to_writer(&sb), doc)
	fmt.println(strings.to_string(sb))
}
// Parses the bundled `unicode.xml` test asset, prints the resulting document
// tree, and reports parse time and throughput.
_entities :: proc() {
	doc: ^xml.Document
	err: xml.Error

	// Asset path is relative to this example's source file.
	DOC :: #load("../../../../tests/core/assets/XML/unicode.xml")

	OPTIONS :: xml.Options{
		flags = {
			.Ignore_Unsupported, .Intern_Comments,
		},
		expected_doctype = "",
	}

	parse_duration: time.Duration
	{
		// Times just the parse, not the printing below.
		time.SCOPED_TICK_DURATION(&parse_duration)
		doc, err = xml.parse(DOC, OPTIONS)
	}
	defer xml.destroy(doc)

	doc_print(doc)

	ms := time.duration_milliseconds(parse_duration)
	speed := (f64(1000.0) / ms) * f64(len(DOC)) / 1_024.0 / 1_024.0

	fmt.printf("Parse time: %.2f ms (%.2f MiB/s).\n", ms, speed)
	fmt.printf("Error: %v\n", err)
}
// Parses the bundled `test.html` with CDATA unboxing and entity decoding
// enabled, then prints the resulting document tree.
_main :: proc() {
	using fmt

	options := xml.Options{ flags = { .Ignore_Unsupported, .Intern_Comments, .Unbox_CDATA, .Decode_SGML_Entities }}

	doc, _ := xml.parse(#load("test.html"), options)
	defer xml.destroy(doc)

	doc_print(doc)
}
// Entry point: runs the entity example under a tracking allocator and reports
// any leaked allocations afterwards.
main :: proc() {
	using fmt

	track: mem.Tracking_Allocator
	mem.tracking_allocator_init(&track, context.allocator)
	context.allocator = mem.tracking_allocator(&track)

	// _main()
	_entities()

	if len(track.allocation_map) > 0 {
		println()
		for _, v in track.allocation_map {
			printf("%v Leaked %v bytes.\n", v.location, v.size)
		}
	}
}

View File

@@ -0,0 +1,28 @@
<html>
<head>
<title>Entity Reference Test</title>
<style>
body {
background: #000; color: #eee;
width: 40%;
margin-left: auto;
margin-right: auto;
font-size: 14pt;
}
</style>
</head>
<body>
<h1>Entity Reference Test</h1>
<div id="test_cdata_in_comment" foo="">
Foozle]!&#32;&copy;&#x20;<!-- <![CDATA[&#32;&reg;&#x20;]]> -->42&;1234&
</div>
<!-- EXPECTED: Foozle]! © 42&;1234& -->
<div id="test_cdata_unwrap_and_passthrough">
Foozle]!&#32;&copy;&#x20;<![CDATA[BOX&#32;&reg;&#x20;/BOX]]>42&;1234&
</div>
<!-- EXPECTED: Foozle]! © BOX ® /BOX42&;1234& -->
<div>
&verbar; &vert; &VerticalLine; &fjlig; &grave; &bsol; &reg; &rhov; &CounterClockwiseContourIntegral; &bsemi;
</div>
</body>
</html>

File diff suppressed because it is too large Load Diff

View File

@@ -354,6 +354,12 @@ unquote_string :: proc(token: Token, spec: Specification, allocator := context.a
b := bytes_make(len(s) + 2*utf8.UTF_MAX, 1, allocator) or_return
w := copy(b, s[0:i])
if len(b) == 0 && allocator.data == nil {
// `unmarshal_count_array` calls us with a nil allocator
return string(b[:w]), nil
}
loop: for i < len(s) {
c := s[i]
switch {

View File

@@ -4,24 +4,25 @@
Author of this Odin package: Jeroen van Rijn
Example:
```odin
import "core:encoding/varint"
import "core:fmt"
```odin
import "core:encoding/varint"
import "core:fmt"
main :: proc() {
buf: [varint.LEB128_MAX_BYTES]u8
main :: proc() {
buf: [varint.LEB128_MAX_BYTES]u8
value := u128(42)
value := u128(42)
encode_size, encode_err := varint.encode_uleb128(buf[:], value)
assert(encode_size == 1 && encode_err == .None)
encode_size, encode_err := varint.encode_uleb128(buf[:], value)
assert(encode_size == 1 && encode_err == .None)
fmt.println(buf[:encode_size])
fmt.printf("Encoded as %v\n", buf[:encode_size])
decoded_val, decode_size, decode_err := varint.decode_uleb128(buf[:])
decoded_val, decode_size, decode_err := varint.decode_uleb128(buf[:encode_size])
assert(decoded_val == value && decode_size == encode_size && decode_err == .None)
}
```
assert(decoded_val == value && decode_size == encode_size && decode_err == .None)
fmt.printf("Decoded as %v, using %v byte%v\n", decoded_val, decode_size, "" if decode_size == 1 else "s")
}
```
*/
package varint

View File

@@ -10,12 +10,10 @@
// the LEB128 format as used by DWARF debug info, Android .dex and other file formats.
package varint
import "core:fmt"
// In theory we should use the bigint package. In practice, varints bigger than this indicate a corrupted file.
// Instead we'll set limits on the values we'll encode/decode
// 18 * 7 bits = 126, which means that a possible 19th byte may at most be `0b0000_0011`.
LEB128_MAX_BYTES :: 19
LEB128_MAX_BYTES :: 19
Error :: enum {
None = 0,
@@ -25,61 +23,90 @@ Error :: enum {
// Decode a slice of bytes encoding an unsigned LEB128 integer into value and number of bytes used.
// Returns `size` == 0 for an invalid value, empty slice, or a varint > 18 bytes.
decode_uleb128 :: proc(buf: []u8) -> (val: u128, size: int, err: Error) {
more := true
for v, i in buf {
size = i + 1
// 18 * 7 bits = 126, which means that a possible 19th byte may at most be 0b0000_0011.
if size > LEB128_MAX_BYTES || size == LEB128_MAX_BYTES && v > 0b0000_0011 {
return 0, 0, .Value_Too_Large
}
val |= u128(v & 0x7f) << uint(i * 7)
if v < 128 {
more = false
break
}
}
// If the buffer runs out before the number ends, return an error.
if more {
return 0, 0, .Buffer_Too_Small
}
return
}
// Decode a slice of bytes encoding a signed LEB128 integer into value and number of bytes used.
// Returns `size` == 0 for an invalid value, empty slice, or a varint > 18 bytes.
decode_ileb128 :: proc(buf: []u8) -> (val: i128, size: int, err: Error) {
shift: uint
decode_uleb128_buffer :: proc(buf: []u8) -> (val: u128, size: int, err: Error) {
if len(buf) == 0 {
return 0, 0, .Buffer_Too_Small
}
for v in buf {
size += 1
// 18 * 7 bits = 126, which including sign means we can have a 19th byte.
if size > LEB128_MAX_BYTES || size == LEB128_MAX_BYTES && v > 0x7f {
return 0, 0, .Value_Too_Large
val, size, err = decode_uleb128_byte(v, size, val)
if err != .Buffer_Too_Small {
return
}
val |= i128(v & 0x7f) << shift
shift += 7
if v < 128 { break }
}
if buf[size - 1] & 0x40 == 0x40 {
val |= max(i128) << shift
if err == .Buffer_Too_Small {
val, size = 0, 0
}
return
}
// Decodes an unsigned LEB128 integer into value a byte at a time.
// Returns `.None` when decoded properly, `.Value_Too_Large` when they value
// exceeds the limits of a u128, and `.Buffer_Too_Small` when it's not yet fully decoded.
decode_uleb128_byte :: proc(input: u8, offset: int, accumulator: u128) -> (val: u128, size: int, err: Error) {
size = offset + 1
// 18 * 7 bits = 126, which means that a possible 19th byte may at most be 0b0000_0011.
if size > LEB128_MAX_BYTES || size == LEB128_MAX_BYTES && input > 0b0000_0011 {
return 0, 0, .Value_Too_Large
}
val = accumulator | u128(input & 0x7f) << uint(offset * 7)
if input < 128 {
// We're done
return
}
// If the buffer runs out before the number ends, return an error.
return val, size, .Buffer_Too_Small
}
decode_uleb128 :: proc {decode_uleb128_buffer, decode_uleb128_byte}
// Decode a slice of bytes encoding a signed LEB128 integer into value and number of bytes used.
// Returns `size` == 0 for an invalid value, empty slice, or a varint > 18 bytes.
decode_ileb128_buffer :: proc(buf: []u8) -> (val: i128, size: int, err: Error) {
if len(buf) == 0 {
return 0, 0, .Buffer_Too_Small
}
for v in buf {
val, size, err = decode_ileb128_byte(v, size, val)
if err != .Buffer_Too_Small {
return
}
}
if err == .Buffer_Too_Small {
val, size = 0, 0
}
return
}
// Decode a a signed LEB128 integer into value and number of bytes used, one byte at a time.
// Returns `size` == 0 for an invalid value, empty slice, or a varint > 18 bytes.
decode_ileb128_byte :: proc(input: u8, offset: int, accumulator: i128) -> (val: i128, size: int, err: Error) {
size = offset + 1
shift := uint(offset * 7)
// 18 * 7 bits = 126, which including sign means we can have a 19th byte.
if size > LEB128_MAX_BYTES || size == LEB128_MAX_BYTES && input > 0x7f {
return 0, 0, .Value_Too_Large
}
val = accumulator | i128(input & 0x7f) << shift
if input < 128 {
if input & 0x40 == 0x40 {
val |= max(i128) << (shift + 7)
}
return val, size, .None
}
return val, size, .Buffer_Too_Small
}
decode_ileb128 :: proc{decode_ileb128_buffer, decode_ileb128_byte}
// Encode `val` into `buf` as an unsigned LEB128 encoded series of bytes.
// `buf` must be appropriately sized.
encode_uleb128 :: proc(buf: []u8, val: u128) -> (size: int, err: Error) {
@@ -89,7 +116,6 @@ encode_uleb128 :: proc(buf: []u8, val: u128) -> (size: int, err: Error) {
size += 1
if size > len(buf) {
fmt.println(val, buf[:size - 1])
return 0, .Buffer_Too_Small
}
@@ -106,14 +132,12 @@ encode_uleb128 :: proc(buf: []u8, val: u128) -> (size: int, err: Error) {
return
}
@(private)
SIGN_MASK :: (i128(1) << 121) // sign extend mask
// Encode `val` into `buf` as a signed LEB128 encoded series of bytes.
// `buf` must be appropriately sized.
encode_ileb128 :: proc(buf: []u8, val: i128) -> (size: int, err: Error) {
val := val
more := true
SIGN_MASK :: i128(1) << 121 // sign extend mask
val, more := val, true
for more {
size += 1

View File

@@ -0,0 +1,86 @@
/*
An XML 1.0 / 1.1 parser
Copyright 2021-2022 Jeroen van Rijn <nom@duclavier.com>.
Made available under Odin's BSD-3 license.
A from-scratch XML implementation, loosely modeled on the [spec](https://www.w3.org/TR/2006/REC-xml11-20060816).
List of contributors:
Jeroen van Rijn: Initial implementation.
*/
package xml
import "core:io"
import "core:fmt"
/*
Just for debug purposes.
*/
// Writes a human-readable dump of the whole document to `writer`.
// Returns the total number of bytes written; `err` is currently always `.None`.
print :: proc(writer: io.Writer, doc: ^Document) -> (written: int, err: io.Error) {
	if doc == nil { return }
	using fmt

	written += wprintf(writer, "[XML Prolog]\n")
	for attr in doc.prologue {
		written += wprintf(writer, "\t%v: %v\n", attr.key, attr.val)
	}

	written += wprintf(writer, "[Encoding] %v\n", doc.encoding)

	if len(doc.doctype.ident) > 0 {
		written += wprintf(writer, "[DOCTYPE] %v\n", doc.doctype.ident)
		if len(doc.doctype.rest) > 0 {
			// Bug fix: this write was previously not counted in `written`.
			written += wprintf(writer, "\t%v\n", doc.doctype.rest)
		}
	}

	for comment in doc.comments {
		written += wprintf(writer, "[Pre-root comment] %v\n", comment)
	}

	if len(doc.elements) > 0 {
		// Bug fix: the separators and the element tree were previously not
		// counted in `written`.
		written += wprintln(writer, " --- ")
		n, _ := print_element(writer, doc, 0)
		written += n
		written += wprintln(writer, " --- ")
	}

	return written, .None
}
// Recursively writes one element (and its children) to `writer`, indented by
// `indent` levels. Returns the total number of bytes written; `err` is
// currently always `.None`.
print_element :: proc(writer: io.Writer, doc: ^Document, element_id: Element_ID, indent := 0) -> (written: int, err: io.Error) {
	using fmt

	// Writes `indent + 1` tab characters; returns the byte count so the caller
	// can account for it.
	tab :: proc(writer: io.Writer, indent: int) -> (n: int) {
		for _ in 0..=indent {
			n += wprintf(writer, "\t")
		}
		return
	}

	written += tab(writer, indent)

	element := doc.elements[element_id]

	if element.kind == .Element {
		// Bug fix: none of the writes below (nor the recursive calls) were
		// previously counted in `written`.
		written += wprintf(writer, "<%v>\n", element.ident)
		if len(element.value) > 0 {
			written += tab(writer, indent + 1)
			written += wprintf(writer, "[Value] %v\n", element.value)
		}

		for attr in element.attribs {
			written += tab(writer, indent + 1)
			written += wprintf(writer, "[Attr] %v: %v\n", attr.key, attr.val)
		}

		for child in element.children {
			n, _ := print_element(writer, doc, child, indent + 1)
			written += n
		}
	} else if element.kind == .Comment {
		written += wprintf(writer, "[COMMENT] %v\n", element.value)
	}

	return written, .None
}

View File

@@ -0,0 +1,112 @@
package xml_example
import "core:encoding/xml"
import "core:mem"
import "core:fmt"
import "core:time"
import "core:strings"
import "core:hash"
N :: 1
// Parses the bundled `unicode.xml` asset N times, reports fastest/slowest/
// average parse time and throughput, then sanity-checks the parsed tree
// against a known CRC32.
example :: proc() {
	using fmt

	docs:  [N]^xml.Document
	errs:  [N]xml.Error
	times: [N]time.Duration

	// Single point of cleanup: every parsed document is destroyed exactly once
	// when this procedure returns. (Bug fix: a second destruction loop at the
	// bottom of this procedure used to free each document twice; removed.)
	defer for round in 0..<N {
		xml.destroy(docs[round])
	}

	DOC :: #load("../../../../tests/core/assets/XML/unicode.xml")
	input := DOC

	// Parse the same document N times, timing each round.
	for round in 0..<N {
		start := time.tick_now()
		docs[round], errs[round] = xml.parse(input, xml.Options{
			flags={.Ignore_Unsupported},
			expected_doctype = "",
		})
		end := time.tick_now()
		times[round] = time.tick_diff(start, end)
	}

	// Aggregate fastest / slowest / average round times.
	fastest := max(time.Duration)
	slowest := time.Duration(0)
	total   := time.Duration(0)

	for round in 0..<N {
		fastest = min(fastest, times[round])
		slowest = max(slowest, times[round])
		total  += times[round]
	}

	fastest_ms := time.duration_milliseconds(fastest)
	slowest_ms := time.duration_milliseconds(slowest)
	average_ms := time.duration_milliseconds(time.Duration(f64(total) / f64(N)))

	fastest_speed := (f64(1000.0) / fastest_ms) * f64(len(DOC)) / 1_024.0 / 1_024.0
	slowest_speed := (f64(1000.0) / slowest_ms) * f64(len(DOC)) / 1_024.0 / 1_024.0
	average_speed := (f64(1000.0) / average_ms) * f64(len(DOC)) / 1_024.0 / 1_024.0

	fmt.printf("N = %v\n", N)
	fmt.printf("[Fastest]: %v bytes in %.2f ms (%.2f MiB/s).\n", len(input), fastest_ms, fastest_speed)
	fmt.printf("[Slowest]: %v bytes in %.2f ms (%.2f MiB/s).\n", len(input), slowest_ms, slowest_speed)
	fmt.printf("[Average]: %v bytes in %.2f ms (%.2f MiB/s).\n", len(input), average_ms, average_speed)

	if errs[0] != .None {
		printf("Load/Parse error: %v\n", errs[0])
		if errs[0] == .File_Error {
			println("\"unicode.xml\" not found. Did you run \"tests\\download_assets.py\"?")
		}
		return
	}

	charlist, charlist_ok := xml.find_child_by_ident(docs[0], 0, "charlist")
	if !charlist_ok {
		eprintln("Could not locate top-level `<charlist>` tag.")
		return
	}

	printf("Found `<charlist>` with %v children, %v elements total\n", len(docs[0].elements[charlist].children), docs[0].element_count)

	crc32 := doc_hash(docs[0])
	printf("[%v] CRC32: 0x%08x\n", "🎉" if crc32 == 0xcaa042b9 else "🤬", crc32)
}
// Serializes the document through the debug printer and returns the CRC32 of
// the resulting text. Optionally prints the serialized tree as well.
doc_hash :: proc(doc: ^xml.Document, print := false) -> (crc32: u32) {
	sb: strings.Builder
	defer strings.destroy_builder(&sb)

	xml.print(strings.to_writer(&sb), doc)

	tree := strings.to_string(sb)
	if print {
		fmt.println(tree)
	}
	return hash.crc32(transmute([]u8)tree)
}
// Entry point: runs the benchmark/example under a tracking allocator and
// reports any leaked allocations afterwards.
main :: proc() {
	using fmt

	track: mem.Tracking_Allocator
	mem.tracking_allocator_init(&track, context.allocator)
	context.allocator = mem.tracking_allocator(&track)

	example()

	if len(track.allocation_map) > 0 {
		println()
		for _, v in track.allocation_map {
			printf("%v Leaked %v bytes.\n", v.location, v.size)
		}
	}
	println("Done and cleaned up!")
}

View File

@@ -0,0 +1,45 @@
/*
An XML 1.0 / 1.1 parser
Copyright 2021-2022 Jeroen van Rijn <nom@duclavier.com>.
Made available under Odin's BSD-3 license.
This file contains helper functions.
*/
package xml
// Find parent's nth child with a given ident.
// Return the id of `parent_id`'s nth (0-based) child element named `ident`.
// Comment nodes are ignored. `found` is false when no such child exists.
find_child_by_ident :: proc(doc: ^Document, parent_id: Element_ID, ident: string, nth := 0) -> (res: Element_ID, found: bool) {
	parent := doc.elements[parent_id]

	matches := 0
	for child_id in parent.children {
		child := doc.elements[child_id]

		// Comments have no name, so only element nodes can match.
		if child.kind != .Element { continue }
		if child.ident != ident   { continue }

		// The nth match is the one we're after.
		if matches == nth {
			return child_id, true
		}
		matches += 1
	}
	return 0, false
}
// Find an attribute by key.
// Look up the value of the attribute named `key` on element `parent_id`.
// Attribute keys are unique per element, so the first hit is the only one.
find_attribute_val_by_key :: proc(doc: ^Document, parent_id: Element_ID, key: string) -> (val: string, found: bool) {
	for attr in doc.elements[parent_id].attribs {
		if attr.key == key {
			return attr.val, true
		}
	}
	return "", false
}

View File

@@ -0,0 +1,436 @@
/*
An XML 1.0 / 1.1 parser
Copyright 2021-2022 Jeroen van Rijn <nom@duclavier.com>.
Made available under Odin's BSD-3 license.
A from-scratch XML implementation, loosely modeled on the [spec](https://www.w3.org/TR/2006/REC-xml11-20060816).
List of contributors:
Jeroen van Rijn: Initial implementation.
*/
package xml
import "core:fmt"
import "core:unicode"
import "core:unicode/utf8"
Error_Handler :: #type proc(pos: Pos, fmt: string, args: ..any)
Token :: struct {
kind: Token_Kind,
text: string,
pos: Pos,
}
Pos :: struct {
file: string,
offset: int, // starting at 0
line: int, // starting at 1
column: int, // starting at 1
}
Token_Kind :: enum {
Invalid,
Ident,
Literal,
Rune,
String,
Double_Quote, // "
Single_Quote, // '
Colon, // :
Eq, // =
Lt, // <
Gt, // >
Exclaim, // !
Question, // ?
Hash, // #
Slash, // /
Dash, // -
Open_Bracket, // [
Close_Bracket, // ]
EOF,
}
CDATA_START :: "<![CDATA["
CDATA_END :: "]]>"
COMMENT_START :: "<!--"
COMMENT_END :: "-->"
Tokenizer :: struct {
// Immutable data
path: string,
src: string,
err: Error_Handler,
// Tokenizing state
ch: rune,
offset: int,
read_offset: int,
line_offset: int,
line_count: int,
// Mutable data
error_count: int,
}
// Prepare tokenizer `t` to scan `src`. Errors are reported through `err`,
// with `path` embedded in the reported positions. Pre-loads the first rune
// and skips a leading byte order mark if present.
init :: proc(t: ^Tokenizer, src: string, path: string, err: Error_Handler = default_error_handler) {
	t.src = src
	t.err = err
	t.ch = ' '
	t.offset = 0
	t.read_offset = 0
	t.line_offset = 0
	// An empty input has zero lines; anything else starts on line 1.
	t.line_count = len(src) > 0 ? 1 : 0
	t.error_count = 0
	t.path = path

	advance_rune(t)
	// A leading BOM is not part of the document; skip it.
	if t.ch == utf8.RUNE_BOM {
		advance_rune(t)
	}
}
// Convert a byte offset into a file/line/column position using the
// tokenizer's current line bookkeeping.
@(private)
offset_to_pos :: proc(t: ^Tokenizer, offset: int) -> Pos {
	// Column is the 1-based distance from the start of the current line.
	return Pos{
		file   = t.path,
		offset = offset,
		line   = t.line_count,
		column = offset - t.line_offset + 1,
	}
}
// Default error sink: writes `path(line:column) message` plus a newline
// to stderr.
default_error_handler :: proc(pos: Pos, msg: string, args: ..any) {
	fmt.eprintf("%s(%d:%d) ", pos.file, pos.line, pos.column)
	fmt.eprintf(msg, ..args)
	fmt.eprintf("\n")
}
// Report an error at `offset` through the installed handler (if any)
// and bump the tokenizer's error count.
error :: proc(t: ^Tokenizer, offset: int, msg: string, args: ..any) {
	if t.err != nil {
		t.err(offset_to_pos(t, offset), msg, ..args)
	}
	t.error_count += 1
}
/*
	Advance to the next rune in the input, maintaining line bookkeeping.
	On end of input, `ch` is set to -1. Illegal NUL bytes, invalid UTF-8
	sequences and misplaced byte order marks are reported through the
	error handler but scanning continues.
*/
@(optimization_mode="speed")
advance_rune :: proc(using t: ^Tokenizer) {
	#no_bounds_check {
		/*
			Already bounds-checked here.
		*/
		if read_offset < len(src) {
			offset = read_offset
			// Having just consumed a newline means a new line starts here.
			if ch == '\n' {
				line_offset = offset
				line_count += 1
			}
			// Assume a single-byte rune; decode properly if it isn't ASCII.
			r, w := rune(src[read_offset]), 1
			switch {
			case r == 0:
				error(t, t.offset, "illegal character NUL")
			case r >= utf8.RUNE_SELF:
				r, w = #force_inline utf8.decode_rune_in_string(src[read_offset:])
				if r == utf8.RUNE_ERROR && w == 1 {
					error(t, t.offset, "illegal UTF-8 encoding")
				} else if r == utf8.RUNE_BOM && offset > 0 {
					// A BOM is only legal as the very first rune.
					error(t, t.offset, "illegal byte order mark")
				}
			}
			read_offset += w
			ch = r
		} else {
			offset = len(src)
			if ch == '\n' {
				line_offset = offset
				line_count += 1
			}
			// -1 signals end of input to the scanner.
			ch = -1
		}
	}
}
// Return the byte `offset` bytes past the read cursor, or 0 past EOF.
peek_byte :: proc(t: ^Tokenizer, offset := 0) -> byte {
	idx := t.read_offset + offset
	if idx >= len(t.src) {
		return 0
	}
	#no_bounds_check return t.src[idx]
}
// Consume any run of spaces, tabs, carriage returns and newlines.
@(optimization_mode="speed")
skip_whitespace :: proc(t: ^Tokenizer) {
	for {
		if t.ch != ' ' && t.ch != '\t' && t.ch != '\r' && t.ch != '\n' {
			return
		}
		advance_rune(t)
	}
}
// Report whether `r` can start an identifier: `_`, an ASCII letter,
// or any Unicode letter.
@(optimization_mode="speed")
is_letter :: proc(r: rune) -> bool {
	// Fast path for ASCII; everything else defers to the Unicode tables.
	if r < utf8.RUNE_SELF {
		switch r {
		case '_', 'A'..='Z', 'a'..='z':
			return true
		}
	}
	return unicode.is_letter(r)
}
// Report whether `r` may appear inside an element/attribute identifier:
// `_`, `-`, `:` (namespace separator), ASCII alphanumerics, or any
// Unicode letter or digit. Returns false for the EOF sentinel (-1).
is_valid_identifier_rune :: proc(r: rune) -> bool {
	if r < utf8.RUNE_SELF {
		switch r {
		case '_', '-', ':':        return true
		case 'A'..='Z', 'a'..='z': return true
		// Use the inclusive range operator `..=` like the cases above;
		// the bare `..` form is deprecated/removed in current Odin.
		case '0'..='9':            return true
		case -1:                   return false
		}
	}
	if unicode.is_letter(r) || unicode.is_digit(r) {
		return true
	}
	return false
}
// Scan an identifier starting at the current rune, allowing at most one
// ':' namespace separator (`namespace:ident`). Returns the matched slice
// of the source; the tokenizer is left on the first non-identifier rune.
scan_identifier :: proc(t: ^Tokenizer) -> string {
	offset := t.offset

	namespaced := false
	for is_valid_identifier_rune(t.ch) {
		advance_rune(t)
		if t.ch == ':' {
			/*
				A namespaced attr can have at most two parts, `namespace:ident`.
			*/
			if namespaced {
				break
			}
			namespaced = true
		}
	}
	return string(t.src[offset : t.offset])
}
/*
	Scan a comment body (after `<!--` has been consumed), returning the
	text between the delimiters.

	A comment ends when we see -->, preceded by a character that's not a dash.
	"For compatibility, the string "--" (double-hyphen) must not occur within comments."
	See: https://www.w3.org/TR/2006/REC-xml11-20060816/#dt-comment

	Thanks to the length (4) of the comment start, we also have enough lookback,
	and the peek at the next byte asserts that there's at least one more character
	that's a `>`.
*/
scan_comment :: proc(t: ^Tokenizer) -> (comment: string, err: Error) {
	offset := t.offset

	for {
		advance_rune(t)
		ch := t.ch
		if ch < 0 {
			error(t, offset, "[parse] Comment was not terminated\n")
			return "", .Unclosed_Comment
		}

		// The last two bytes scanned being `--` is only legal when the
		// next byte is `>`, i.e. the comment terminator.
		if string(t.src[t.offset - 1:][:2]) == "--" {
			if peek_byte(t) == '>' {
				break
			} else {
				error(t, t.offset - 1, "Invalid -- sequence in comment.\n")
				return "", .Invalid_Sequence_In_Comment
			}
		}
	}
	// Consume the trailing `->`; the peek above already guaranteed the `>`,
	// so these expects cannot fail here and their results are discarded.
	expect(t, .Dash)
	expect(t, .Gt)
	return string(t.src[offset : t.offset - 1]), .None
}
/*
	Skip a CDATA section (`<![CDATA[ ... ]]>`) if one starts at the current
	offset; a no-op otherwise. The scanned bytes stay in place — callers
	slice them out of the source as part of the surrounding tag body.
*/
skip_cdata :: proc(t: ^Tokenizer) -> (err: Error) {
	if t.read_offset + len(CDATA_START) >= len(t.src) {
		/*
			Can't be the start of a CDATA tag.
		*/
		return .None
	}

	if string(t.src[t.offset:][:len(CDATA_START)]) == CDATA_START {
		t.read_offset += len(CDATA_START)

		offset := t.offset
		cdata_scan: for {
			advance_rune(t)
			if t.ch < 0 {
				error(t, offset, "[scan_string] CDATA was not terminated\n")
				return .Premature_EOF
			}
			/*
				Scan until the end of a CDATA tag.
			*/
			if t.read_offset + len(CDATA_END) < len(t.src) {
				if string(t.src[t.offset:][:len(CDATA_END)]) == CDATA_END {
					t.read_offset += len(CDATA_END)
					break cdata_scan
				}
			}
		}
	}
	return
}
/*
	Scan a tag body (or quoted attribute value) up to the `close` rune,
	starting from byte `offset`. CDATA sections and comments inside the
	body are skipped so a `<` belonging to them doesn't terminate the scan.
	Trailing whitespace is stripped from the returned literal.

	`consume_close` also eats the closing rune (used for quoted strings);
	`multiline = false` makes an embedded newline an `.Invalid_Tag_Value`.
*/
@(optimization_mode="speed")
scan_string :: proc(t: ^Tokenizer, offset: int, close: rune = '<', consume_close := false, multiline := true) -> (value: string, err: Error) {
	err = .None

	loop: for {
		ch := t.ch
		switch ch {
		case -1:
			error(t, t.offset, "[scan_string] Premature end of file.\n")
			return "", .Premature_EOF

		case '<':
			if peek_byte(t) == '!' {
				if peek_byte(t, 1) == '[' {
					/*
						Might be the start of a CDATA tag.
					*/
					skip_cdata(t) or_return
				} else if peek_byte(t, 1) == '-' && peek_byte(t, 2) == '-' {
					/*
						Comment start. Eat comment.
					*/
					t.read_offset += 3
					_ = scan_comment(t) or_return
				}
			}

		case '\n':
			if !multiline {
				error(t, offset, string(t.src[offset : t.offset]))
				error(t, offset, "[scan_string] Not terminated\n")
				err = .Invalid_Tag_Value
				break loop
			}
		}

		if t.ch == close {
			/*
				If it's not a CDATA or comment, it's the end of this body.
			*/
			break loop
		}
		advance_rune(t)
	}

	/*
		Strip trailing whitespace.
	*/
	lit := string(t.src[offset : t.offset])
	end := len(lit)
	eat: for ; end > 0; end -= 1 {
		ch := lit[end - 1]
		switch ch {
		case ' ', '\t', '\r', '\n':
			// Keep trimming.
		case:
			break eat
		}
	}
	lit = lit[:end]

	if consume_close {
		advance_rune(t)
	}
	/*
		TODO: Handle decoding escape characters and unboxing CDATA.
	*/
	return lit, err
}
// Return the next token without consuming it, by snapshotting the
// tokenizer state around a regular `scan` and restoring it afterwards.
peek :: proc(t: ^Tokenizer) -> (token: Token) {
	saved := t^
	defer t^ = saved
	return scan(t)
}
// Scan and return the next token, skipping leading whitespace.
// Identifiers and strings carry their text in the token's `text` field;
// punctuation tokens carry the rune's own text.
scan :: proc(t: ^Tokenizer) -> Token {
	skip_whitespace(t)

	offset := t.offset

	kind: Token_Kind
	err: Error
	lit: string
	pos := offset_to_pos(t, offset)

	switch ch := t.ch; true {
	case is_letter(ch):
		lit = scan_identifier(t)
		kind = .Ident

	case:
		advance_rune(t)
		switch ch {
		case -1:
			kind = .EOF

		case '<': kind = .Lt
		case '>': kind = .Gt
		case '!': kind = .Exclaim
		case '?': kind = .Question
		case '=': kind = .Eq
		case '#': kind = .Hash
		case '/': kind = .Slash
		case '-': kind = .Dash
		case ':': kind = .Colon

		case '"', '\'':
			// Quoted attribute value: scan up to the matching quote and
			// consume it. Only a clean scan yields a .String token.
			kind = .Invalid
			lit, err = scan_string(t, t.offset, ch, true, false)
			if err == .None {
				kind = .String
			}

		case '\n':
			lit = "\n"

		case:
			kind = .Invalid
		}
	}

	// Punctuation/invalid tokens get their raw source text; strings keep
	// the (possibly empty) scanned literal as-is.
	if kind != .String && lit == "" {
		lit = string(t.src[offset : t.offset])
	}
	return Token{kind, lit, pos}
}

View File

@@ -0,0 +1,713 @@
/*
An XML 1.0 / 1.1 parser
Copyright 2021-2022 Jeroen van Rijn <nom@duclavier.com>.
Made available under Odin's BSD-3 license.
A from-scratch XML implementation, loosely modelled on the [spec](https://www.w3.org/TR/2006/REC-xml11-20060816).
Features:
- Supports enough of the XML 1.0/1.1 spec to handle the 99.9% of XML documents in common current usage.
- Simple to understand and use. Small.
Caveats:
- We do NOT support HTML in this package, as that may or may not be valid XML.
If it works, great. If it doesn't, that's not considered a bug.
- We do NOT support UTF-16. If you have a UTF-16 XML file, please convert it to UTF-8 first. Also, our condolences.
- <[!ELEMENT and <[!ATTLIST are not supported, and will be either ignored or return an error depending on the parser options.
MAYBE:
- XML writer?
- Serialize/deserialize Odin types?
List of contributors:
Jeroen van Rijn: Initial implementation.
*/
package xml
// An XML 1.0 / 1.1 parser
import "core:bytes"
import "core:encoding/entity"
import "core:intrinsics"
import "core:mem"
import "core:os"
import "core:strings"
likely :: intrinsics.expect
DEFAULT_OPTIONS :: Options{
flags = {.Ignore_Unsupported},
expected_doctype = "",
}
Option_Flag :: enum {
/*
If the caller says that input may be modified, we can perform in-situ parsing.
If this flag isn't provided, the XML parser first duplicates the input so that it can.
*/
Input_May_Be_Modified,
/*
Document MUST start with `<?xml` prologue.
*/
Must_Have_Prolog,
/*
Document MUST have a `<!DOCTYPE`.
*/
Must_Have_DocType,
/*
By default we skip comments. Use this option to intern a comment on a parented Element.
*/
Intern_Comments,
/*
How to handle unsupported parts of the specification, like <! other than <!DOCTYPE and <![CDATA[
*/
Error_on_Unsupported,
Ignore_Unsupported,
/*
By default CDATA tags are passed-through as-is.
This option unwraps them when encountered.
*/
Unbox_CDATA,
/*
By default SGML entities like `&gt;`, `&#32;` and `&#x20;` are passed-through as-is.
This option decodes them when encountered.
*/
Decode_SGML_Entities,
/*
If a tag body has a comment, it will be stripped unless this option is given.
*/
Keep_Tag_Body_Comments,
}
Option_Flags :: bit_set[Option_Flag; u16]
Document :: struct {
elements: [dynamic]Element,
element_count: Element_ID,
prologue: Attributes,
encoding: Encoding,
doctype: struct {
/*
We only scan the <!DOCTYPE IDENT part and skip the rest.
*/
ident: string,
rest: string,
},
/*
If we encounter comments before the root node, and the option to intern comments is given, this is where they'll live.
Otherwise they'll be in the element tree.
*/
comments: [dynamic]string,
/*
Internal
*/
tokenizer: ^Tokenizer,
allocator: mem.Allocator,
/*
Input. Either the original buffer, or a copy if `.Input_May_Be_Modified` isn't specified.
*/
input: []u8,
strings_to_free: [dynamic]string,
}
Element :: struct {
ident: string,
value: string,
attribs: Attributes,
kind: enum {
Element = 0,
Comment,
},
parent: Element_ID,
children: [dynamic]Element_ID,
}
Attribute :: struct {
key: string,
val: string,
}
Attributes :: [dynamic]Attribute
Options :: struct {
flags: Option_Flags,
expected_doctype: string,
}
Encoding :: enum {
Unknown,
UTF_8,
ISO_8859_1,
/*
Aliases
*/
LATIN_1 = ISO_8859_1,
}
Error :: enum {
/*
General return values.
*/
None = 0,
General_Error,
Unexpected_Token,
Invalid_Token,
/*
Couldn't find, open or read file.
*/
File_Error,
/*
File too short.
*/
Premature_EOF,
/*
XML-specific errors.
*/
No_Prolog,
Invalid_Prolog,
Too_Many_Prologs,
No_DocType,
Too_Many_DocTypes,
DocType_Must_Preceed_Elements,
/*
If a DOCTYPE is present _or_ the caller
asked for a specific DOCTYPE and the DOCTYPE
and root tag don't match, we return `.Invalid_DocType`.
*/
Invalid_DocType,
Invalid_Tag_Value,
Mismatched_Closing_Tag,
Unclosed_Comment,
Comment_Before_Root_Element,
Invalid_Sequence_In_Comment,
Unsupported_Version,
Unsupported_Encoding,
/*
<!FOO are usually skipped.
*/
Unhandled_Bang,
Duplicate_Attribute,
Conflicting_Options,
}
/*
Implementation starts here.
*/
parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_handler := default_error_handler, allocator := context.allocator) -> (doc: ^Document, err: Error) {
data := data
context.allocator = allocator
opts := validate_options(options) or_return
/*
If `.Input_May_Be_Modified` is not specified, we duplicate the input so that we can modify it in-place.
*/
if .Input_May_Be_Modified not_in opts.flags {
data = bytes.clone(data)
}
t := &Tokenizer{}
init(t, string(data), path, error_handler)
doc = new(Document)
doc.allocator = allocator
doc.tokenizer = t
doc.input = data
doc.elements = make([dynamic]Element, 1024, 1024, allocator)
// strings.intern_init(&doc.intern, allocator, allocator)
err = .Unexpected_Token
element, parent: Element_ID
tag_is_open := false
first_element := true
open: Token
/*
If a DOCTYPE is present, the root tag has to match.
If an expected DOCTYPE is given in options (i.e. it's non-empty), the DOCTYPE (if present) and root tag have to match.
*/
expected_doctype := options.expected_doctype
loop: for {
skip_whitespace(t)
// NOTE(Jeroen): This is faster as a switch.
switch t.ch {
case '<':
/*
Consume peeked `<`
*/
advance_rune(t)
open = scan(t)
// NOTE(Jeroen): We're not using a switch because this if-else chain ordered by likelihood is 2.5% faster at -o:size and -o:speed.
if likely(open.kind, Token_Kind.Ident) == .Ident {
/*
e.g. <odin - Start of new element.
*/
element = new_element(doc)
tag_is_open = true
if first_element {
/*
First element.
*/
parent = element
first_element = false
} else {
append(&doc.elements[parent].children, element)
}
doc.elements[element].parent = parent
doc.elements[element].ident = open.text
parse_attributes(doc, &doc.elements[element].attribs) or_return
/*
If a DOCTYPE is present _or_ the caller
asked for a specific DOCTYPE and the DOCTYPE
and root tag don't match, we return .Invalid_Root_Tag.
*/
if element == 0 { // Root tag?
if len(expected_doctype) > 0 && expected_doctype != open.text {
error(t, t.offset, "Root Tag doesn't match DOCTYPE. Expected: %v, got: %v\n", expected_doctype, open.text)
return doc, .Invalid_DocType
}
}
/*
One of these should follow:
- `>`, which means we've just opened this tag and expect a later element to close it.
- `/>`, which means this is an 'empty' or self-closing tag.
*/
end_token := scan(t)
#partial switch end_token.kind {
case .Gt:
/*
We're now the new parent.
*/
parent = element
case .Slash:
/*
Empty tag. Close it.
*/
expect(t, .Gt) or_return
parent = doc.elements[element].parent
element = parent
tag_is_open = false
case:
error(t, t.offset, "Expected close tag, got: %#v\n", end_token)
return
}
} else if open.kind == .Slash {
/*
Close tag.
*/
ident := expect(t, .Ident) or_return
_ = expect(t, .Gt) or_return
if doc.elements[element].ident != ident.text {
error(t, t.offset, "Mismatched Closing Tag. Expected %v, got %v\n", doc.elements[element].ident, ident.text)
return doc, .Mismatched_Closing_Tag
}
parent = doc.elements[element].parent
element = parent
tag_is_open = false
} else if open.kind == .Exclaim {
/*
<!
*/
next := scan(t)
#partial switch next.kind {
case .Ident:
switch next.text {
case "DOCTYPE":
if len(doc.doctype.ident) > 0 {
return doc, .Too_Many_DocTypes
}
if doc.element_count > 0 {
return doc, .DocType_Must_Preceed_Elements
}
parse_doctype(doc) or_return
if len(expected_doctype) > 0 && expected_doctype != doc.doctype.ident {
error(t, t.offset, "Invalid DOCTYPE. Expected: %v, got: %v\n", expected_doctype, doc.doctype.ident)
return doc, .Invalid_DocType
}
expected_doctype = doc.doctype.ident
case:
if .Error_on_Unsupported in opts.flags {
error(t, t.offset, "Unhandled: <!%v\n", next.text)
return doc, .Unhandled_Bang
}
skip_element(t) or_return
}
case .Dash:
/*
Comment: <!-- -->.
The grammar does not allow a comment to end in --->
*/
expect(t, .Dash)
comment := scan_comment(t) or_return
if .Intern_Comments in opts.flags {
if len(doc.elements) == 0 {
append(&doc.comments, comment)
} else {
el := new_element(doc)
doc.elements[el].parent = element
doc.elements[el].kind = .Comment
doc.elements[el].value = comment
append(&doc.elements[element].children, el)
}
}
case:
error(t, t.offset, "Invalid Token after <!. Expected .Ident, got %#v\n", next)
return
}
} else if open.kind == .Question {
/*
<?xml
*/
next := scan(t)
#partial switch next.kind {
case .Ident:
if len(next.text) == 3 && strings.to_lower(next.text, context.temp_allocator) == "xml" {
parse_prologue(doc) or_return
} else if len(doc.prologue) > 0 {
/*
We've already seen a prologue.
*/
return doc, .Too_Many_Prologs
} else {
/*
Could be `<?xml-stylesheet`, etc. Ignore it.
*/
skip_element(t) or_return
}
case:
error(t, t.offset, "Expected \"<?xml\", got \"<?%v\".", next.text)
return
}
} else {
error(t, t.offset, "Invalid Token after <: %#v\n", open)
return
}
case -1:
/*
End of file.
*/
if tag_is_open {
return doc, .Premature_EOF
}
break loop
case:
/*
This should be a tag's body text.
*/
body_text := scan_string(t, t.offset) or_return
needs_processing := .Unbox_CDATA in opts.flags
needs_processing |= .Decode_SGML_Entities in opts.flags
if !needs_processing {
doc.elements[element].value = body_text
continue
}
decode_opts := entity.XML_Decode_Options{}
if .Keep_Tag_Body_Comments not_in opts.flags {
decode_opts += { .Comment_Strip }
}
if .Decode_SGML_Entities not_in opts.flags {
decode_opts += { .No_Entity_Decode }
}
if .Unbox_CDATA in opts.flags {
decode_opts += { .Unbox_CDATA }
if .Decode_SGML_Entities in opts.flags {
decode_opts += { .Decode_CDATA }
}
}
decoded, decode_err := entity.decode_xml(body_text, decode_opts)
if decode_err == .None {
doc.elements[element].value = decoded
append(&doc.strings_to_free, decoded)
} else {
doc.elements[element].value = body_text
}
}
}
if .Must_Have_Prolog in opts.flags && len(doc.prologue) == 0 {
return doc, .No_Prolog
}
if .Must_Have_DocType in opts.flags && len(doc.doctype.ident) == 0 {
return doc, .No_DocType
}
resize(&doc.elements, int(doc.element_count))
return doc, .None
}
// Parse an XML document held in a string. Thin wrapper over `parse_bytes`
// that reinterprets the string's bytes without copying.
parse_string :: proc(data: string, options := DEFAULT_OPTIONS, path := "", error_handler := default_error_handler, allocator := context.allocator) -> (doc: ^Document, err: Error) {
	return parse_bytes(transmute([]u8)data, options, path, error_handler, allocator)
}
parse :: proc { parse_string, parse_bytes }
// Load an XML file.
// Reads `filename` fully into memory and parses it. Since we own the
// freshly read buffer, `.Input_May_Be_Modified` is added so the parser
// can work in place instead of cloning the input again.
load_from_file :: proc(filename: string, options := DEFAULT_OPTIONS, error_handler := default_error_handler, allocator := context.allocator) -> (doc: ^Document, err: Error) {
	context.allocator = allocator
	options := options

	data, data_ok := os.read_entire_file(filename)
	if !data_ok { return {}, .File_Error }

	options.flags += { .Input_May_Be_Modified }
	return parse_bytes(data, options, filename, error_handler, allocator)
}
// Free a Document and everything it owns: per-element attribute and child
// arrays, the element array itself, prologue attributes, interned comments,
// the (possibly cloned) input buffer, any strings produced by SGML entity
// decoding, and finally the Document struct. Safe to call with nil.
destroy :: proc(doc: ^Document) {
	if doc == nil { return }

	for el in doc.elements {
		delete(el.attribs)
		delete(el.children)
	}
	delete(doc.elements)
	delete(doc.prologue)
	delete(doc.comments)
	delete(doc.input)

	for s in doc.strings_to_free {
		delete(s)
	}
	delete(doc.strings_to_free)

	free(doc)
}
/*
Helpers.
*/
// Check an Options value for internally conflicting flags.
// `.Error_on_Unsupported` and `.Ignore_Unsupported` are mutually exclusive.
validate_options :: proc(options: Options) -> (validated: Options, err: Error) {
	validated = options

	conflicting := Option_Flags{.Error_on_Unsupported, .Ignore_Unsupported}
	if conflicting <= validated.flags {
		return options, .Conflicting_Options
	}
	return validated, .None
}
// Scan the next token and verify it is of `kind`. On mismatch, report
// through the error handler and return `.Unexpected_Token`.
expect :: proc(t: ^Tokenizer, kind: Token_Kind) -> (tok: Token, err: Error) {
	tok = scan(t)
	if tok.kind != kind {
		error(t, t.offset, "Expected \"%v\", got \"%v\".", kind, tok.kind)
		return tok, .Unexpected_Token
	}
	return tok, .None
}
// Parse one `key="value"` attribute. `offset` is the source position of
// the key, used by the caller for duplicate-attribute diagnostics.
parse_attribute :: proc(doc: ^Document) -> (attr: Attribute, offset: int, err: Error) {
	assert(doc != nil)
	context.allocator = doc.allocator
	t := doc.tokenizer

	key := expect(t, .Ident) or_return
	// Point at the start of the key, not the end.
	offset = t.offset - len(key.text)
	_ = expect(t, .Eq) or_return
	value := expect(t, .String) or_return

	attr.key = key.text
	attr.val = value.text

	err = .None
	return
}
// Return `.Duplicate_Attribute` (after reporting at `offset`) when
// `attr.key` already occurs in `attribs`; `.None` otherwise.
check_duplicate_attributes :: proc(t: ^Tokenizer, attribs: Attributes, attr: Attribute, offset: int) -> (err: Error) {
	for existing in attribs {
		if existing.key != attr.key { continue }
		error(t, offset, "Duplicate attribute: %v\n", attr.key)
		return .Duplicate_Attribute
	}
	return .None
}
// Parse all `key="value"` attributes at the current position into
// `attribs`, rejecting duplicate keys. Stops at the first non-identifier
// token and skips any trailing whitespace.
parse_attributes :: proc(doc: ^Document, attribs: ^Attributes) -> (err: Error) {
	assert(doc != nil)
	context.allocator = doc.allocator
	t := doc.tokenizer

	for peek(t).kind == .Ident {
		attr, offset := parse_attribute(doc) or_return
		check_duplicate_attributes(t, attribs^, attr, offset) or_return
		append(attribs, attr)
	}
	skip_whitespace(t)
	return .None
}
// Parse the attributes of an `<?xml ... ?>` prologue (called after the
// `xml` ident has been consumed) into `doc.prologue`, recording the
// document encoding. Unknown versions/encodings are warnings, not errors.
parse_prologue :: proc(doc: ^Document) -> (err: Error) {
	assert(doc != nil)
	context.allocator = doc.allocator
	t := doc.tokenizer

	offset := t.offset
	parse_attributes(doc, &doc.prologue) or_return

	for attr in doc.prologue {
		switch attr.key {
		case "version":
			switch attr.val {
			case "1.0", "1.1":
				// Supported versions; nothing to record.
			case:
				error(t, offset, "[parse_prologue] Warning: Unhandled XML version: %v\n", attr.val)
			}

		case "encoding":
			switch strings.to_lower(attr.val, context.temp_allocator) {
			case "utf-8", "utf8":
				doc.encoding = .UTF_8
			case "latin-1", "latin1", "iso-8859-1":
				doc.encoding = .LATIN_1
			case:
				/*
					Unrecognized encoding, assume UTF-8.
				*/
				error(t, offset, "[parse_prologue] Warning: Unrecognized encoding: %v\n", attr.val)
			}

		case:
			// Ignored.
		}
	}

	// The prologue must close with `?>`.
	_ = expect(t, .Question) or_return
	_ = expect(t, .Gt) or_return
	return .None
}
// Skip to the end of the current tag by balancing `<` against `>` tokens.
// Used for DOCTYPE internals, `<?...?>` processing instructions and other
// constructs we don't interpret. Quoted strings are consumed as single
// .String tokens by `scan`, so angle brackets inside them don't count.
skip_element :: proc(t: ^Tokenizer) -> (err: Error) {
	// We enter having already consumed one `<`.
	close := 1

	loop: for {
		tok := scan(t)
		#partial switch tok.kind {
		case .EOF:
			error(t, t.offset, "[skip_element] Premature EOF\n")
			return .Premature_EOF

		case .Lt:
			close += 1

		case .Gt:
			close -= 1
			if close == 0 {
				break loop
			}

		case:
			// Anything else is part of the skipped element.
		}
	}
	return .None
}
// Parse a `<!DOCTYPE ...>` tag: record the root identifier and store the
// remainder of the tag verbatim in `doc.doctype.rest`.
parse_doctype :: proc(doc: ^Document) -> (err: Error) {
	/*
		<!DOCTYPE greeting SYSTEM "hello.dtd">

		<!DOCTYPE greeting [
			<!ELEMENT greeting (#PCDATA)>
		]>
	*/
	assert(doc != nil)
	context.allocator = doc.allocator
	t := doc.tokenizer

	tok := expect(t, .Ident) or_return
	doc.doctype.ident = tok.text

	skip_whitespace(t)
	offset := t.offset
	skip_element(t) or_return

	/*
		-1 because the current offset is that of the closing tag, so the rest of the DOCTYPE tag ends just before it.
	*/
	doc.doctype.rest = string(t.src[offset : t.offset - 1])
	return .None
}
Element_ID :: u32
// Reserve a fresh element slot in the document and return its id.
// The backing array grows geometrically while small, then in 64 Ki chunks.
new_element :: proc(doc: ^Document) -> (id: Element_ID) {
	capacity := len(doc.elements)

	if int(doc.element_count) + 1 > capacity {
		new_capacity := capacity < 65536 ? capacity * 2 : capacity + 65536
		resize(&doc.elements, new_capacity)
	}

	id = doc.element_count
	doc.element_count += 1
	return id
}

View File

@@ -119,17 +119,17 @@ tprintf :: proc(fmt: string, args: ..any) -> string {
// bprint procedures return a string using a buffer from an array
bprint :: proc(buf: []byte, args: ..any, sep := " ") -> string {
sb := strings.builder_from_slice(buf[0:len(buf)])
sb := strings.builder_from_bytes(buf[0:len(buf)])
return sbprint(buf=&sb, args=args, sep=sep)
}
// bprintln procedures return a string using a buffer from an array
bprintln :: proc(buf: []byte, args: ..any, sep := " ") -> string {
sb := strings.builder_from_slice(buf[0:len(buf)])
sb := strings.builder_from_bytes(buf[0:len(buf)])
return sbprintln(buf=&sb, args=args, sep=sep)
}
// bprintf procedures return a string using a buffer from an array
bprintf :: proc(buf: []byte, fmt: string, args: ..any) -> string {
sb := strings.builder_from_slice(buf[0:len(buf)])
sb := strings.builder_from_bytes(buf[0:len(buf)])
return sbprintf(&sb, fmt, ..args)
}

View File

@@ -52,9 +52,6 @@ XXH3_128_reset_with_seed :: proc(state: ^XXH3_state, seed: XXH64_hash) -> (err:
XXH3_64_reset_with_seed :: XXH3_128_reset_with_seed
XXH3_128_update :: proc(state: ^XXH3_state, input: []u8) -> (err: Error) {
if len(input) < XXH3_MIDSIZE_MAX {
return .Error
}
return XXH3_update(state, input, XXH3_accumulate_512, XXH3_scramble_accumulator)
}
XXH3_64_update :: XXH3_128_update
@@ -127,6 +124,7 @@ XXH3_create_state :: proc(allocator := context.allocator) -> (res: ^XXH3_state,
err = nil if mem_error == nil else .Error
XXH3_init_state(state)
XXH3_128_reset(state)
return state, nil
}
@@ -213,7 +211,9 @@ XXH3_update :: #force_inline proc(
length := len(input)
secret := state.custom_secret[:] if len(state.external_secret) == 0 else state.external_secret[:]
assert(len(input) > 0)
if len(input) == 0 {
return
}
state.total_length += u64(length)
assert(state.buffered_size <= XXH3_INTERNAL_BUFFER_SIZE)
@@ -234,7 +234,9 @@ XXH3_update :: #force_inline proc(
*/
if state.buffered_size > 0 {
load_size := int(XXH3_INTERNAL_BUFFER_SIZE - state.buffered_size)
mem_copy(&state.buffer[state.buffered_size], &input[0], load_size)
state_ptr := rawptr(uintptr(raw_data(state.buffer[:])) + uintptr(state.buffered_size))
mem_copy(state_ptr, raw_data(input), load_size)
input = input[load_size:]
XXH3_consume_stripes(

View File

@@ -197,6 +197,7 @@ XXH32 :: proc(input: []u8, seed := XXH32_DEFAULT_SEED) -> (digest: XXH32_hash) {
*/
XXH32_create_state :: proc(allocator := context.allocator) -> (res: ^XXH32_state, err: Error) {
state := new(XXH32_state, allocator)
XXH32_reset_state(state)
return state, .None if state != nil else .Error
}
@@ -258,7 +259,7 @@ XXH32_update :: proc(state: ^XXH32_state, input: []u8) -> (err: Error) {
v3 := state.v3
v4 := state.v4
for len(buf) >= 15 {
for len(buf) >= 16 {
#no_bounds_check v1 = XXH32_round(v1, XXH32_read32(buf, .Unaligned)); buf = buf[4:]
#no_bounds_check v2 = XXH32_round(v2, XXH32_read32(buf, .Unaligned)); buf = buf[4:]
#no_bounds_check v3 = XXH32_round(v3, XXH32_read32(buf, .Unaligned)); buf = buf[4:]

View File

@@ -163,6 +163,7 @@ XXH64 :: proc(input: []u8, seed := XXH64_DEFAULT_SEED) -> (digest: XXH64_hash) {
*/
XXH64_create_state :: proc(allocator := context.allocator) -> (res: ^XXH64_state, err: Error) {
state := new(XXH64_state, allocator)
XXH64_reset_state(state)
return state, .None if state != nil else .Error
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,61 @@
package image
import "core:mem"
import "core:os"
import "core:bytes"
Loader_Proc :: #type proc(data: []byte, options: Options, allocator: mem.Allocator) -> (img: ^Image, err: Error)
Destroy_Proc :: #type proc(img: ^Image)
@(private)
_internal_loaders: [Which_File_Type]Loader_Proc
_internal_destroyers: [Which_File_Type]Destroy_Proc
// Register the loader/destroyer pair for image format `kind`.
// Both procedures must be non-nil and each format may only be
// registered once.
register :: proc(kind: Which_File_Type, loader: Loader_Proc, destroyer: Destroy_Proc) {
	assert(loader != nil)
	assert(destroyer != nil)

	assert(_internal_loaders[kind] == nil)
	assert(_internal_destroyers[kind] == nil)

	_internal_loaders[kind]    = loader
	_internal_destroyers[kind] = destroyer
}
// Overloaded entry point: load an image from an in-memory buffer or
// from a file path.
load :: proc{
	load_from_bytes,
	load_from_file,
}
// Detect the format of `data` via `which` and dispatch to the registered
// loader for that format; `.Unsupported_Format` when none is registered.
load_from_bytes :: proc(data: []byte, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
	if loader := _internal_loaders[which(data)]; loader != nil {
		return loader(data, options, allocator)
	}
	return nil, .Unsupported_Format
}
// Read `filename` into memory and load it as an image. The temporary
// file buffer is released before returning.
load_from_file :: proc(filename: string, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
	data, ok := os.read_entire_file(filename, allocator)
	defer delete(data, allocator)

	if !ok {
		return nil, .Unable_To_Read_File
	}
	return load_from_bytes(data, options, allocator)
}
// Free an image through its format's registered destroyer. Formats
// without a registered destroyer fall back to freeing the pixel buffer
// and the Image struct directly. Safe to call with nil.
destroy :: proc(img: ^Image, allocator := context.allocator) {
	if img == nil {
		return
	}
	context.allocator = allocator

	destroyer := _internal_destroyers[img.which]
	if destroyer != nil {
		destroyer(img)
	} else {
		// No destroyer registered: only safe when there is no
		// format-specific metadata to release.
		assert(img.metadata == nil)
		bytes.buffer_destroy(&img.pixels)
		free(img)
	}
}

View File

@@ -0,0 +1,33 @@
/*
Formats:
PBM (P1, P4): Portable Bit Map, stores black and white images (1 channel)
PGM (P2, P5): Portable Gray Map, stores greyscale images (1 channel, 1 or 2 bytes per value)
PPM (P3, P6): Portable Pixel Map, stores colour images (3 channel, 1 or 2 bytes per value)
PAM (P7 ): Portable Arbitrary Map, stores arbitrary channel images (1 or 2 bytes per value)
PFM (Pf, PF): Portable Float Map, stores floating-point images (Pf: 1 channel, PF: 3 channel)
Reading:
All formats fill out header fields `format`, `width`, `height`, `channels`, `depth`
Specific formats use more fields
PGM, PPM, and PAM set `maxval` (maximum of 65535)
PAM sets `tupltype` if there is one, and can set `channels` to any value (not just 1 or 3)
PFM sets `scale` (float equivalent of `maxval`) and `little_endian` (endianness of stored floats)
Currently doesn't support reading multiple images from one binary-format file
Writing:
You can use your own `Netpbm_Info` struct to control how images are written
All formats require the header field `format` to be specified
Additional header fields are required for specific formats
PGM, PPM, and PAM require `maxval` (maximum of 65535)
PAM also uses `tupltype`, though it may be left as default (empty or nil string)
PFM requires `scale`, and optionally `little_endian`
Some syntax differences from the specifications:
`channels` stores the number of values per pixel, what the PAM specification calls `depth`
`depth` instead is the number of bits for a single value (32 for PFM, 16 or 8 otherwise)
`scale` and `little_endian` are separated, so the `header` will always store a positive `scale`
`little_endian` will only be true for a negative `scale` PFM, every other format will be false
`little_endian` only describes the netpbm data being read/written, the image buffer will be native
*/
package netpbm

View File

@@ -0,0 +1,27 @@
package netpbm
import "core:bytes"
import "core:image"
// Free a netpbm image: pixel buffer, header-owned strings, metadata, and
// (via the defer) the Image struct itself. Returns false when `img` is
// nil or its metadata is not `Netpbm_Info`.
destroy :: proc(img: ^image.Image) -> bool {
	if img == nil do return false
	// The struct is freed on every path below.
	defer free(img)

	bytes.buffer_destroy(&img.pixels)

	info, ok := img.metadata.(^image.Netpbm_Info)
	if !ok do return false

	header_destroy(&info.header)
	free(info)
	img.metadata = nil
	return true
}
// Release header-owned memory. Only PAM (P7) headers own their
// `tupltype` string; every other format stores nothing to free.
header_destroy :: proc(using header: ^Header) {
	if format != .P7 || tupltype == "" {
		return
	}
	delete(tupltype)
	tupltype = ""
}

View File

@@ -0,0 +1,763 @@
package netpbm
import "core:bytes"
import "core:fmt"
import "core:image"
import "core:mem"
import "core:os"
import "core:strconv"
import "core:strings"
import "core:unicode"
Image :: image.Image
Format :: image.Netpbm_Format
Header :: image.Netpbm_Header
Info :: image.Netpbm_Info
Error :: image.Error
Format_Error :: image.Netpbm_Error
Formats :: bit_set[Format]
PBM :: Formats{.P1, .P4}
PGM :: Formats{.P2, .P5}
PPM :: Formats{.P3, .P6}
PNM :: PBM + PGM + PPM
PAM :: Formats{.P7}
PFM :: Formats{.Pf, .PF}
ASCII :: Formats{.P1, .P2, .P3}
BINARY :: Formats{.P4, .P5, .P6} + PAM + PFM
// Overloaded loader: from a file path or from an in-memory buffer.
load :: proc {
	load_from_file,
	load_from_bytes,
}
// Read a netpbm file from disk and decode it. The raw file buffer is
// released before returning.
load_from_file :: proc(filename: string, allocator := context.allocator) -> (img: ^Image, err: Error) {
	context.allocator = allocator

	data, ok := os.read_entire_file(filename)
	defer delete(data)

	if !ok {
		return nil, .Unable_To_Read_File
	}
	return load_from_bytes(data)
}
// Parse a netpbm header from `data` and decode the pixel payload that
// follows it. The parsed header is copied into the image's metadata,
// including a clone of a PAM `tupltype` string, since the local header
// is destroyed when this proc returns.
load_from_bytes :: proc(data: []byte, allocator := context.allocator) -> (img: ^Image, err: Error) {
	context.allocator = allocator

	img = new(Image)
	img.which = .NetPBM

	header: Header; defer header_destroy(&header)
	header_size: int
	header, header_size = parse_header(data) or_return

	// Pixel data starts immediately after the header.
	img_data := data[header_size:]
	decode_image(img, header, img_data) or_return

	info := new(Info)
	info.header = header
	if header.format == .P7 && header.tupltype != "" {
		info.header.tupltype = strings.clone(header.tupltype)
	}
	img.metadata = info

	return img, nil
}
// Overloaded writer: to a file path or to an in-memory buffer.
save :: proc {
	save_to_file,
	save_to_buffer,
}
// Encode `img` (optionally controlled by `custom_info`) and write the
// result to `filename`. The encoded buffer is released before returning.
save_to_file :: proc(filename: string, img: ^Image, custom_info: Info = {}, allocator := context.allocator) -> (err: Error) {
	context.allocator = allocator

	data := save_to_buffer(img, custom_info) or_return
	defer delete(data)

	if !os.write_entire_file(filename, data) {
		return .Unable_To_Write_File
	}
	return Format_Error.None
}
// Encode `img` into a newly allocated byte buffer, in the format described by
// `custom_info` — or, when no custom info is given, by the image's own stored
// Netpbm metadata, falling back to an automatically selected format.
//
// The returned buffer is allocated with `allocator`; the caller owns it.
// Returns `.Invalid_Number_Of_Channels` / `.Invalid_Image_Depth` when the
// image doesn't match the chosen format, `Format_Error.None` on success.
save_to_buffer :: proc(img: ^Image, custom_info: Info = {}, allocator := context.allocator) -> (buffer: []byte, err: Error) {
	context.allocator = allocator

	// Pick the header to encode with: explicit > stored metadata > guessed.
	info: Info = {}
	if custom_info.header.width > 0 {
		// Custom info has been set, use it.
		info = custom_info
	} else {
		img_info, ok := img.metadata.(^image.Netpbm_Info)
		if !ok {
			// Image doesn't carry Netpbm info; try to guess a suitable format.
			auto_info, auto_info_found := autoselect_pbm_format_from_image(img)
			if auto_info_found {
				info = auto_info
			} else {
				return {}, .Invalid_Input_Image
			}
		} else {
			// Use the info as stored on the image.
			info = img_info^
		}
	}

	// using info so we can just talk about the header
	using info

	// Validation: channel count must match the chosen format family.
	if header.format in (PBM + PGM + Formats{.Pf}) && img.channels != 1 \
	|| header.format in (PPM + Formats{.PF}) && img.channels != 3 {
		err = .Invalid_Number_Of_Channels
		return
	}

	// Validation: bit depth must match maxval (integer formats) or be 32 (PFM).
	if header.format in (PNM + PAM) {
		if header.maxval <= int(max(u8)) && img.depth != 8 \
		|| header.maxval > int(max(u8)) && header.maxval <= int(max(u16)) && img.depth != 16 {
			err = .Invalid_Image_Depth
			return
		}
	} else if header.format in PFM && img.depth != 32 {
		err = .Invalid_Image_Depth
		return
	}

	// We write the output into a string builder.
	data: strings.Builder
	strings.init_builder(&data)

	// All headers start with the format signature.
	fmt.sbprintf(&data, "%s\n", header.format)

	if header.format in PNM {
		fmt.sbprintf(&data, "%i %i\n", img.width, img.height)
		if header.format not_in PBM {
			fmt.sbprintf(&data, "%i\n", header.maxval)
		}
	} else if header.format in PAM {
		if len(header.tupltype) > 0 {
			fmt.sbprintf(&data, "WIDTH %i\nHEIGHT %i\nMAXVAL %i\nDEPTH %i\nTUPLTYPE %s\nENDHDR\n",
				img.width, img.height, header.maxval, img.channels, header.tupltype)
		} else {
			fmt.sbprintf(&data, "WIDTH %i\nHEIGHT %i\nMAXVAL %i\nDEPTH %i\nENDHDR\n",
				img.width, img.height, header.maxval, img.channels)
		}
	} else if header.format in PFM {
		// PFM encodes endianness in the sign of the scale value.
		scale := -header.scale if header.little_endian else header.scale
		fmt.sbprintf(&data, "%i %i\n%f\n", img.width, img.height, scale)
	}

	switch header.format {
	// Compressed binary: 8 pixels per byte, each row padded to a whole byte.
	case .P4:
		header_buf := data.buf[:]
		pixels := img.pixels.buf[:]

		p4_buffer_size := (img.width / 8 + 1) * img.height
		reserve(&data.buf, len(header_buf) + p4_buffer_size)

		// We build up a byte value until it is completely filled
		// or we reach the end of the row.
		for y in 0 ..< img.height {
			b: byte
			for x in 0 ..< img.width {
				i := y * img.width + x
				bit := byte(7 - (x % 8))
				v : byte = 0 if pixels[i] == 0 else 1
				b |= (v << bit)
				if bit == 0 {
					append(&data.buf, b)
					b = 0
				}
			}
			// Flush the row's trailing partial byte. BUGFIX: this must not be
			// gated on `b != 0` — a trailing byte of all-zero bits still has
			// to be written, or rows whose width isn't a multiple of 8 come
			// out one byte short whenever their last pixels are zero.
			if img.width % 8 != 0 {
				append(&data.buf, b)
			}
		}

	// Simple binary: bulk copy, then fix up endianness in place.
	case .P5, .P6, .P7, .Pf, .PF:
		header_buf := data.buf[:]
		pixels := img.pixels.buf[:]

		resize(&data.buf, len(header_buf) + len(pixels))
		mem.copy(raw_data(data.buf[len(header_buf):]), raw_data(pixels), len(pixels))

		// Convert from native endianness to the file's endianness.
		if img.depth == 16 {
			// Integer formats with maxval > 255 are stored big-endian.
			pixels := mem.slice_data_cast([]u16be, data.buf[len(header_buf):])
			for p in &pixels {
				p = u16be(transmute(u16) p)
			}
		} else if header.format in PFM {
			// PFM endianness follows the header's scale sign.
			if header.little_endian {
				pixels := mem.slice_data_cast([]f32le, data.buf[len(header_buf):])
				for p in &pixels {
					p = f32le(transmute(f32) p)
				}
			} else {
				pixels := mem.slice_data_cast([]f32be, data.buf[len(header_buf):])
				for p in &pixels {
					p = f32be(transmute(f32) p)
				}
			}
		}

	// If-it-looks-like-a-bitmap ASCII: one '0'/'1' character per pixel.
	case .P1:
		pixels := img.pixels.buf[:]
		for y in 0 ..< img.height {
			for x in 0 ..< img.width {
				i := y * img.width + x
				append(&data.buf, '0' if pixels[i] == 0 else '1')
			}
			append(&data.buf, '\n')
		}

	// Token ASCII: whitespace-separated decimal sample values.
	case .P2, .P3:
		switch img.depth {
		case 8:
			pixels := img.pixels.buf[:]
			for y in 0 ..< img.height {
				for x in 0 ..< img.width {
					i := y * img.width + x
					for c in 0 ..< img.channels {
						i := i * img.channels + c
						fmt.sbprintf(&data, "%i ", pixels[i])
					}
					fmt.sbprint(&data, "\n")
				}
				fmt.sbprint(&data, "\n")
			}
		case 16:
			pixels := mem.slice_data_cast([]u16, img.pixels.buf[:])
			for y in 0 ..< img.height {
				for x in 0 ..< img.width {
					i := y * img.width + x
					for c in 0 ..< img.channels {
						i := i * img.channels + c
						fmt.sbprintf(&data, "%i ", pixels[i])
					}
					fmt.sbprint(&data, "\n")
				}
				fmt.sbprint(&data, "\n")
			}
		case:
			return data.buf[:], .Invalid_Image_Depth
		}
	case:
		return data.buf[:], .Invalid_Format
	}

	return data.buf[:], Format_Error.None
}
// Inspect the two-byte magic at the start of `data` and dispatch to the
// matching header parser. Returns the parsed header and its byte length.
parse_header :: proc(data: []byte, allocator := context.allocator) -> (header: Header, length: int, err: Error) {
	context.allocator = allocator

	// The shortest valid prefix is the 2-byte signature plus one whitespace byte.
	if len(data) < 3 {
		err = Format_Error.Incomplete_Header
		return
	}

	if data[0] == 'P' {
		switch data[1] {
		case '1' ..= '6':
			// PBM/PGM/PPM share a single token-based header grammar.
			return _parse_header_pnm(data)
		case '7':
			// PAM headers are line-oriented and may allocate a tupltype string.
			return _parse_header_pam(data, allocator)
		case 'f', 'F':
			// Portable Float Map, greyscale or RGB.
			return _parse_header_pfm(data)
		}
	}

	err = .Invalid_Signature
	return
}
// Parse a PNM (P1..P6) header: signature, width, height and — except for
// bitmaps — maxval, as whitespace-separated decimal tokens with '#' comments.
// `length` is the number of header bytes consumed, i.e. the offset of the
// pixel payload within `data`.
@(private)
_parse_header_pnm :: proc(data: []byte) -> (header: Header, length: int, err: Error) {
	SIG_LENGTH :: 2
	{
		// Map the digit after 'P' to the corresponding format enum value.
		header_formats := []Format{.P1, .P2, .P3, .P4, .P5, .P6}
		header.format = header_formats[data[1] - '0' - 1]
	}
	// Keep a list of field pointers for easy iteration.
	header_fields: []^int
	if header.format in PBM {
		header_fields = {&header.width, &header.height}
		header.maxval = 1 // we know maxval for a bitmap
	} else {
		header_fields = {&header.width, &header.height, &header.maxval}
	}
	// we're keeping track of the header byte length
	length = SIG_LENGTH
	// loop state
	in_comment := false
	already_in_space := true
	current_field := 0
	current_value := header_fields[0]
	parse_loop: for d, i in data[SIG_LENGTH:] {
		length += 1
		// handle comments
		if in_comment {
			switch d {
			// comments only go up to next carriage return or line feed
			case '\r', '\n':
				in_comment = false
			}
			continue
		} else if d == '#' {
			in_comment = true
			continue
		}
		// Handle whitespace: a space/comment run terminates the current token.
		in_space := unicode.is_white_space(rune(d))
		if in_space {
			if already_in_space {
				continue
			}
			already_in_space = true
			// switch to next value
			current_field += 1
			if current_field == len(header_fields) {
				// header byte length is 1-index so we'll increment again
				length += 1
				break parse_loop
			}
			current_value = header_fields[current_field]
		} else {
			already_in_space = false
			if !unicode.is_digit(rune(d)) {
				err = Format_Error.Invalid_Header_Token_Character
				return
			}
			// Accumulate the decimal value digit by digit.
			val := int(d - '0')
			current_value^ = current_value^ * 10 + val
		}
	}
	// Derive channel count and bit depth from the format and maxval.
	header.channels = 3 if header.format in PPM else 1
	header.depth = 16 if header.maxval > int(max(u8)) else 8
	// limit checking
	if current_field < len(header_fields) {
		err = Format_Error.Incomplete_Header
		return
	}
	if header.width < 1 \
	|| header.height < 1 \
	|| header.maxval < 1 || header.maxval > int(max(u16)) {
		fmt.printf("[pnm] Header: {{width = %v, height = %v, maxval: %v}}\n", header.width, header.height, header.maxval)
		err = .Invalid_Header_Value
		return
	}
	// NOTE(review): the length bookkeeping (+1 at the break, -1 here) appears
	// to compensate for the loop counting the terminating whitespace byte —
	// presumably so `length` points just past the header; verify against the
	// decode offsets before changing it.
	length -= 1
	err = Format_Error.None
	return
}
// Parse a PAM (P7) header. PAM headers are line-oriented:
// "P7\n" followed by FIELD VALUE lines, terminated by "ENDHDR\n".
// TUPLTYPE may span multiple lines; its parts are joined with spaces.
// The returned header's `tupltype` is cloned with `allocator` and is owned
// by the caller (freed via header_destroy).
@(private)
_parse_header_pam :: proc(data: []byte, allocator := context.allocator) -> (header: Header, length: int, err: Error) {
	context.allocator = allocator
	// the spec needs the newline apparently
	if string(data[0:3]) != "P7\n" {
		err = .Invalid_Signature
		return
	}
	header.format = .P7
	SIGNATURE_LENGTH :: 3
	HEADER_END :: "ENDHDR\n"
	// We can already work out the size of the header by locating ENDHDR.
	header_end_index := strings.index(string(data), HEADER_END)
	if header_end_index == -1 {
		err = Format_Error.Incomplete_Header
		return
	}
	length = header_end_index + len(HEADER_END)
	// Temp string buffer for accumulating (possibly multi-line) TUPLTYPE.
	tupltype: strings.Builder
	strings.init_builder(&tupltype, context.temp_allocator); defer strings.destroy_builder(&tupltype)
	fmt.sbprint(&tupltype, "")
	// PAM uses actual lines, so we can iterate easily
	line_iterator := string(data[SIGNATURE_LENGTH : header_end_index])
	parse_loop: for line in strings.split_lines_iterator(&line_iterator) {
		line := line
		// Skip blank lines and '#' comment lines.
		if len(line) == 0 || line[0] == '#' {
			continue
		}
		// fields_iterator consumes the field name and advances `line`,
		// so `value` below is the remainder after the field name.
		field, ok := strings.fields_iterator(&line)
		value := strings.trim_space(line)
		// the field will change, but the logic stays the same
		current_field: ^int
		switch field {
		case "WIDTH": current_field = &header.width
		case "HEIGHT": current_field = &header.height
		case "DEPTH": current_field = &header.channels
		case "MAXVAL": current_field = &header.maxval
		case "TUPLTYPE":
			if len(value) == 0 {
				err = .Invalid_Header_Value
				return
			}
			// Join repeated TUPLTYPE lines with a single space.
			if len(tupltype.buf) == 0 {
				fmt.sbprint(&tupltype, value)
			} else {
				fmt.sbprint(&tupltype, "", value)
			}
			continue
		case:
			// Unknown fields are ignored.
			continue
		}
		// Each numeric field may appear at most once.
		if current_field^ != 0 {
			err = Format_Error.Duplicate_Header_Field
			return
		}
		current_field^, ok = strconv.parse_int(value)
		if !ok {
			err = Format_Error.Invalid_Header_Value
			return
		}
	}
	// Derive bit depth from maxval.
	header.depth = 16 if header.maxval > int(max(u8)) else 8
	// limit checking
	if header.width < 1 \
	|| header.height < 1 \
	|| header.maxval < 1 \
	|| header.maxval > int(max(u16)) {
		fmt.printf("[pam] Header: {{width = %v, height = %v, maxval: %v}}\n", header.width, header.height, header.maxval)
		err = Format_Error.Invalid_Header_Value
		return
	}
	// Clone out of the temp builder so the caller owns the string.
	header.tupltype = strings.clone(strings.to_string(tupltype))
	err = Format_Error.None
	return
}
// Parse a PFM (Pf/PF) header: signature, width, height, then a scale value
// whose sign encodes endianness (negative = little-endian).
// `length` is the byte offset of the float payload within `data`.
@(private)
_parse_header_pfm :: proc(data: []byte) -> (header: Header, length: int, err: Error) {
	// we can just cycle through tokens for PFM
	field_iterator := string(data)
	field, ok := strings.fields_iterator(&field_iterator)
	switch field {
	case "Pf":
		// Greyscale float map.
		header.format = .Pf
		header.channels = 1
	case "PF":
		// RGB float map.
		header.format = .PF
		header.channels = 3
	case:
		err = .Invalid_Signature
		return
	}
	// floating point
	header.depth = 32
	// width
	field, ok = strings.fields_iterator(&field_iterator)
	if !ok {
		err = Format_Error.Incomplete_Header
		return
	}
	header.width, ok = strconv.parse_int(field)
	if !ok {
		err = Format_Error.Invalid_Header_Value
		return
	}
	// height
	field, ok = strings.fields_iterator(&field_iterator)
	if !ok {
		err = Format_Error.Incomplete_Header
		return
	}
	header.height, ok = strconv.parse_int(field)
	if !ok {
		err = Format_Error.Invalid_Header_Value
		return
	}
	// scale (sign is endianness)
	field, ok = strings.fields_iterator(&field_iterator)
	if !ok {
		err = Format_Error.Incomplete_Header
		return
	}
	header.scale, ok = strconv.parse_f32(field)
	if !ok {
		err = Format_Error.Invalid_Header_Value
		return
	}
	// Normalize: store a positive scale and an explicit endianness flag.
	if header.scale < 0.0 {
		header.little_endian = true
		header.scale = -header.scale
	}
	// Pointer math to get the header size: the iterator now points past the
	// last consumed token; +1 skips the single whitespace byte after it.
	length = int((uintptr(raw_data(field_iterator)) + 1) - uintptr(raw_data(data)))
	// limit checking
	if header.width < 1 \
	|| header.height < 1 \
	|| header.scale == 0.0 {
		fmt.printf("[pfm] Header: {{width = %v, height = %v, scale: %v}}\n", header.width, header.height, header.scale)
		err = .Invalid_Header_Value
		return
	}
	err = Format_Error.None
	return
}
// Decode the pixel payload in `data` into `img.pixels`, according to the
// already-parsed `header`. The image's dimensions/channels/depth are taken
// from the header; the pixel buffer is allocated here with `allocator`.
decode_image :: proc(img: ^Image, header: Header, data: []byte, allocator := context.allocator) -> (err: Error) {
	assert(img != nil)
	context.allocator = allocator
	img.width = header.width
	img.height = header.height
	img.channels = header.channels
	img.depth = header.depth
	buffer_size := image.compute_buffer_size(img.width, img.height, img.channels, img.depth)
	// Binary payloads have a fixed size, so we can validate up front.
	if header.format in BINARY {
		if len(data) < buffer_size {
			fmt.printf("len(data): %v, buffer size: %v\n", len(data), buffer_size)
			return .Buffer_Too_Small
		}
	}
	// for ASCII and P4, we use length for the termination condition, so start at 0
	// BINARY will be a simple memcopy so the buffer length should also be initialised
	if header.format in ASCII || header.format == .P4 {
		bytes.buffer_init_allocator(&img.pixels, 0, buffer_size)
	} else {
		bytes.buffer_init_allocator(&img.pixels, buffer_size, buffer_size)
	}
	switch header.format {
	// Compressed binary: unpack 8 pixels per byte, rows padded to whole bytes.
	case .P4:
		for d in data {
			for b in 1 ..= 8 {
				// Extract bits MSB-first; each bit becomes one 0/1 pixel byte.
				bit := byte(8 - b)
				pix := (d >> bit) & 1
				bytes.buffer_write_byte(&img.pixels, pix)
				// At a row boundary, skip the source byte's padding bits.
				if len(img.pixels.buf) % img.width == 0 {
					break
				}
			}
			// Stop once the whole image has been emitted.
			if len(img.pixels.buf) == cap(img.pixels.buf) {
				break
			}
		}
	// Simple binary: bulk copy, then convert to native endianness in place.
	case .P5, .P6, .P7, .Pf, .PF:
		copy(img.pixels.buf[:], data[:])
		// convert to native endianness
		if header.format in PFM {
			pixels := mem.slice_data_cast([]f32, img.pixels.buf[:])
			if header.little_endian {
				for p in &pixels {
					p = f32(transmute(f32le) p)
				}
			} else {
				for p in &pixels {
					p = f32(transmute(f32be) p)
				}
			}
		} else {
			// 16-bit integer samples are stored big-endian in the file.
			if img.depth == 16 {
				pixels := mem.slice_data_cast([]u16, img.pixels.buf[:])
				for p in &pixels {
					p = u16(transmute(u16be) p)
				}
			}
		}
	// If-it-looks-like-a-bitmap ASCII: any '0'/'1' character is a pixel,
	// everything else is ignored.
	case .P1:
		for c in data {
			switch c {
			case '0', '1':
				bytes.buffer_write_byte(&img.pixels, c - '0')
			}
			if len(img.pixels.buf) == cap(img.pixels.buf) {
				break
			}
		}
		if len(img.pixels.buf) < cap(img.pixels.buf) {
			err = Format_Error.Buffer_Too_Small
			return
		}
	// Token ASCII: whitespace-separated decimal sample values.
	case .P2, .P3:
		field_iterator := string(data)
		for field in strings.fields_iterator(&field_iterator) {
			value, ok := strconv.parse_int(field)
			if !ok {
				err = Format_Error.Invalid_Buffer_ASCII_Token
				return
			}
			//? do we want to enforce the maxval, the limit, or neither
			if value > int(max(u16)) /*header.maxval*/ {
				err = Format_Error.Invalid_Buffer_Value
				return
			}
			switch img.depth {
			case 8:
				bytes.buffer_write_byte(&img.pixels, u8(value))
			case 16:
				// Store 16-bit samples in native byte order.
				vb := transmute([2]u8) u16(value)
				bytes.buffer_write(&img.pixels, vb[:])
			}
			if len(img.pixels.buf) == cap(img.pixels.buf) {
				break
			}
		}
		if len(img.pixels.buf) < cap(img.pixels.buf) {
			err = Format_Error.Buffer_Too_Small
			return
		}
	}
	err = Format_Error.None
	return
}
// Automatically try to select an appropriate format to save to, based on
// `img.channels` and `img.depth`.
//
// Inputs:
//   prefer_binary:         pick the binary variant (P4/P5/P6) over ASCII (P1/P2/P3).
//   force_black_and_white: treat a 1-channel integer image as a bitmap (PBM).
//   pfm_scale:             scale value written into PFM (Pf/PF) headers.
//
// Returns `ok = false` when no format matches the channel/depth combination.
autoselect_pbm_format_from_image :: proc(img: ^Image, prefer_binary := true, force_black_and_white := false, pfm_scale := f32(1.0)) -> (res: Info, ok: bool) {
	/*
		PBM (P1, P4): Portable Bit Map, stores black and white images (1 channel)
		PGM (P2, P5): Portable Gray Map, stores greyscale images (1 channel, 1 or 2 bytes per value)
		PPM (P3, P6): Portable Pixel Map, stores colour images (3 channel, 1 or 2 bytes per value)
		PAM (P7    ): Portable Arbitrary Map, stores arbitrary channel images (1 or 2 bytes per value)
		PFM (Pf, PF): Portable Float Map, stores floating-point images (Pf: 1 channel, PF: 3 channel)
	*/
	using res.header
	width = img.width
	height = img.height
	channels = img.channels
	depth = img.depth
	maxval = 255 if img.depth == 8 else 65535
	little_endian = true if ODIN_ENDIAN == .Little else false
	// Assume we'll find a suitable format
	ok = true
	switch img.channels {
	case 1:
		// 32-bit float: must be a 1-channel Portable Float Map.
		if img.depth == 32 {
			format = .Pf
			// BUGFIX: the PFM scale belongs to the float formats (Pf/PF),
			// not to PAM where it was previously assigned.
			scale = pfm_scale
			return
		}
		if force_black_and_white {
			// Portable Bit Map
			format = .P4 if prefer_binary else .P1
			maxval = 1
			return
		} else {
			// Portable Gray Map
			format = .P5 if prefer_binary else .P2
			return
		}
	case 3:
		// 32-bit float: must be a 3-channel Portable Float Map.
		if img.depth == 32 {
			format = .PF
			scale = pfm_scale
			return
		}
		// Portable Pixel Map
		format = .P6 if prefer_binary else .P3
		return
	case:
		// Portable Arbitrary Map: any other channel count, integer depths only.
		if img.depth == 8 || img.depth == 16 {
			format = .P7
			return
		}
	}
	// We couldn't find a suitable format
	return {}, false
}
// Register this package's loader/destroyer with core:image at program init,
// so generic `image.load` can dispatch to Netpbm at runtime.
@(init, private)
_register :: proc() {
	// Adapter matching image.register's expected loader signature;
	// Netpbm takes no decode options, so `options` is ignored.
	loader :: proc(data: []byte, options: image.Options, allocator: mem.Allocator) -> (img: ^Image, err: Error) {
		return load_from_bytes(data, allocator)
	}
	destroyer :: proc(img: ^Image) {
		_ = destroy(img)
	}
	image.register(.NetPBM, loader, destroyer)
}

View File

@@ -242,17 +242,16 @@ srgb :: proc(c: image.PNG_Chunk) -> (res: sRGB, ok: bool) {
}
plte :: proc(c: image.PNG_Chunk) -> (res: PLTE, ok: bool) {
if c.header.type != .PLTE {
if c.header.type != .PLTE || c.header.length % 3 != 0 || c.header.length > 768 {
return {}, false
}
i := 0; j := 0; ok = true
for j < int(c.header.length) {
res.entries[i] = {c.data[j], c.data[j+1], c.data[j+2]}
i += 1; j += 3
plte := mem.slice_data_cast([]image.RGB_Pixel, c.data[:])
for color, i in plte {
res.entries[i] = color
}
res.used = u16(i)
return
res.used = u16(len(plte))
return res, true
}
splt :: proc(c: image.PNG_Chunk) -> (res: sPLT, ok: bool) {

View File

@@ -18,23 +18,16 @@ import "core:compress/zlib"
import "core:image"
import "core:os"
import "core:strings"
import "core:hash"
import "core:bytes"
import "core:io"
import "core:mem"
import "core:intrinsics"
/*
67_108_864 pixels max by default.
Maximum allowed dimensions are capped at 65535 * 65535.
*/
MAX_DIMENSIONS :: min(#config(PNG_MAX_DIMENSIONS, 8192 * 8192), 65535 * 65535)
// Limit chunk sizes.
// By default: IDAT = 8k x 8k x 16-bits + 8k filter bytes.
// The total number of pixels defaults to 64 Megapixel and can be tuned in image/common.odin.
/*
Limit chunk sizes.
By default: IDAT = 8k x 8k x 16-bits + 8k filter bytes.
*/
_MAX_IDAT_DEFAULT :: ( 8192 /* Width */ * 8192 /* Height */ * 2 /* 16-bit */) + 8192 /* Filter bytes */
_MAX_IDAT :: (65535 /* Width */ * 65535 /* Height */ * 2 /* 16-bit */) + 65535 /* Filter bytes */
@@ -64,7 +57,7 @@ Row_Filter :: enum u8 {
Paeth = 4,
}
PLTE_Entry :: [3]u8
PLTE_Entry :: image.RGB_Pixel
PLTE :: struct #packed {
entries: [256]PLTE_Entry,
@@ -244,7 +237,7 @@ append_chunk :: proc(list: ^[dynamic]image.PNG_Chunk, src: image.PNG_Chunk, allo
append(list, c)
if len(list) != length + 1 {
// Resize during append failed.
return mem.Allocator_Error.Out_Of_Memory
return .Unable_To_Allocate_Or_Resize
}
return
@@ -259,7 +252,7 @@ read_header :: proc(ctx: ^$C) -> (image.PNG_IHDR, Error) {
header := (^image.PNG_IHDR)(raw_data(c.data))^
// Validate IHDR
using header
if width == 0 || height == 0 || u128(width) * u128(height) > MAX_DIMENSIONS {
if width == 0 || height == 0 || u128(width) * u128(height) > image.MAX_DIMENSIONS {
return {}, .Invalid_Image_Dimensions
}
@@ -324,13 +317,12 @@ read_header :: proc(ctx: ^$C) -> (image.PNG_IHDR, Error) {
}
chunk_type_to_name :: proc(type: ^image.PNG_Chunk_Type) -> string {
t := transmute(^u8)type
return strings.string_from_ptr(t, 4)
return string(([^]u8)(type)[:4])
}
load_from_slice :: proc(slice: []u8, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
load_from_bytes :: proc(data: []byte, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
ctx := &compress.Context_Memory_Input{
input_data = slice,
input_data = data,
}
/*
@@ -350,10 +342,9 @@ load_from_file :: proc(filename: string, options := Options{}, allocator := cont
defer delete(data)
if ok {
return load_from_slice(data, options)
return load_from_bytes(data, options)
} else {
img = new(Image)
return img, compress.General_Error.File_Not_Found
return nil, .Unable_To_Read_File
}
}
@@ -366,6 +357,10 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
options -= {.info}
}
if .return_header in options && .return_metadata in options {
options -= {.return_header}
}
if .alpha_drop_if_present in options && .alpha_add_if_missing in options {
return {}, compress.General_Error.Incompatible_Options
}
@@ -377,13 +372,14 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
if img == nil {
img = new(Image)
}
img.which = .PNG
info := new(image.PNG_Info)
img.metadata = info
signature, io_error := compress.read_data(ctx, Signature)
if io_error != .None || signature != .PNG {
return img, .Invalid_PNG_Signature
return img, .Invalid_Signature
}
idat: []u8
@@ -392,7 +388,7 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
idat_length := u64(0)
c: image.PNG_Chunk
c: image.PNG_Chunk
ch: image.PNG_Chunk_Header
e: io.Error
@@ -473,6 +469,10 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
}
info.header = h
if .return_header in options && .return_metadata not_in options && .do_not_decompress_image not_in options {
return img, nil
}
case .PLTE:
seen_plte = true
// PLTE must appear before IDAT and can't appear for color types 0, 4.
@@ -540,9 +540,6 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
seen_iend = true
case .bKGD:
// TODO: Make sure that 16-bit bKGD + tRNS chunks return u16 instead of u16be
c = read_chunk(ctx) or_return
seen_bkgd = true
if .return_metadata in options {
@@ -594,23 +591,36 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
*/
final_image_channels += 1
seen_trns = true
if .Paletted in header.color_type {
if len(c.data) > 256 {
return img, .TNRS_Invalid_Length
}
} else if .Color in header.color_type {
if len(c.data) != 6 {
return img, .TNRS_Invalid_Length
}
} else if len(c.data) != 2 {
return img, .TNRS_Invalid_Length
}
if info.header.bit_depth < 8 && .Paletted not_in info.header.color_type {
// Rescale tRNS data so key matches intensity
dsc := depth_scale_table
dsc := depth_scale_table
scale := dsc[info.header.bit_depth]
if scale != 1 {
key := mem.slice_data_cast([]u16be, c.data)[0] * u16be(scale)
c.data = []u8{0, u8(key & 255)}
}
}
trns = c
case .iDOT, .CbGI:
case .iDOT, .CgBI:
/*
iPhone PNG bastardization that doesn't adhere to spec with broken IDAT chunk.
We're not going to add support for it. If you have the misfortunte of coming
We're not going to add support for it. If you have the misfortune of coming
across one of these files, use a utility to defry it.
*/
return img, .Image_Does_Not_Adhere_to_Spec
@@ -635,6 +645,10 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
return img, .IDAT_Missing
}
if .Paletted in header.color_type && !seen_plte {
return img, .PLTE_Missing
}
/*
Calculate the expected output size, to help `inflate` make better decisions about the output buffer.
We'll also use it to check the returned buffer size is what we expected it to be.
@@ -683,15 +697,6 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
return {}, defilter_error
}
/*
Now we'll handle the relocoring of paletted images, handling of tRNS chunks,
and we'll expand grayscale images to RGB(A).
For the sake of convenience we return only RGB(A) images. In the future we
may supply an option to return Gray/Gray+Alpha as-is, in which case RGB(A)
will become the default.
*/
if .Paletted in header.color_type && .do_not_expand_indexed in options {
return img, nil
}
@@ -699,7 +704,10 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
return img, nil
}
/*
Now we're going to optionally apply various post-processing stages,
to for example expand grayscale, apply a palette, premultiply alpha, etc.
*/
raw_image_channels := img.channels
out_image_channels := 3
@@ -737,7 +745,7 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
dest_raw_size := compute_buffer_size(int(header.width), int(header.height), out_image_channels, 8)
t := bytes.Buffer{}
if !resize(&t.buf, dest_raw_size) {
return {}, mem.Allocator_Error.Out_Of_Memory
return {}, .Unable_To_Allocate_Or_Resize
}
i := 0; j := 0
@@ -818,7 +826,7 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
dest_raw_size := compute_buffer_size(int(header.width), int(header.height), out_image_channels, 16)
t := bytes.Buffer{}
if !resize(&t.buf, dest_raw_size) {
return {}, mem.Allocator_Error.Out_Of_Memory
return {}, .Unable_To_Allocate_Or_Resize
}
p16 := mem.slice_data_cast([]u16, temp.buf[:])
@@ -1017,7 +1025,7 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
dest_raw_size := compute_buffer_size(int(header.width), int(header.height), out_image_channels, 8)
t := bytes.Buffer{}
if !resize(&t.buf, dest_raw_size) {
return {}, mem.Allocator_Error.Out_Of_Memory
return {}, .Unable_To_Allocate_Or_Resize
}
p := mem.slice_data_cast([]u8, temp.buf[:])
@@ -1204,7 +1212,6 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
return img, nil
}
filter_paeth :: #force_inline proc(left, up, up_left: u8) -> u8 {
aa, bb, cc := i16(left), i16(up), i16(up_left)
p := aa + bb - cc
@@ -1526,7 +1533,7 @@ defilter :: proc(img: ^Image, filter_bytes: ^bytes.Buffer, header: ^image.PNG_IH
num_bytes := compute_buffer_size(width, height, channels, depth == 16 ? 16 : 8)
if !resize(&img.pixels.buf, num_bytes) {
return mem.Allocator_Error.Out_Of_Memory
return .Unable_To_Allocate_Or_Resize
}
filter_ok: bool
@@ -1568,7 +1575,7 @@ defilter :: proc(img: ^Image, filter_bytes: ^bytes.Buffer, header: ^image.PNG_IH
temp: bytes.Buffer
temp_len := compute_buffer_size(x, y, channels, depth == 16 ? 16 : 8)
if !resize(&temp.buf, temp_len) {
return mem.Allocator_Error.Out_Of_Memory
return .Unable_To_Allocate_Or_Resize
}
params := Filter_Params{
@@ -1630,4 +1637,10 @@ defilter :: proc(img: ^Image, filter_bytes: ^bytes.Buffer, header: ^image.PNG_IH
return nil
}
load :: proc{load_from_file, load_from_slice, load_from_context}
load :: proc{load_from_file, load_from_bytes, load_from_context}
@(init, private)
_register :: proc() {
image.register(.PNG, load_from_bytes, destroy)
}

411
core/image/qoi/qoi.odin Normal file
View File

@@ -0,0 +1,411 @@
/*
Copyright 2022 Jeroen van Rijn <nom@duclavier.com>.
Made available under Odin's BSD-3 license.
List of contributors:
Jeroen van Rijn: Initial implementation.
*/
// package qoi implements a QOI image reader
//
// The QOI specification is at https://qoiformat.org.
package qoi
import "core:image"
import "core:compress"
import "core:bytes"
import "core:os"
// Shorthand aliases for the shared types declared in core:image.
Error :: image.Error
Image :: image.Image
Options :: image.Options
RGB_Pixel :: image.RGB_Pixel
RGBA_Pixel :: image.RGBA_Pixel
// Encode `img` as QOI into `output`. Only 8-bit images with 3 or 4 channels
// are supported. On success `output.buf` holds exactly the encoded stream
// (header + opcodes + trailer), resized down from a worst-case allocation.
save_to_memory :: proc(output: ^bytes.Buffer, img: ^Image, options := Options{}, allocator := context.allocator) -> (err: Error) {
	context.allocator = allocator
	if img == nil {
		return .Invalid_Input_Image
	}
	if output == nil {
		return .Invalid_Output
	}
	pixels := img.width * img.height
	if pixels == 0 || pixels > image.MAX_DIMENSIONS {
		return .Invalid_Input_Image
	}
	// QOI supports only 8-bit images with 3 or 4 channels.
	if img.depth != 8 || img.channels < 3 || img.channels > 4 {
		return .Invalid_Input_Image
	}
	// The pixel buffer must be exactly width * height * channels bytes.
	if img.channels * pixels != len(img.pixels.buf) {
		return .Invalid_Input_Image
	}
	written := 0
	// Calculate and allocate the worst-case size (every pixel a full literal).
	// We shrink the buffer to the actually-written size at the end.
	max_size := pixels * (img.channels + 1) + size_of(image.QOI_Header) + size_of(u64be)
	if !resize(&output.buf, max_size) {
		return .Unable_To_Allocate_Or_Resize
	}
	header := image.QOI_Header{
		magic = image.QOI_Magic,
		width = u32be(img.width),
		height = u32be(img.height),
		channels = u8(img.channels),
		color_space = .Linear if .qoi_all_channels_linear in options else .sRGB,
	}
	header_bytes := transmute([size_of(image.QOI_Header)]u8)header
	copy(output.buf[written:], header_bytes[:])
	written += size_of(image.QOI_Header)
	/*
		Encode loop starts here.
	*/
	// `seen` is QOI's 64-entry table of previously encountered pixels,
	// indexed by qoi_hash; the starting pixel is opaque black.
	seen: [64]RGBA_Pixel
	pix := RGBA_Pixel{0, 0, 0, 255}
	prev := pix
	seen[qoi_hash(pix)] = pix
	input := img.pixels.buf[:]
	run := u8(0)
	for len(input) > 0 {
		if img.channels == 4 {
			pix = (^RGBA_Pixel)(raw_data(input))^
		} else {
			// 3-channel input: alpha stays whatever the previous pixel had (255 initially).
			pix.rgb = (^RGB_Pixel)(raw_data(input))^
		}
		input = input[img.channels:]
		if pix == prev {
			run += 1
			// As long as the pixel matches the last one, accumulate the run total.
			// If we reach the max run length or the end of the image, write the run.
			if run == 62 || len(input) == 0 {
				// Encode and write run (stored with a bias of -1)
				output.buf[written] = u8(QOI_Opcode_Tag.RUN) | (run - 1)
				written += 1
				run = 0
			}
		} else {
			if run > 0 {
				// The pixel differs from the previous one, but we still need to write the pending run.
				// Encode and write run
				output.buf[written] = u8(QOI_Opcode_Tag.RUN) | (run - 1)
				written += 1
				run = 0
			}
			index := qoi_hash(pix)
			if seen[index] == pix {
				// Write indexed pixel
				output.buf[written] = u8(QOI_Opcode_Tag.INDEX) | index
				written += 1
			} else {
				// Add pixel to index
				seen[index] = pix
				// If the alpha matches the previous pixel's alpha, we don't need to write a full RGBA literal.
				if pix.a == prev.a {
					// Delta (wraps modulo 256 — u8 arithmetic)
					d := pix.rgb - prev.rgb
					// DIFF, biased and modulo 256
					_d := d + 2
					// LUMA, biased and modulo 256
					_l := RGB_Pixel{ d.r - d.g + 8, d.g + 32, d.b - d.g + 8 }
					if _d.r < 4 && _d.g < 4 && _d.b < 4 {
						// Delta is between -2 and 1 inclusive
						output.buf[written] = u8(QOI_Opcode_Tag.DIFF) | _d.r << 4 | _d.g << 2 | _d.b
						written += 1
					} else if _l.r < 16 && _l.g < 64 && _l.b < 16 {
						// Biased luma is between {-8..7, -32..31, -8..7}
						output.buf[written ] = u8(QOI_Opcode_Tag.LUMA) | _l.g
						output.buf[written + 1] = _l.r << 4 | _l.b
						written += 2
					} else {
						// Write RGB literal
						output.buf[written] = u8(QOI_Opcode_Tag.RGB)
						pix_bytes := transmute([4]u8)pix
						copy(output.buf[written + 1:], pix_bytes[:3])
						written += 4
					}
				} else {
					// Write RGBA literal
					output.buf[written] = u8(QOI_Opcode_Tag.RGBA)
					pix_bytes := transmute([4]u8)pix
					copy(output.buf[written + 1:], pix_bytes[:])
					written += 5
				}
			}
		}
		prev = pix
	}
	// Seven 0x00 bytes followed by 0x01 marks the end of the QOI stream.
	trailer := []u8{0, 0, 0, 0, 0, 0, 0, 1}
	copy(output.buf[written:], trailer[:])
	written += len(trailer)
	// Reclaim the unused tail of the worst-case allocation.
	resize(&output.buf, written)
	return nil
}
// Encode `img` as QOI and write the result to the file at `output`.
// The intermediate encode buffer is freed before returning.
save_to_file :: proc(output: string, img: ^Image, options := Options{}, allocator := context.allocator) -> (err: Error) {
	context.allocator = allocator

	encoded := &bytes.Buffer{}
	defer bytes.buffer_destroy(encoded)

	save_to_memory(encoded, img, options) or_return
	if os.write_entire_file(output, encoded.buf[:]) {
		return nil
	}
	return .Unable_To_Write_File
}

// Overload set covering in-memory and file output.
save :: proc{save_to_memory, save_to_file}
// Decode a QOI image from an in-memory byte slice by wrapping it in a
// memory-input context and delegating to load_from_context.
load_from_bytes :: proc(data: []byte, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
	reader := &compress.Context_Memory_Input{
		input_data = data,
	}
	return load_from_context(reader, options, allocator)
}
// Read the file at `filename` into memory and decode it as a QOI image.
// The temporary file contents are freed before returning.
load_from_file :: proc(filename: string, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
	context.allocator = allocator

	contents, read_ok := os.read_entire_file(filename)
	defer delete(contents)

	if !read_ok {
		return nil, .Unable_To_Read_File
	}
	return load_from_bytes(contents, options)
}
// Decode a QOI stream from a compress input context into a new (or supplied)
// Image. Honors the common image Options (.info, .return_metadata,
// .do_not_decompress_image, .alpha_add_if_missing, .alpha_premultiply).
@(optimization_mode="speed")
load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
	context.allocator = allocator
	options := options
	// .info is shorthand for "metadata only, don't decode pixels".
	if .info in options {
		options |= {.return_metadata, .do_not_decompress_image}
		options -= {.info}
	}
	// .return_metadata subsumes .return_header.
	if .return_header in options && .return_metadata in options {
		options -= {.return_header}
	}
	header := image.read_data(ctx, image.QOI_Header) or_return
	if header.magic != image.QOI_Magic {
		return img, .Invalid_Signature
	}
	if img == nil {
		img = new(Image)
	}
	img.which = .QOI
	if .return_metadata in options {
		info := new(image.QOI_Info)
		info.header = header
		img.metadata = info
	}
	// Validate the header fields before allocating anything pixel-sized.
	if header.channels != 3 && header.channels != 4 {
		return img, .Invalid_Number_Of_Channels
	}
	if header.color_space != .sRGB && header.color_space != .Linear {
		return img, .Invalid_Color_Space
	}
	if header.width == 0 || header.height == 0 {
		return img, .Invalid_Image_Dimensions
	}
	total_pixels := header.width * header.height
	if total_pixels > image.MAX_DIMENSIONS {
		return img, .Image_Dimensions_Too_Large
	}
	img.width = int(header.width)
	img.height = int(header.height)
	img.channels = 4 if .alpha_add_if_missing in options else int(header.channels)
	img.depth = 8
	if .do_not_decompress_image in options {
		// Report the file's real channel count when we skip decoding.
		img.channels = int(header.channels)
		return
	}
	bytes_needed := image.compute_buffer_size(int(header.width), int(header.height), img.channels, 8)
	if !resize(&img.pixels.buf, bytes_needed) {
		return img, .Unable_To_Allocate_Or_Resize
	}
	/*
		Decode loop starts here.
	*/
	// `seen` is QOI's 64-entry previously-seen-pixel table, indexed by
	// qoi_hash; the starting pixel is opaque black.
	seen: [64]RGBA_Pixel
	pix := RGBA_Pixel{0, 0, 0, 255}
	seen[qoi_hash(pix)] = pix
	pixels := img.pixels.buf[:]
	decode: for len(pixels) > 0 {
		data := image.read_u8(ctx) or_return
		// Try the 8-bit opcodes first; anything else is a 2-bit-tagged opcode.
		tag := QOI_Opcode_Tag(data)
		#partial switch tag {
		case .RGB:
			pix.rgb = image.read_data(ctx, RGB_Pixel) or_return
			#no_bounds_check {
				seen[qoi_hash(pix)] = pix
			}
		case .RGBA:
			pix = image.read_data(ctx, RGBA_Pixel) or_return
			#no_bounds_check {
				seen[qoi_hash(pix)] = pix
			}
		case:
			// 2-bit tag
			tag = QOI_Opcode_Tag(data & QOI_Opcode_Mask)
			#partial switch tag {
			case .INDEX:
				pix = seen[data & 63]
			case .DIFF:
				// Per-channel deltas stored with a bias of 2 (-2..1).
				diff_r := ((data >> 4) & 3) - 2
				diff_g := ((data >> 2) & 3) - 2
				diff_b := ((data >> 0) & 3) - 2
				pix += {diff_r, diff_g, diff_b, 0}
				#no_bounds_check {
					seen[qoi_hash(pix)] = pix
				}
			case .LUMA:
				// Green delta (bias 32) plus red/blue deltas relative to green (bias 8).
				data2 := image.read_u8(ctx) or_return
				diff_g := (data & 63) - 32
				diff_r := diff_g - 8 + ((data2 >> 4) & 15)
				diff_b := diff_g - 8 + (data2 & 15)
				pix += {diff_r, diff_g, diff_b, 0}
				#no_bounds_check {
					seen[qoi_hash(pix)] = pix
				}
			case .RUN:
				// Run length stored with a bias of -1; reject runs that
				// would overflow the remaining output buffer.
				if length := int(data & 63) + 1; (length * img.channels) > len(pixels) {
					return img, .Corrupt
				} else {
					#no_bounds_check for in 0..<length {
						copy(pixels, pix[:img.channels])
						pixels = pixels[img.channels:]
					}
				}
				// The run already emitted its pixels; skip the shared emit below.
				continue decode
			case:
				unreachable()
			}
		}
		// Emit the (single) decoded pixel for every non-RUN opcode.
		#no_bounds_check {
			copy(pixels, pix[:img.channels])
			pixels = pixels[img.channels:]
		}
	}
	// The byte stream's end is marked with 7 0x00 bytes followed by a single 0x01 byte.
	trailer, trailer_err := compress.read_data(ctx, u64be)
	if trailer_err != nil || trailer != 0x1 {
		return img, .Missing_Or_Corrupt_Trailer
	}
	if .alpha_premultiply in options && !image.alpha_drop_if_present(img, options) {
		return img, .Post_Processing_Error
	}
	return
}
// Overload set: load a QOI image from a file path, a byte slice, or an
// existing stream context (load_from_file / load_from_bytes / load_from_context).
load :: proc{load_from_file, load_from_bytes, load_from_context}
/*
	Cleanup of image-specific data.
	Releases the pixel buffer, any QOI-specific metadata, and the Image struct
	itself. Passing nil is a no-op, so this is safe to call after a failed load.
*/
destroy :: proc(img: ^Image) {
	if img == nil {
		/*
			Nothing to do.
			Load must've returned with an error.
		*/
		return
	}
	bytes.buffer_destroy(&img.pixels)
	// Free the metadata only when it is this package's QOI_Info variant.
	if v, ok := img.metadata.(^image.QOI_Info); ok {
		free(v)
	}
	free(img)
}
// QOI_Opcode_Tag enumerates the chunk opcodes of the QOI byte stream.
// 2-bit tags occupy the top two bits of the byte (remaining 6 bits are data);
// the two 8-bit tags use the whole byte.
QOI_Opcode_Tag :: enum u8 {
	// 2-bit tags
	INDEX = 0b0000_0000, // 6-bit index into color array follows
	DIFF  = 0b0100_0000, // 3x (RGB) 2-bit difference follows (-2..1), bias of 2.
	LUMA  = 0b1000_0000, // Luma difference
	RUN   = 0b1100_0000, // Run length encoding, bias -1
	// 8-bit tags
	RGB   = 0b1111_1110, // Raw RGB pixel follows
	RGBA  = 0b1111_1111, // Raw RGBA pixel follows
}
// Top two bits select the 2-bit opcode; bottom six bits carry its payload.
QOI_Opcode_Mask :: 0b1100_0000
QOI_Data_Mask   :: 0b0011_1111
// qoi_hash maps a pixel to its slot in the 64-entry previously-seen color
// array, per the QOI specification: (r*3 + g*5 + b*7 + a*11) % 64.
// The sum is accumulated in u16 so the byte products cannot overflow.
qoi_hash :: #force_inline proc(pixel: RGBA_Pixel) -> (index: u8) {
	sum := u16(pixel.r)*3 + u16(pixel.g)*5 + u16(pixel.b)*7 + u16(pixel.a)*11
	return u8(sum & 63)
}
// _register runs at program initialization and registers this package's
// loader (load_from_bytes) and destroyer with the central image registry
// under the .QOI format tag.
@(init, private)
_register :: proc() {
	image.register(.QOI, load_from_bytes, destroy)
}

101
core/image/tga/tga.odin Normal file
View File

@@ -0,0 +1,101 @@
/*
Copyright 2022 Jeroen van Rijn <nom@duclavier.com>.
Made available under Odin's BSD-3 license.
List of contributors:
Jeroen van Rijn: Initial implementation.
*/
// package tga implements a TGA image writer for 8-bit RGB and RGBA images.
package tga
import "core:mem"
import "core:image"
import "core:bytes"
import "core:os"
Error :: image.Error
Image :: image.Image
Options :: image.Options
RGB_Pixel :: image.RGB_Pixel
RGBA_Pixel :: image.RGBA_Pixel
// save_to_memory encodes `img` as an uncompressed true-color TGA file into `output`.
// Only 8-bit images with 3 (RGB) or 4 (RGBA) channels are supported; pixel data
// is swizzled into the BGR(A) byte order TGA expects. Returns nil on success.
// NOTE(review): `options` is accepted but not read anywhere in this body — confirm intended.
save_to_memory :: proc(output: ^bytes.Buffer, img: ^Image, options := Options{}, allocator := context.allocator) -> (err: Error) {
	context.allocator = allocator
	if img == nil {
		return .Invalid_Input_Image
	}
	if output == nil {
		return .Invalid_Output
	}
	pixels := img.width * img.height
	// Dimensions are stored as u16le in the header, so each side is capped at 65535.
	if pixels == 0 || pixels > image.MAX_DIMENSIONS || img.width > 65535 || img.height > 65535 {
		return .Invalid_Input_Image
	}
	// Our TGA writer supports only 8-bit images with 3 or 4 channels.
	if img.depth != 8 || img.channels < 3 || img.channels > 4 {
		return .Invalid_Input_Image
	}
	// The pixel buffer must be exactly as large as the dimensions imply.
	if img.channels * pixels != len(img.pixels.buf) {
		return .Invalid_Input_Image
	}
	written := 0
	// Calculate and allocate necessary space.
	necessary := pixels * img.channels + size_of(image.TGA_Header)
	if !resize(&output.buf, necessary) {
		return .Unable_To_Allocate_Or_Resize
	}
	header := image.TGA_Header{
		data_type_code = 0x02, // Color, uncompressed.
		dimensions = {u16le(img.width), u16le(img.height)},
		bits_per_pixel = u8(img.depth * img.channels),
		image_descriptor = 1 << 5, // Origin is top left.
	}
	// Write the raw header bytes, then the pixel payload directly after it.
	header_bytes := transmute([size_of(image.TGA_Header)]u8)header
	copy(output.buf[written:], header_bytes[:])
	written += size_of(image.TGA_Header)
	/*
		Encode loop starts here.
		TGA stores pixels in BGR(A) order, so swizzle while copying.
	*/
	if img.channels == 3 {
		pix := mem.slice_data_cast([]RGB_Pixel, img.pixels.buf[:])
		out := mem.slice_data_cast([]RGB_Pixel, output.buf[written:])
		for p, i in pix {
			out[i] = p.bgr
		}
	} else if img.channels == 4 {
		pix := mem.slice_data_cast([]RGBA_Pixel, img.pixels.buf[:])
		out := mem.slice_data_cast([]RGBA_Pixel, output.buf[written:])
		for p, i in pix {
			out[i] = p.bgra
		}
	}
	return nil
}
// save_to_file encodes `img` as TGA in memory, then writes the result to the
// file at `output`. Returns .Unable_To_Write_File when the disk write fails.
save_to_file :: proc(output: string, img: ^Image, options := Options{}, allocator := context.allocator) -> (err: Error) {
	context.allocator = allocator
	buffer: bytes.Buffer
	defer bytes.buffer_destroy(&buffer)
	save_to_memory(&buffer, img, options) or_return
	if !os.write_entire_file(output, buffer.buf[:]) {
		return .Unable_To_Write_File
	}
	return nil
}
// Overload set: save a TGA image either into a bytes.Buffer or to a file path.
save :: proc{save_to_memory, save_to_file}

179
core/image/which.odin Normal file
View File

@@ -0,0 +1,179 @@
package image
import "core:os"
// Which_File_Type lists the image file formats that `which` can recognize
// from a file's leading bytes.
Which_File_Type :: enum {
	Unknown,
	BMP,       // Windows Bitmap
	DjVu,      // AT&T DjVu file format
	EXR,       // OpenEXR
	FLIF,      // Free Lossless Image Format
	GIF,       // Graphics Interchange Format
	HDR,       // Radiance RGBE HDR
	ICNS,      // Apple Icon Image
	JPEG,      // JPEG/JFIF/Exif
	JPEG_2000, // JPEG 2000 codestream or container
	JPEG_XL,   // JPEG XL container
	NetPBM,    // NetPBM family
	PIC,       // Softimage PIC
	PNG,       // Portable Network Graphics
	PSD,       // Photoshop PSD
	QOI,       // Quite Okay Image
	SGI_RGB,   // Silicon Graphics Image RGB file format
	Sun_Rast,  // Sun Raster Graphic
	TGA,       // Targa Truevision
	TIFF,      // Tagged Image File Format
	WebP,      // WebP (RIFF container)
	XBM,       // X BitMap
}
// Overload set: detect an image's file type from a byte slice or a file path.
which :: proc{
	which_bytes,
	which_file,
}
// which_bytes sniffs the (up to) first 128 bytes of `data` and returns the
// image file type it appears to be, or .Unknown when no signature matches.
// Short inputs are safe: `data` is copied into a zeroed 128-byte scratch buffer.
which_bytes :: proc(data: []byte) -> Which_File_Type {
	// Heuristic TGA check; TGA has no magic number, so we validate header fields instead.
	test_tga :: proc(s: string) -> bool {
		get8 :: #force_inline proc(s: ^string) -> u8 {
			v := s[0]
			s^ = s[1:]
			return v
		}
		get16le :: #force_inline proc(s: ^string) -> u16 {
			// Little-endian 16-bit read: low byte first, high byte shifted by 8.
			// (Was `<<16`, which discarded the high byte entirely, so widths and
			// heights of exact multiples of 256 were misread as 0.)
			v := u16(s[0]) | u16(s[1])<<8
			s^ = s[2:]
			return v
		}
		s := s
		s = s[1:] // skip offset
		color_type := get8(&s)
		if color_type > 1 {
			return false
		}
		image_type := get8(&s) // image type
		if color_type == 1 { // Colormap (Paletted) Image
			if image_type != 1 && image_type != 9 { // color type requires 1 or 9
				return false
			}
			s = s[4:] // skip index of first colormap
			bpcme := get8(&s) // check bits per colormap entry
			if bpcme != 8 && bpcme != 15 && bpcme != 16 && bpcme != 24 && bpcme != 32 {
				return false
			}
			s = s[4:] // skip image origin (x, y)
		} else { // Normal image without colormap
			if image_type != 2 && image_type != 3 && image_type != 10 && image_type != 11 {
				return false
			}
			s = s[9:] // skip colormap specification
		}
		if get16le(&s) < 1 || get16le(&s) < 1 { // test width and height
			return false
		}
		bpp := get8(&s) // bits per pixel
		if color_type == 1 && bpp != 8 && bpp != 16 {
			return false
		}
		if bpp != 8 && bpp != 15 && bpp != 16 && bpp != 24 && bpp != 32 {
			return false
		}
		return true
	}
	header: [128]byte
	copy(header[:], data)
	s := string(header[:])
	switch {
	case s[:2] == "BM":
		return .BMP
	case s[:8] == "AT&TFORM":
		switch s[12:16] {
		case "DJVU", "DJVM":
			return .DjVu
		}
	case s[:4] == "\x76\x2f\x31\x01":
		return .EXR
	case s[:6] == "GIF87a", s[:6] == "GIF89a":
		return .GIF
	case s[6:10] == "JFIF", s[6:10] == "Exif":
		return .JPEG
	case s[:3] == "\xff\xd8\xff":
		// The JPEG marker type immediately follows FF D8 FF, i.e. at offset 3,
		// as the 12-byte JFIF signature below also shows (\xe0 at index 3).
		// (Was `s[4]`, an off-by-one that inspected the segment length byte.)
		switch s[3] {
		case 0xdb, 0xee, 0xe1, 0xe0:
			return .JPEG
		}
		switch {
		case s[:12] == "\xff\xd8\xff\xe0\x00\x10\x4a\x46\x49\x46\x00\x01":
			return .JPEG
		}
	case s[:4] == "\xff\x4f\xff\x51", s[:12] == "\x00\x00\x00\x0c\x6a\x50\x20\x20\x0d\x0a\x87\x0a":
		return .JPEG_2000
	case s[:12] == "\x00\x00\x00\x0c\x4a\x58\x4c\x20\x0d\x0a\x87\x0a":
		return .JPEG_XL
	case s[0] == 'P':
		// NetPBM magic is 'P' + type character + whitespace (space included).
		switch s[2] {
		case ' ', '\t', '\n', '\r':
			switch s[1] {
			case '1', '4': // PBM
				return .NetPBM
			case '2', '5': // PGM
				return .NetPBM
			case '3', '6': // PPM
				return .NetPBM
			case '7': // PAM
				return .NetPBM
			case 'F', 'f': // PFM
				return .NetPBM
			}
		}
	case s[:8] == "\x89PNG\r\n\x1a\n":
		return .PNG
	case s[:4] == "qoif":
		return .QOI
	case s[:2] == "\x01\xda":
		return .SGI_RGB
	case s[:4] == "\x59\xA6\x6A\x95":
		return .Sun_Rast
	// TIFF: big-endian "MM" is followed by 00 2A, little-endian "II" by 2A 00.
	// (The two byte orders were previously swapped.)
	case s[:4] == "MM\x00\x2a", s[:4] == "II\x2a\x00":
		return .TIFF
	case s[:4] == "RIFF" && s[8:12] == "WEBP":
		return .WebP
	case s[:8] == "#define ":
		return .XBM
	case s[:11] == "#?RADIANCE\n", s[:7] == "#?RGBE\n":
		return .HDR
	case s[:4] == "\x38\x42\x50\x53":
		return .PSD
	// Softimage PIC requires BOTH the magic number and "PICT" at offset 88.
	// (The magic comparison was previously inverted with `!=`.)
	case s[:4] == "\x53\x80\xF6\x34" && s[88:92] == "PICT":
		return .PIC
	case s[:4] == "\x69\x63\x6e\x73":
		return .ICNS
	case s[:4] == "\x46\x4c\x49\x46":
		return .FLIF
	case:
		// More complex formats
		if test_tga(s) {
			return .TGA
		}
	}
	return .Unknown
}
// which_file opens the file at `path`, reads up to its first 128 bytes, and
// reports the detected image file type. Returns .Unknown if the open fails.
which_file :: proc(path: string) -> Which_File_Type {
	handle, open_err := os.open(path)
	if open_err != 0 {
		return .Unknown
	}
	defer os.close(handle)
	buf: [128]byte
	os.read(handle, buf[:])
	return which_bytes(buf[:])
}

View File

@@ -41,6 +41,10 @@ mem_copy_non_overlapping :: proc(dst, src: rawptr, len: int) ---
mem_zero :: proc(ptr: rawptr, len: int) ---
mem_zero_volatile :: proc(ptr: rawptr, len: int) ---
// prefer [^]T operations if possible
ptr_offset :: proc(ptr: ^$T, offset: int) -> ^T ---
ptr_sub :: proc(a, b: ^$T) -> int ---
unaligned_load :: proc(src: ^$T) -> T ---
unaligned_store :: proc(dst: ^$T, val: T) -> T ---
@@ -82,20 +86,21 @@ atomic_store_explicit :: proc(dst: ^$T, val: T, order: Atomic_Memory_Order) ---
atomic_load :: proc(dst: ^$T) -> T ---
atomic_load_explicit :: proc(dst: ^$T, order: Atomic_Memory_Order) -> T ---
atomic_add :: proc(dst; ^$T, val: T) -> T ---
atomic_add_explicit :: proc(dst; ^$T, val: T, order: Atomic_Memory_Order) -> T ---
atomic_sub :: proc(dst; ^$T, val: T) -> T ---
atomic_sub_explicit :: proc(dst; ^$T, val: T, order: Atomic_Memory_Order) -> T ---
atomic_and :: proc(dst; ^$T, val: T) -> T ---
atomic_and_explicit :: proc(dst; ^$T, val: T, order: Atomic_Memory_Order) -> T ---
atomic_nand :: proc(dst; ^$T, val: T) -> T ---
atomic_nand_explicit :: proc(dst; ^$T, val: T, order: Atomic_Memory_Order) -> T ---
atomic_or :: proc(dst; ^$T, val: T) -> T ---
atomic_or_explicit :: proc(dst; ^$T, val: T, order: Atomic_Memory_Order) -> T ---
atomic_xor :: proc(dst; ^$T, val: T) -> T ---
atomic_xor_explicit :: proc(dst; ^$T, val: T, order: Atomic_Memory_Order) -> T ---
atomic_exchange :: proc(dst; ^$T, val: T) -> T ---
atomic_exchange_explicit :: proc(dst; ^$T, val: T, order: Atomic_Memory_Order) -> T ---
// fetch then operator
atomic_add :: proc(dst: ^$T, val: T) -> T ---
atomic_add_explicit :: proc(dst: ^$T, val: T, order: Atomic_Memory_Order) -> T ---
atomic_sub :: proc(dst: ^$T, val: T) -> T ---
atomic_sub_explicit :: proc(dst: ^$T, val: T, order: Atomic_Memory_Order) -> T ---
atomic_and :: proc(dst: ^$T, val: T) -> T ---
atomic_and_explicit :: proc(dst: ^$T, val: T, order: Atomic_Memory_Order) -> T ---
atomic_nand :: proc(dst: ^$T, val: T) -> T ---
atomic_nand_explicit :: proc(dst: ^$T, val: T, order: Atomic_Memory_Order) -> T ---
atomic_or :: proc(dst: ^$T, val: T) -> T ---
atomic_or_explicit :: proc(dst: ^$T, val: T, order: Atomic_Memory_Order) -> T ---
atomic_xor :: proc(dst: ^$T, val: T) -> T ---
atomic_xor_explicit :: proc(dst: ^$T, val: T, order: Atomic_Memory_Order) -> T ---
atomic_exchange :: proc(dst: ^$T, val: T) -> T ---
atomic_exchange_explicit :: proc(dst: ^$T, val: T, order: Atomic_Memory_Order) -> T ---
atomic_compare_exchange_strong :: proc(dst: ^$T, old, new: T) -> (T, bool) #optional_ok ---
atomic_compare_exchange_strong_explicit :: proc(dst: ^$T, old, new: T, success, failure: Atomic_Memory_Order) -> (T, bool) #optional_ok ---
@@ -119,22 +124,24 @@ type_is_string :: proc($T: typeid) -> bool ---
type_is_typeid :: proc($T: typeid) -> bool ---
type_is_any :: proc($T: typeid) -> bool ---
type_is_endian_platform :: proc($T: typeid) -> bool ---
type_is_endian_little :: proc($T: typeid) -> bool ---
type_is_endian_big :: proc($T: typeid) -> bool ---
type_is_unsigned :: proc($T: typeid) -> bool ---
type_is_numeric :: proc($T: typeid) -> bool ---
type_is_ordered :: proc($T: typeid) -> bool ---
type_is_ordered_numeric :: proc($T: typeid) -> bool ---
type_is_indexable :: proc($T: typeid) -> bool ---
type_is_sliceable :: proc($T: typeid) -> bool ---
type_is_comparable :: proc($T: typeid) -> bool ---
type_is_simple_compare :: proc($T: typeid) -> bool --- // easily compared using memcmp (== and !=)
type_is_dereferenceable :: proc($T: typeid) -> bool ---
type_is_valid_map_key :: proc($T: typeid) -> bool ---
type_is_endian_platform :: proc($T: typeid) -> bool ---
type_is_endian_little :: proc($T: typeid) -> bool ---
type_is_endian_big :: proc($T: typeid) -> bool ---
type_is_unsigned :: proc($T: typeid) -> bool ---
type_is_numeric :: proc($T: typeid) -> bool ---
type_is_ordered :: proc($T: typeid) -> bool ---
type_is_ordered_numeric :: proc($T: typeid) -> bool ---
type_is_indexable :: proc($T: typeid) -> bool ---
type_is_sliceable :: proc($T: typeid) -> bool ---
type_is_comparable :: proc($T: typeid) -> bool ---
type_is_simple_compare :: proc($T: typeid) -> bool --- // easily compared using memcmp (== and !=)
type_is_dereferenceable :: proc($T: typeid) -> bool ---
type_is_valid_map_key :: proc($T: typeid) -> bool ---
type_is_valid_matrix_elements :: proc($T: typeid) -> bool ---
type_is_named :: proc($T: typeid) -> bool ---
type_is_pointer :: proc($T: typeid) -> bool ---
type_is_multi_pointer :: proc($T: typeid) -> bool ---
type_is_array :: proc($T: typeid) -> bool ---
type_is_enumerated_array :: proc($T: typeid) -> bool ---
type_is_slice :: proc($T: typeid) -> bool ---
@@ -146,6 +153,7 @@ type_is_enum :: proc($T: typeid) -> bool ---
type_is_proc :: proc($T: typeid) -> bool ---
type_is_bit_set :: proc($T: typeid) -> bool ---
type_is_simd_vector :: proc($T: typeid) -> bool ---
type_is_matrix :: proc($T: typeid) -> bool ---
type_has_nil :: proc($T: typeid) -> bool ---
@@ -153,6 +161,7 @@ type_is_specialization_of :: proc($T, $S: typeid) -> bool ---
type_is_variant_of :: proc($U, $V: typeid) -> bool where type_is_union(U) ---
type_has_field :: proc($T: typeid, $name: string) -> bool ---
type_field_type :: proc($T: typeid, $name: string) -> typeid ---
type_proc_parameter_count :: proc($T: typeid) -> int where type_is_proc(T) ---
type_proc_return_count :: proc($T: typeid) -> int where type_is_proc(T) ---
@@ -160,20 +169,41 @@ type_proc_return_count :: proc($T: typeid) -> int where type_is_proc(T) ---
type_proc_parameter_type :: proc($T: typeid, index: int) -> typeid where type_is_proc(T) ---
type_proc_return_type :: proc($T: typeid, index: int) -> typeid where type_is_proc(T) ---
type_struct_field_count :: proc($T: typeid) -> int where type_is_struct(T) ---
type_polymorphic_record_parameter_count :: proc($T: typeid) -> typeid ---
type_polymorphic_record_parameter_value :: proc($T: typeid, index: int) -> $V ---
type_is_specialized_polymorphic_record :: proc($T: typeid) -> bool ---
type_is_unspecialized_polymorphic_record :: proc($T: typeid) -> bool ---
type_is_subtype_of :: proc($T, $U: typeid) -> bool ---
type_field_index_of :: proc($T: typeid, $name: string) -> uintptr ---
type_equal_proc :: proc($T: typeid) -> (equal: proc "contextless" (rawptr, rawptr) -> bool) where type_is_comparable(T) ---
type_hasher_proc :: proc($T: typeid) -> (hasher: proc "contextless" (data: rawptr, seed: uintptr) -> uintptr) where type_is_comparable(T) ---
constant_utf16_cstring :: proc($literal: string) -> [^]u16 ---
// WASM targets only
wasm_memory_grow :: proc(index, delta: uintptr) -> int ---
wasm_memory_size :: proc(index: uintptr) -> int ---
// Darwin targets only
objc_object :: struct{}
objc_selector :: struct{}
objc_class :: struct{}
objc_id :: ^objc_object
objc_SEL :: ^objc_selector
objc_Class :: ^objc_class
objc_find_selector :: proc($name: string) -> objc_SEL ---
objc_register_selector :: proc($name: string) -> objc_SEL ---
objc_find_class :: proc($name: string) -> objc_Class ---
objc_register_class :: proc($name: string) -> objc_Class ---
// Internal compiler use only
__entry_point :: proc() ---

View File

@@ -4,7 +4,6 @@
package io
import "core:intrinsics"
import "core:runtime"
import "core:unicode/utf8"
// Seek whence values
@@ -254,11 +253,7 @@ read_at :: proc(r: Reader_At, p: []byte, offset: i64, n_read: ^int = nil) -> (n:
return 0, .Empty
}
curr_offset: i64
curr_offset, err = r->impl_seek(offset, .Current)
if err != nil {
return 0, err
}
curr_offset := r->impl_seek(offset, .Current) or_return
n, err = r->impl_read(p)
_, err1 := r->impl_seek(curr_offset, .Start)
@@ -552,7 +547,7 @@ _copy_buffer :: proc(dst: Writer, src: Reader, buf: []byte) -> (written: i64, er
}
}
// NOTE(bill): alloca is fine here
buf = transmute([]byte)runtime.Raw_Slice{intrinsics.alloca(size, 2*align_of(rawptr)), size}
buf = intrinsics.alloca(size, 2*align_of(rawptr))[:size]
}
for {
nr, er := read(src, buf)

View File

@@ -67,7 +67,7 @@ file_console_logger_proc :: proc(logger_data: rawptr, level: Level, text: string
h = data.file_handle
}
backing: [1024]byte //NOTE(Hoej): 1024 might be too much for a header backing, unless somebody has really long paths.
buf := strings.builder_from_slice(backing[:])
buf := strings.builder_from_bytes(backing[:])
do_level_header(options, level, &buf)

View File

@@ -479,21 +479,21 @@ angle_from_quaternion_f16 :: proc(q: Quaternionf16) -> f16 {
return math.asin(q.x*q.x + q.y*q.y + q.z*q.z) * 2
}
return math.cos(q.x) * 2
return math.acos(q.w) * 2
}
angle_from_quaternion_f32 :: proc(q: Quaternionf32) -> f32 {
if abs(q.w) > math.SQRT_THREE*0.5 {
return math.asin(q.x*q.x + q.y*q.y + q.z*q.z) * 2
}
return math.cos(q.x) * 2
return math.acos(q.w) * 2
}
angle_from_quaternion_f64 :: proc(q: Quaternionf64) -> f64 {
if abs(q.w) > math.SQRT_THREE*0.5 {
return math.asin(q.x*q.x + q.y*q.y + q.z*q.z) * 2
}
return math.cos(q.x) * 2
return math.acos(q.w) * 2
}
angle_from_quaternion :: proc{
angle_from_quaternion_f16,

View File

@@ -0,0 +1,312 @@
package rand
import "core:math"
// Uniform distribution over [lo, hi); aliases of the range procedures.
float64_uniform :: float64_range
float32_uniform :: float32_range
// Triangular Distribution over [lo, hi] with optional `mode` (peak).
// When `mode` is nil, the peak defaults to the midpoint of the range.
// See: http://wikipedia.org/wiki/Triangular_distribution
float64_triangular :: proc(lo, hi: f64, mode: Maybe(f64), r: ^Rand = nil) -> f64 {
	if hi-lo == 0 {
		return lo
	}
	lo, hi := lo, hi
	u := float64(r)
	c := f64(0.5) if mode == nil else clamp((mode.?-lo) / (hi-lo), 0, 1)
	// Mirror the sample into the left half of the triangle when it falls
	// beyond the peak, so one inverse-CDF formula handles both sides.
	if u > c {
		u = 1-u
		c = 1-c
		lo, hi = hi, lo
	}
	return lo + (hi - lo) * math.sqrt(u * c)
}
// Triangular Distribution over [lo, hi] with optional `mode` (peak).
// When `mode` is nil, the peak defaults to the midpoint of the range.
// See: http://wikipedia.org/wiki/Triangular_distribution
float32_triangular :: proc(lo, hi: f32, mode: Maybe(f32), r: ^Rand = nil) -> f32 {
	if hi-lo == 0 {
		return lo
	}
	lo, hi := lo, hi
	u := float32(r)
	c := f32(0.5) if mode == nil else clamp((mode.?-lo) / (hi-lo), 0, 1)
	if u > c {
		u = 1-u
		c = 1-c
		lo, hi = hi, lo
	}
	return lo + (hi - lo) * math.sqrt(u * c)
}
// Normal/Gaussian Distribution with the given `mean` and standard deviation
// `stddev`; scales and shifts a standard normal sample from norm_float64.
float64_normal :: proc(mean, stddev: f64, r: ^Rand = nil) -> f64 {
	return norm_float64(r) * stddev + mean
}
// Normal/Gaussian Distribution (f32 wrapper around float64_normal).
float32_normal :: proc(mean, stddev: f32, r: ^Rand = nil) -> f32 {
	return f32(float64_normal(f64(mean), f64(stddev), r))
}
// Log Normal Distribution: exp of a normal sample with the given parameters.
float64_log_normal :: proc(mean, stddev: f64, r: ^Rand = nil) -> f64 {
	return math.exp(float64_normal(mean, stddev, r))
}
// Log Normal Distribution (f32 wrapper around float64_log_normal).
float32_log_normal :: proc(mean, stddev: f32, r: ^Rand = nil) -> f32 {
	return f32(float64_log_normal(f64(mean), f64(stddev), r))
}
// Exponential Distribution, sampled by inverting the CDF.
// `lambda` is 1.0/(desired mean). It should be non-zero.
// Return values range from
//	0 to positive infinity if lambda > 0
//	negative infinity to 0 if lambda <= 0
float64_exponential :: proc(lambda: f64, r: ^Rand = nil) -> f64 {
	return - math.ln(1 - float64(r)) / lambda
}
// Exponential Distribution (f32 wrapper around float64_exponential).
// `lambda` is 1.0/(desired mean). It should be non-zero.
// Return values range from
//	0 to positive infinity if lambda > 0
//	negative infinity to 0 if lambda <= 0
float32_exponential :: proc(lambda: f32, r: ^Rand = nil) -> f32 {
	return f32(float64_exponential(f64(lambda), r))
}
// Gamma Distribution (NOT THE GAMMA FUNCTION)
//
// Required: alpha > 0 and beta > 0
//
//             math.pow(x, alpha-1) * math.exp(-x / beta)
//   pdf(x) =  --------------------------------------------
//             math.gamma(alpha) * math.pow(beta, alpha)
//
// mean is alpha*beta, variance is alpha*math.pow(beta, 2)
// (alpha is the shape parameter, beta the scale parameter.)
float64_gamma :: proc(alpha, beta: f64, r: ^Rand = nil) -> f64 {
	if alpha <= 0 || beta <= 0 {
		panic(#procedure + ": alpha and beta must be > 0.0")
	}
	// ln(4) and the magic constant of Cheng's rejection test, precomputed.
	LOG4 :: 1.3862943611198906188344642429163531361510002687205105082413600189
	SG_MAGIC_CONST :: 2.5040773967762740733732583523868748412194809812852436493487
	switch {
	case alpha > 1:
		// R.C.H. Cheng, "The generation of Gamma variables with non-integral shape parameters", Applied Statistics, (1977), 26, No. 1, p71-74
		ainv := math.sqrt(2 * alpha - 1)
		bbb := alpha - LOG4
		ccc := alpha + ainv
		// Rejection sampling: loop until a candidate passes either acceptance test.
		for {
			u1 := float64(r)
			if !(1e-7 < u1 && u1 < 0.9999999) {
				continue
			}
			u2 := 1 - float64(r)
			v := math.ln(u1 / (1 - u1)) / ainv
			x := alpha * math.exp(v)
			z := u1 * u1 * u2
			t := bbb + ccc*v - x
			if t + SG_MAGIC_CONST - 4.5 * z >= 0 || t >= math.ln(z) {
				return x * beta
			}
		}
	case alpha == 1:
		// Gamma(1, beta) is the exponential distribution with mean beta:
		// float64_exponential(1/beta)
		return -math.ln(1 - float64(r)) * beta
	case:
		// ALGORITHM GS of Statistical Computing - Kennedy & Gentle
		x: f64
		for {
			u := float64(r)
			b := (math.e + alpha) / math.e
			p := b * u
			if p <= 1 {
				x = math.pow(p, 1/alpha)
			} else {
				x = -math.ln((b - p) / alpha)
			}
			u1 := float64(r)
			if p > 1 {
				if u1 <= math.pow(x, alpha-1) {
					break
				}
			} else if u1 <= math.exp(-x) {
				break
			}
		}
		return x * beta
	}
}
// Gamma Distribution (NOT THE GAMMA FUNCTION)
// f32 wrapper around float64_gamma.
//
// Required: alpha > 0 and beta > 0
//
//             math.pow(x, alpha-1) * math.exp(-x / beta)
//   pdf(x) =  --------------------------------------------
//             math.gamma(alpha) * math.pow(beta, alpha)
//
// mean is alpha*beta, variance is alpha*math.pow(beta, 2)
float32_gamma :: proc(alpha, beta: f32, r: ^Rand = nil) -> f32 {
	return f32(float64_gamma(f64(alpha), f64(beta), r))
}
// Beta Distribution
//
// Required: alpha > 0 and beta > 0
//
// Return values range between 0 and 1
float64_beta :: proc(alpha, beta: f64, r: ^Rand = nil) -> f64 {
	if alpha <= 0 || beta <= 0 {
		panic(#procedure + ": alpha and beta must be > 0.0")
	}
	// Knuth Vol 2 Ed 3 pg 134 "the beta distribution":
	// a Beta(alpha, beta) variate is X/(X+Y) for independent gamma variates
	// X ~ Gamma(alpha, 1) and Y ~ Gamma(beta, 1).
	y := float64_gamma(alpha, 1.0, r)
	if y != 0 {
		return y / (y + float64_gamma(beta, 1.0, r))
	}
	return 0
}
// Beta Distribution (f32 wrapper around float64_beta).
//
// Required: alpha > 0 and beta > 0
//
// Return values range between 0 and 1
float32_beta :: proc(alpha, beta: f32, r: ^Rand = nil) -> f32 {
	return f32(float64_beta(f64(alpha), f64(beta), r))
}
// Pareto distribution, `alpha` is the shape parameter (scale fixed at 1),
// sampled by inverting the CDF.
// https://wikipedia.org/wiki/Pareto_distribution
float64_pareto :: proc(alpha: f64, r: ^Rand = nil) -> f64 {
	return math.pow(1 - float64(r), -1.0 / alpha)
}
// Pareto distribution, `alpha` is the shape parameter.
// https://wikipedia.org/wiki/Pareto_distribution
// NOTE(review): `beta` is accepted but never used, and float64_pareto takes
// only `alpha` — confirm whether `beta` should be dropped (kept here so that
// existing call sites do not break).
float32_pareto :: proc(alpha, beta: f32, r: ^Rand = nil) -> f32 {
	return f32(float64_pareto(f64(alpha), r))
}
// Weibull distribution, `alpha` is the scale parameter, `beta` is the shape parameter.
// Sampled by inverting the CDF.
float64_weibull :: proc(alpha, beta: f64, r: ^Rand = nil) -> f64 {
	u := 1 - float64(r)
	return alpha * math.pow(-math.ln(u), 1.0/beta)
}
// Weibull distribution, `alpha` is the scale parameter, `beta` is the shape parameter.
// (f32 wrapper around float64_weibull.)
float32_weibull :: proc(alpha, beta: f32, r: ^Rand = nil) -> f32 {
	return f32(float64_weibull(f64(alpha), f64(beta), r))
}
// Circular Data (von Mises) Distribution
// `mean_angle` is the in mean angle between 0 and 2pi radians
// `kappa` is the concentration parameter which must be >= 0
// When `kappa` is zero, the Distribution is a uniform Distribution over the range 0 to 2pi
float64_von_mises :: proc(mean_angle, kappa: f64, r: ^Rand = nil) -> f64 {
	// Fisher, N.I., "Statistical Analysis of Circular Data", Cambridge University Press, 1993.
	mu := mean_angle
	// Tiny kappa: treat as uniform on [0, 2pi) to avoid numeric trouble below.
	if kappa <= 1e-6 {
		return math.TAU * float64(r)
	}
	s := 0.5 / kappa
	t := s + math.sqrt(1 + s*s)
	z: f64
	// Rejection sampling loop; z is the accepted cosine candidate.
	for {
		u1 := float64(r)
		z = math.cos(math.TAU * 0.5 * u1)
		d := z / (t + z)
		u2 := float64(r)
		if u2 < 1 - d*d || u2 <= (1-d)*math.exp(d) {
			break
		}
	}
	q := 1.0 / t
	f := (q + z) / (1 + q*z)
	u3 := float64(r)
	// A final coin flip chooses which side of the mean angle the sample lands on.
	if u3 > 0.5 {
		return math.mod(mu + math.acos(f), math.TAU)
	} else {
		return math.mod(mu - math.acos(f), math.TAU)
	}
}
// Circular Data (von Mises) Distribution (f32 wrapper around float64_von_mises).
// `mean_angle` is the in mean angle between 0 and 2pi radians
// `kappa` is the concentration parameter which must be >= 0
// When `kappa` is zero, the Distribution is a uniform Distribution over the range 0 to 2pi
float32_von_mises :: proc(mean_angle, kappa: f32, r: ^Rand = nil) -> f32 {
	return f32(float64_von_mises(f64(mean_angle), f64(kappa), r))
}
// Cauchy-Lorentz Distribution
// `x_0` is the location, `gamma` is the scale where `gamma` > 0
float64_cauchy_lorentz :: proc(x_0, gamma: f64, r: ^Rand = nil) -> f64 {
	assert(gamma > 0)
	// Calculated from the inverse CDF
	return math.tan(math.PI * (float64(r) - 0.5))*gamma + x_0
}
// Cauchy-Lorentz Distribution (f32 wrapper around float64_cauchy_lorentz).
// `x_0` is the location, `gamma` is the scale where `gamma` > 0
float32_cauchy_lorentz :: proc(x_0, gamma: f32, r: ^Rand = nil) -> f32 {
	return f32(float64_cauchy_lorentz(f64(x_0), f64(gamma), r))
}
// Log Cauchy-Lorentz Distribution: exp of a Cauchy-Lorentz sample.
// `x_0` is the location, `gamma` is the scale where `gamma` > 0
float64_log_cauchy_lorentz :: proc(x_0, gamma: f64, r: ^Rand = nil) -> f64 {
	assert(gamma > 0)
	return math.exp(math.tan(math.PI * (float64(r) - 0.5))*gamma + x_0)
}
// Log Cauchy-Lorentz Distribution (f32 wrapper).
// `x_0` is the location, `gamma` is the scale where `gamma` > 0
float32_log_cauchy_lorentz :: proc(x_0, gamma: f32, r: ^Rand = nil) -> f32 {
	return f32(float64_log_cauchy_lorentz(f64(x_0), f64(gamma), r))
}
// Laplace Distribution centered at `mean`, sampled via the inverse CDF.
// `b` is the scale where `b` > 0
float64_laplace :: proc(mean, b: f64, r: ^Rand = nil) -> f64 {
	assert(b > 0)
	p := float64(r)-0.5
	return -math.sign(p)*math.ln(1 - 2*abs(p))*b + mean
}
// Laplace Distribution (f32 wrapper around float64_laplace).
// `b` is the scale where `b` > 0
float32_laplace :: proc(mean, b: f32, r: ^Rand = nil) -> f32 {
	return f32(float64_laplace(f64(mean), f64(b), r))
}
// Gompertz Distribution, sampled via the inverse CDF.
// `eta` is the shape, `b` is the scale
// Both `eta` and `b` must be > 0
float64_gompertz :: proc(eta, b: f64, r: ^Rand = nil) -> f64 {
	if eta <= 0 || b <= 0 {
		panic(#procedure + ": eta and b must be > 0.0")
	}
	p := float64(r)
	return math.ln(1 - math.ln(1 - p)/eta)/b
}
// Gompertz Distribution (f32 wrapper around float64_gompertz).
// `eta` is the shape, `b` is the scale
// Both `eta` and `b` must be > 0
float32_gompertz :: proc(eta, b: f32, r: ^Rand = nil) -> f32 {
	return f32(float64_gompertz(f64(eta), f64(b), r))
}

View File

@@ -5,6 +5,7 @@ import "core:intrinsics"
Rand :: struct {
state: u64,
inc: u64,
is_system: bool,
}
@@ -29,6 +30,16 @@ init :: proc(r: ^Rand, seed: u64) {
_random(r)
}
init_as_system :: proc(r: ^Rand) {
if !#defined(_system_random) {
panic(#procedure + " is not supported on this platform yet")
}
r.state = 0
r.inc = 0
r.is_system = true
}
@(private)
_random :: proc(r: ^Rand) -> u32 {
r := r
if r == nil {
@@ -36,6 +47,12 @@ _random :: proc(r: ^Rand) -> u32 {
// enforce the global random state if necessary with `nil`
r = &global_rand
}
when #defined(_system_random) {
if r.is_system {
return _system_random()
}
}
old_state := r.state
r.state = old_state * 6364136223846793005 + (r.inc|1)
xor_shifted := u32(((old_state>>18) ~ old_state) >> 27)
@@ -119,13 +136,14 @@ int_max :: proc(n: int, r: ^Rand = nil) -> int {
}
}
// Uniform random distribution [0, 1)
float64 :: proc(r: ^Rand = nil) -> f64 { return f64(int63_max(1<<53, r)) / (1 << 53) }
// Uniform random distribution [0, 1)
float32 :: proc(r: ^Rand = nil) -> f32 { return f32(float64(r)) }
float64_range :: proc(lo, hi: f64, r: ^Rand = nil) -> f64 { return (hi-lo)*float64(r) + lo }
float32_range :: proc(lo, hi: f32, r: ^Rand = nil) -> f32 { return (hi-lo)*float32(r) + lo }
read :: proc(p: []byte, r: ^Rand = nil) -> (n: int) {
pos := i8(0)
val := i64(0)

View File

@@ -0,0 +1,21 @@
package rand
import "core:sys/darwin"
// _system_random returns 4 bytes of OS-provided entropy via the Darwin
// `getentropy` syscall, retrying when the call is interrupted by a signal.
_system_random :: proc() -> u32 {
	for {
		value: u32
		ret := darwin.syscall_getentropy(([^]u8)(&value), 4)
		if ret < 0 {
			switch ret {
			case -4: // EINTR: interrupted by a signal handler; retry the request.
				continue
			case -78: // ENOSYS: syscall not provided by this kernel.
				panic("getentropy not available in kernel")
			case:
				panic("getentropy failed")
			}
		}
		return value
	}
}

View File

@@ -0,0 +1,27 @@
package rand
import "core:sys/unix"
// _system_random returns 4 bytes of OS-provided entropy via the Linux
// `getrandom` syscall, retrying when the call is interrupted by a signal.
_system_random :: proc() -> u32 {
	for {
		value: u32
		ret := unix.sys_getrandom(([^]u8)(&value), 4, 0)
		if ret < 0 {
			switch ret {
			case -4: // EINTR
				// Call interupted by a signal handler, just retry the request.
				continue
			case -38: // ENOSYS
				// The kernel is apparently prehistoric (< 3.17 circa 2014)
				// and does not support getrandom.
				panic("getrandom not available in kernel")
			case:
				// All other failures are things that should NEVER happen
				// unless the kernel interface changes (ie: the Linux
				// developers break userland).
				panic("getrandom failed")
			}
		}
		return value
	}
}

View File

@@ -0,0 +1,12 @@
package rand
import win32 "core:sys/windows"
// _system_random returns 4 bytes of OS-provided entropy via BCryptGenRandom.
_system_random :: proc() -> u32 {
	value: u32
	// A nil algorithm handle together with BCRYPT_USE_SYSTEM_PREFERRED_RNG
	// selects the system-preferred random number generator.
	status := win32.BCryptGenRandom(nil, ([^]u8)(&value), 4, win32.BCRYPT_USE_SYSTEM_PREFERRED_RNG)
	if status < 0 { // NTSTATUS: negative values indicate failure.
		panic("BCryptGenRandom failed")
	}
	return value
}

View File

@@ -6,24 +6,7 @@ import "core:runtime"
nil_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
size, alignment: int,
old_memory: rawptr, old_size: int, loc := #caller_location) -> ([]byte, Allocator_Error) {
switch mode {
case .Alloc:
return nil, .Out_Of_Memory
case .Free:
return nil, .None
case .Free_All:
return nil, .Mode_Not_Implemented
case .Resize:
if size == 0 {
return nil, .None
}
return nil, .Out_Of_Memory
case .Query_Features:
return nil, .Mode_Not_Implemented
case .Query_Info:
return nil, .Mode_Not_Implemented
}
return nil, .None
return nil, nil
}
nil_allocator :: proc() -> Allocator {
@@ -679,6 +662,7 @@ dynamic_pool_destroy :: proc(using pool: ^Dynamic_Pool) {
dynamic_pool_free_all(pool)
delete(unused_blocks)
delete(used_blocks)
delete(out_band_allocations)
zero(pool, size_of(pool^))
}
@@ -763,6 +747,8 @@ dynamic_pool_reset :: proc(using pool: ^Dynamic_Pool) {
free(a, block_allocator)
}
clear(&out_band_allocations)
bytes_left = 0 // Make new allocations call `cycle_new_block` again.
}
dynamic_pool_free_all :: proc(using pool: ^Dynamic_Pool) {
@@ -872,7 +858,7 @@ tracking_allocator_proc :: proc(allocator_data: rawptr, mode: Allocator_Mode,
result: []byte
err: Allocator_Error
if mode == .Free && old_memory not_in data.allocation_map {
if mode == .Free && old_memory != nil && old_memory not_in data.allocation_map {
append(&data.bad_free_array, Tracking_Allocator_Bad_Free_Entry{
memory = old_memory,
location = loc,

View File

@@ -3,6 +3,12 @@ package mem
import "core:runtime"
import "core:intrinsics"
Byte :: 1
Kilobyte :: 1024 * Byte
Megabyte :: 1024 * Kilobyte
Gigabyte :: 1024 * Megabyte
Terabyte :: 1024 * Gigabyte
set :: proc "contextless" (data: rawptr, value: byte, len: int) -> rawptr {
return runtime.memset(data, i32(value), len)
}
@@ -166,7 +172,7 @@ slice_data_cast :: proc "contextless" ($T: typeid/[]$A, slice: $S/[]$B) -> T {
slice_to_components :: proc "contextless" (slice: $E/[]$T) -> (data: ^T, len: int) {
s := transmute(Raw_Slice)slice
return s.data, s.len
return (^T)(s.data), s.len
}
buffer_from_slice :: proc "contextless" (backing: $T/[]$E) -> [dynamic]E {
@@ -192,11 +198,6 @@ any_to_bytes :: proc "contextless" (val: any) -> []byte {
}
kilobytes :: proc "contextless" (x: int) -> int { return (x) * 1024 }
megabytes :: proc "contextless" (x: int) -> int { return kilobytes(x) * 1024 }
gigabytes :: proc "contextless" (x: int) -> int { return megabytes(x) * 1024 }
terabytes :: proc "contextless" (x: int) -> int { return gigabytes(x) * 1024 }
is_power_of_two :: proc "contextless" (x: uintptr) -> bool {
if x <= 0 {
return false

View File

@@ -120,7 +120,7 @@ alloc_from_memory_block :: proc(block: ^Memory_Block, min_size, alignment: int)
do_commit_if_necessary :: proc(block: ^Memory_Block, size: uint) -> (err: Allocator_Error) {
if block.committed - block.used < size {
pmblock := (^Platform_Memory_Block)(block)
base_offset := uint(uintptr(block) - uintptr(pmblock))
base_offset := uint(uintptr(pmblock.block.base) - uintptr(pmblock))
platform_total_commit := base_offset + block.used + size
assert(pmblock.committed <= pmblock.reserved)

View File

@@ -151,7 +151,7 @@ print :: proc(p: ^Printer, file: ^ast.File) -> string {
fix_lines(p)
builder := strings.make_builder(0, mem.megabytes(5), p.allocator)
builder := strings.make_builder(0, 5 * mem.Megabyte, p.allocator)
last_line := 0

View File

@@ -13,7 +13,7 @@ read_dir :: proc(fd: Handle, n: int, allocator := context.allocator) -> (fi: []F
if d.cFileName[0] == '.' && d.cFileName[1] == '.' && d.cFileName[2] == 0 {
return
}
path := strings.concatenate({base_path, `\`, win32.utf16_to_utf8(d.cFileName[:])})
path := strings.concatenate({base_path, `\`, win32.utf16_to_utf8(d.cFileName[:]) or_else ""})
fi.fullpath = path
fi.name = basename(path)
fi.size = i64(d.nFileSizeHigh)<<32 + i64(d.nFileSizeLow)

View File

@@ -22,7 +22,7 @@ lookup_env :: proc(key: string, allocator := context.allocator) -> (value: strin
}
if n <= u32(len(b)) {
value = win32.utf16_to_utf8(b[:n], allocator)
value, _ = win32.utf16_to_utf8(b[:n], allocator)
found = true
return
}
@@ -76,7 +76,7 @@ environ :: proc(allocator := context.allocator) -> []string {
if i <= from {
break
}
append(&r, win32.utf16_to_utf8(envs[from:i], allocator))
append(&r, win32.utf16_to_utf8(envs[from:i], allocator) or_else "")
from = i + 1
}
}

View File

@@ -365,7 +365,7 @@ get_current_directory :: proc(allocator := context.allocator) -> string {
win32.ReleaseSRWLockExclusive(&cwd_lock)
return win32.utf16_to_utf8(dir_buf_wstr, allocator)
return win32.utf16_to_utf8(dir_buf_wstr, allocator) or_else ""
}
set_current_directory :: proc(path: string) -> (err: Errno) {

View File

@@ -9,6 +9,10 @@ OS :: ODIN_OS
ARCH :: ODIN_ARCH
ENDIAN :: ODIN_ENDIAN
SEEK_SET :: 0
SEEK_CUR :: 1
SEEK_END :: 2
write_string :: proc(fd: Handle, str: string) -> (int, Errno) {
return write(fd, transmute([]byte)str)
}

View File

@@ -1,33 +1,35 @@
//+private
package os2
//import "core:runtime"
//import "core:mem"
import win32 "core:sys/windows"
import "core:runtime"
_get_env :: proc(key: string, allocator := context.allocator) -> (value: string, found: bool) {
_lookup_env :: proc(key: string, allocator: runtime.Allocator) -> (value: string, found: bool) {
if key == "" {
return
}
wkey := win32.utf8_to_wstring(key)
// https://docs.microsoft.com/en-us/windows/win32/api/processenv/nf-processenv-getenvironmentvariablew
buf_len := win32.GetEnvironmentVariableW(wkey, nil, 0)
if buf_len == 0 {
return
}
buf := make([dynamic]u16, buf_len, context.temp_allocator)
n := win32.GetEnvironmentVariableW(wkey, raw_data(buf), buf_len)
n := win32.GetEnvironmentVariableW(wkey, nil, 0)
if n == 0 {
if win32.GetLastError() == win32.ERROR_ENVVAR_NOT_FOUND {
err := win32.GetLastError()
if err == win32.ERROR_ENVVAR_NOT_FOUND {
return "", false
}
value = ""
found = true
return
return "", true
}
b := make([]u16, n+1, _temp_allocator())
n = win32.GetEnvironmentVariableW(wkey, raw_data(b), u32(len(b)))
if n == 0 {
err := win32.GetLastError()
if err == win32.ERROR_ENVVAR_NOT_FOUND {
return "", false
}
return "", false
}
value = win32.utf16_to_utf8(buf[:n], allocator)
value = win32.utf16_to_utf8(b[:n], allocator) or_else ""
found = true
return
}
@@ -36,21 +38,18 @@ _set_env :: proc(key, value: string) -> bool {
k := win32.utf8_to_wstring(key)
v := win32.utf8_to_wstring(value)
// https://docs.microsoft.com/en-us/windows/win32/api/processenv/nf-processenv-setenvironmentvariablew
return bool(win32.SetEnvironmentVariableW(k, v))
}
_unset_env :: proc(key: string) -> bool {
k := win32.utf8_to_wstring(key)
// https://docs.microsoft.com/en-us/windows/win32/api/processenv/nf-processenv-setenvironmentvariablew
return bool(win32.SetEnvironmentVariableW(k, nil))
}
_clear_env :: proc() {
envs := environ(context.temp_allocator)
envs := environ(_temp_allocator())
for env in envs {
#no_bounds_check for j in 1..<len(env) {
for j in 1..<len(env) {
if env[j] == '=' {
unset_env(env[0:j])
break
@@ -59,34 +58,23 @@ _clear_env :: proc() {
}
}
_environ :: proc(allocator := context.allocator) -> []string {
envs := ([^]u16)(win32.GetEnvironmentStringsW())
_environ :: proc(allocator: runtime.Allocator) -> []string {
envs := win32.GetEnvironmentStringsW()
if envs == nil {
return nil
}
defer win32.FreeEnvironmentStringsW(envs)
length := 0
n := 0
count_loop: for {
if envs[length] == 0 {
n += 1
if envs[length+1] == 0 {
break count_loop
}
}
length += 1
}
r := make([dynamic]string, 0, n, allocator)
for offset, i := 0, 0; i < length && len(r) < n; i += 1 {
c := envs[i]
r := make([dynamic]string, 0, 50, allocator)
for from, i, p := 0, 0, envs; true; i += 1 {
c := ([^]u16)(p)[i]
if c == 0 {
wstr := envs[offset:i]
append(&r, win32.utf16_to_utf8(wstr, allocator))
i += 1
offset = i
if i <= from {
break
}
w := ([^]u16)(p)[from:i]
append(&r, win32.utf16_to_utf8(w, allocator) or_else "")
from = i + 1
}
}

View File

@@ -1,9 +1,10 @@
package os2
import "core:io"
import "core:runtime"
General_Error :: enum u32 {
Invalid_Argument,
None,
Permission_Denied,
Exist,
@@ -11,79 +12,78 @@ General_Error :: enum u32 {
Closed,
Timeout,
Invalid_File,
Invalid_Dir,
Invalid_Path,
Unsupported,
}
Platform_Error :: struct {
err: i32,
}
Platform_Error :: enum i32 {None=0}
Error :: union {
Error :: union #shared_nil {
General_Error,
io.Error,
runtime.Allocator_Error,
Platform_Error,
}
#assert(size_of(Error) == size_of(u64))
Link_Error :: struct {
op: string,
old: string,
new: string,
err: Error,
}
link_error_delete :: proc(lerr: Maybe(Link_Error)) {
if err, ok := lerr.?; ok {
context.allocator = error_allocator()
delete(err.op)
delete(err.old)
delete(err.new)
}
}
is_platform_error :: proc(ferr: Error) -> (err: i32, ok: bool) {
v := ferr.(Platform_Error) or_else {}
return v.err, v.err != 0
return i32(v), i32(v) != 0
}
error_string :: proc(ferr: Error) -> string {
@static general_error_strings := [General_Error]string{
.Invalid_Argument = "invalid argument",
.Permission_Denied = "permission denied",
.Exist = "file already exists",
.Not_Exist = "file does not exist",
.Closed = "file already closed",
.Timeout = "i/o timeout",
}
@static io_error_strings := [io.Error]string{
.None = "",
.EOF = "eof",
.Unexpected_EOF = "unexpected eof",
.Short_Write = "short write",
.Invalid_Write = "invalid write result",
.Short_Buffer = "short buffer",
.No_Progress = "multiple read calls return no data or error",
.Invalid_Whence = "invalid whence",
.Invalid_Offset = "invalid offset",
.Invalid_Unread = "invalid unread",
.Negative_Read = "negative read",
.Negative_Write = "negative write",
.Negative_Count = "negative count",
.Buffer_Full = "buffer full",
.Unknown = "unknown i/o error",
.Empty = "empty i/o error",
}
if ferr == nil {
return ""
}
switch err in ferr {
case General_Error: return general_error_strings[err]
case io.Error: return io_error_strings[err]
case Platform_Error: return _error_string(err.err)
switch e in ferr {
case General_Error:
switch e {
case .None: return ""
case .Permission_Denied: return "permission denied"
case .Exist: return "file already exists"
case .Not_Exist: return "file does not exist"
case .Closed: return "file already closed"
case .Timeout: return "i/o timeout"
case .Invalid_File: return "invalid file"
case .Invalid_Dir: return "invalid directory"
case .Invalid_Path: return "invalid path"
case .Unsupported: return "unsupported"
}
case io.Error:
switch e {
case .None: return ""
case .EOF: return "eof"
case .Unexpected_EOF: return "unexpected eof"
case .Short_Write: return "short write"
case .Invalid_Write: return "invalid write result"
case .Short_Buffer: return "short buffer"
case .No_Progress: return "multiple read calls return no data or error"
case .Invalid_Whence: return "invalid whence"
case .Invalid_Offset: return "invalid offset"
case .Invalid_Unread: return "invalid unread"
case .Negative_Read: return "negative read"
case .Negative_Write: return "negative write"
case .Negative_Count: return "negative count"
case .Buffer_Full: return "buffer full"
case .Unknown, .Empty: //
}
case runtime.Allocator_Error:
switch e {
case .None: return ""
case .Out_Of_Memory: return "out of memory"
case .Invalid_Pointer: return "invalid allocator pointer"
case .Invalid_Argument: return "invalid allocator argument"
case .Mode_Not_Implemented: return "allocator mode not implemented"
}
case Platform_Error:
return _error_string(i32(e))
}
return "unknown error"

View File

@@ -130,7 +130,7 @@ EHWPOISON :: 133 /* Memory page has hardware error */
_get_platform_error :: proc(res: int) -> Error {
errno := unix.get_errno(res)
return Platform_Error{i32(errno)}
return Platform_Error(i32(errno))
}
_ok_or_error :: proc(res: int) -> Error {

View File

@@ -12,3 +12,49 @@ _error_string :: proc(errno: i32) -> string {
// FormatMessageW
return ""
}
_get_platform_error :: proc() -> Error {
err := win32.GetLastError()
if err == 0 {
return nil
}
switch err {
case win32.ERROR_ACCESS_DENIED, win32.ERROR_SHARING_VIOLATION:
return .Permission_Denied
case win32.ERROR_FILE_EXISTS, win32.ERROR_ALREADY_EXISTS:
return .Exist
case win32.ERROR_FILE_NOT_FOUND, win32.ERROR_PATH_NOT_FOUND:
return .Not_Exist
case win32.ERROR_NO_DATA:
return .Closed
case win32.ERROR_TIMEOUT, win32.WAIT_TIMEOUT:
return .Timeout
case win32.ERROR_NOT_SUPPORTED:
return .Unsupported
case
win32.ERROR_BAD_ARGUMENTS,
win32.ERROR_INVALID_PARAMETER,
win32.ERROR_NOT_ENOUGH_MEMORY,
win32.ERROR_INVALID_HANDLE,
win32.ERROR_NO_MORE_FILES,
win32.ERROR_LOCK_VIOLATION,
win32.ERROR_HANDLE_EOF,
win32.ERROR_BROKEN_PIPE,
win32.ERROR_CALL_NOT_IMPLEMENTED,
win32.ERROR_INSUFFICIENT_BUFFER,
win32.ERROR_INVALID_NAME,
win32.ERROR_LOCK_FAILED,
win32.ERROR_ENVVAR_NOT_FOUND,
win32.ERROR_OPERATION_ABORTED,
win32.ERROR_IO_PENDING,
win32.ERROR_NO_UNICODE_TRANSLATION:
// fallthrough
}
return Platform_Error(err)
}

View File

@@ -2,10 +2,11 @@ package os2
import "core:io"
import "core:time"
import "core:runtime"
Handle :: distinct uintptr
INVALID_HANDLE :: ~Handle(0)
File :: struct {
impl: _File,
}
Seek_From :: enum {
Start = 0, // seek relative to the origin of the file
@@ -20,106 +21,109 @@ File_Mode_Device :: File_Mode(1<<18)
File_Mode_Char_Device :: File_Mode(1<<19)
File_Mode_Sym_Link :: File_Mode(1<<20)
File_Mode_Perm :: File_Mode(0o777) // Unix permision bits
File_Flag :: enum u32 {
Read = 0,
Write = 1,
Append = 2,
Create = 3,
Excl = 4,
Sync = 5,
Trunc = 6,
Close_On_Exec = 7,
File_Flags :: distinct bit_set[File_Flag; uint]
File_Flag :: enum {
Read,
Write,
Append,
Create,
Excl,
Sync,
Trunc,
Sparse,
Close_On_Exec,
Unbuffered_IO,
}
File_Flags :: distinct bit_set[File_Flag; u32]
O_RDONLY :: File_Flags{.Read}
O_WRONLY :: File_Flags{.Write}
O_RDWR :: File_Flags{.Read, .Write}
O_APPEND :: File_Flags{.Append}
O_CREATE :: File_Flags{.Create}
O_EXCL :: File_Flags{.Excl}
O_SYNC :: File_Flags{.Sync}
O_TRUNC :: File_Flags{.Trunc}
O_RDONLY :: File_Flags{.Read}
O_WRONLY :: File_Flags{.Write}
O_RDWR :: File_Flags{.Read, .Write}
O_APPEND :: File_Flags{.Append}
O_CREATE :: File_Flags{.Create}
O_EXCL :: File_Flags{.Excl}
O_SYNC :: File_Flags{.Sync}
O_TRUNC :: File_Flags{.Trunc}
O_SPARSE :: File_Flags{.Sparse}
O_CLOEXEC :: File_Flags{.Close_On_Exec}
Std_Handle_Kind :: enum u8 {
stdin = 0,
stdout = 1,
stderr = 2,
stdin: ^File = nil // OS-Specific
stdout: ^File = nil // OS-Specific
stderr: ^File = nil // OS-Specific
create :: proc(name: string) -> (^File, Error) {
return open(name, {.Read, .Write, .Create}, File_Mode(0o777))
}
stdin: Handle = std_handle(.stdin)
stdout: Handle = std_handle(.stdout)
stderr: Handle = std_handle(.stderr)
std_handle :: proc(kind: Std_Handle_Kind) -> Handle {
return _std_handle(kind)
}
create :: proc(name: string, perm: File_Mode = 0) -> (Handle, Error) {
return open(name, {.Read, .Write, .Create}, perm)
}
open :: proc(name: string, flags := File_Flags{.Read}, perm: File_Mode = 0) -> (Handle, Error) {
flags := flags
if .Write not_in flags {
flags += {.Read}
}
open :: proc(name: string, flags := File_Flags{.Read}, perm := File_Mode(0o777)) -> (^File, Error) {
return _open(name, flags, perm)
}
close :: proc(fd: Handle) -> Error {
return _close(fd)
new_file :: proc(handle: uintptr, name: string) -> ^File {
return _new_file(handle, name)
}
name :: proc(fd: Handle, allocator := context.allocator) -> string {
return _name(fd)
}
seek :: proc(fd: Handle, offset: i64, whence: Seek_From) -> (ret: i64, err: Error) {
return _seek(fd, offset, whence)
}
read :: proc(fd: Handle, p: []byte) -> (n: int, err: Error) {
return _read(fd, p)
}
read_at :: proc(fd: Handle, p: []byte, offset: i64) -> (n: int, err: Error) {
return _read_at(fd, p, offset)
}
read_from :: proc(fd: Handle, r: io.Reader) -> (n: i64, err: Error) {
return _read_from(fd, r)
}
write :: proc(fd: Handle, p: []byte) -> (n: int, err: Error) {
return _write(fd, p)
}
write_at :: proc(fd: Handle, p: []byte, offset: i64) -> (n: int, err: Error) {
return _write_at(fd, p, offset)
}
write_to :: proc(fd: Handle, w: io.Writer) -> (n: i64, err: Error) {
return _write_to(fd, w)
}
file_size :: proc(fd: Handle) -> (n: i64, err: Error) {
return _file_size(fd)
fd :: proc(f: ^File) -> uintptr {
return _fd(f)
}
sync :: proc(fd: Handle) -> Error {
return _sync(fd)
close :: proc(f: ^File) -> Error {
return _close(f)
}
flush :: proc(fd: Handle) -> Error {
return _flush(fd)
name :: proc(f: ^File) -> string {
return _name(f)
}
truncate :: proc(fd: Handle, size: i64) -> Error {
return _truncate(fd, size)
seek :: proc(f: ^File, offset: i64, whence: Seek_From) -> (ret: i64, err: Error) {
return _seek(f, offset, whence)
}
read :: proc(f: ^File, p: []byte) -> (n: int, err: Error) {
return _read(f, p)
}
read_at :: proc(f: ^File, p: []byte, offset: i64) -> (n: int, err: Error) {
return _read_at(f, p, offset)
}
read_from :: proc(f: ^File, r: io.Reader) -> (n: i64, err: Error) {
return _read_from(f, r)
}
write :: proc(f: ^File, p: []byte) -> (n: int, err: Error) {
return _write(f, p)
}
write_at :: proc(f: ^File, p: []byte, offset: i64) -> (n: int, err: Error) {
return _write_at(f, p, offset)
}
write_to :: proc(f: ^File, w: io.Writer) -> (n: i64, err: Error) {
return _write_to(f, w)
}
file_size :: proc(f: ^File) -> (n: i64, err: Error) {
return _file_size(f)
}
sync :: proc(f: ^File) -> Error {
return _sync(f)
}
flush :: proc(f: ^File) -> Error {
return _flush(f)
}
truncate :: proc(f: ^File, size: i64) -> Error {
return _truncate(f, size)
}
remove :: proc(name: string) -> Error {
@@ -139,28 +143,36 @@ symlink :: proc(old_name, new_name: string) -> Error {
return _symlink(old_name, new_name)
}
read_link :: proc(name: string) -> (string, Error) {
return _read_link(name)
}
unlink :: proc(path: string) -> Error {
return _unlink(path)
read_link :: proc(name: string, allocator: runtime.Allocator) -> (string, Error) {
return _read_link(name,allocator)
}
chdir :: proc(fd: Handle) -> Error {
return _chdir(fd)
chdir :: proc(name: string) -> Error {
return _chdir(name)
}
chmod :: proc(fd: Handle, mode: File_Mode) -> Error {
return _chmod(fd, mode)
chmod :: proc(name: string, mode: File_Mode) -> Error {
return _chmod(name, mode)
}
chown :: proc(fd: Handle, uid, gid: int) -> Error {
return _chown(fd, uid, gid)
chown :: proc(name: string, uid, gid: int) -> Error {
return _chown(name, uid, gid)
}
fchdir :: proc(f: ^File) -> Error {
return _fchdir(f)
}
fchmod :: proc(f: ^File, mode: File_Mode) -> Error {
return _fchmod(f, mode)
}
fchown :: proc(f: ^File, uid, gid: int) -> Error {
return _fchown(f, uid, gid)
}
lchown :: proc(name: string, uid, gid: int) -> Error {
return _lchown(name, uid, gid)
@@ -170,16 +182,36 @@ lchown :: proc(name: string, uid, gid: int) -> Error {
chtimes :: proc(name: string, atime, mtime: time.Time) -> Error {
return _chtimes(name, atime, mtime)
}
fchtimes :: proc(f: ^File, atime, mtime: time.Time) -> Error {
return _fchtimes(f, atime, mtime)
}
exists :: proc(path: string) -> bool {
return _exists(path)
}
is_file :: proc(fd: Handle) -> bool {
return _is_file(fd)
is_file :: proc(path: string) -> bool {
return _is_file(path)
}
is_dir :: proc(fd: Handle) -> bool {
return _is_dir(fd)
is_dir :: proc(path: string) -> bool {
return _is_dir(path)
}
copy_file :: proc(dst_path, src_path: string) -> Error {
src := open(src_path) or_return
defer close(src)
info := fstat(src, _file_allocator()) or_return
defer file_info_delete(info, _file_allocator())
if info.is_dir {
return .Invalid_File
}
dst := open(dst_path, {.Read, .Write, .Create, .Trunc}, info.mode & File_Mode_Perm) or_return
defer close(dst)
_, err := io.copy(to_writer(dst), to_reader(src))
return err
}

View File

@@ -4,13 +4,10 @@ package os2
import "core:io"
import "core:time"
import "core:strings"
import "core:strconv"
import "core:runtime"
import "core:sys/unix"
_std_handle :: proc(kind: Std_Handle_Kind) -> Handle {
return Handle(kind)
}
INVALID_HANDLE :: -1
_O_RDONLY :: 0o0
_O_WRONLY :: 0o1
@@ -31,7 +28,17 @@ _AT_FDCWD :: -100
_CSTRING_NAME_HEAP_THRESHOLD :: 512
_open :: proc(name: string, flags: File_Flags, perm: File_Mode) -> (Handle, Error) {
_File :: struct {
name: string,
fd: int,
allocator: runtime.Allocator,
}
_file_allocator :: proc() -> runtime.Allocator {
return heap_allocator()
}
_open :: proc(name: string, flags: File_Flags, perm: File_Mode) -> (^File, Error) {
name_cstr, allocated := _name_to_cstring(name)
defer if allocated {
delete(name_cstr)
@@ -51,63 +58,75 @@ _open :: proc(name: string, flags: File_Flags, perm: File_Mode) -> (Handle, Erro
flags_i |= (_O_TRUNC * int(.Trunc in flags))
flags_i |= (_O_CLOEXEC * int(.Close_On_Exec in flags))
handle_i := unix.sys_open(name_cstr, flags_i, int(perm))
if handle_i < 0 {
return INVALID_HANDLE, _get_platform_error(handle_i)
fd := unix.sys_open(name_cstr, flags_i, int(perm))
if fd < 0 {
return nil, _get_platform_error(fd)
}
return Handle(handle_i), nil
return _new_file(uintptr(fd), name), nil
}
_close :: proc(fd: Handle) -> Error {
res := unix.sys_close(int(fd))
_new_file :: proc(fd: uintptr, _: string) -> ^File {
file := new(File, _file_allocator())
file.impl.fd = int(fd)
file.impl.allocator = _file_allocator()
file.impl.name = _get_full_path(file.impl.fd, file.impl.allocator)
return file
}
_destroy :: proc(f: ^File) -> Error {
if f == nil {
return nil
}
delete(f.impl.name, f.impl.allocator)
free(f, f.impl.allocator)
return nil
}
_close :: proc(f: ^File) -> Error {
res := unix.sys_close(f.impl.fd)
return _ok_or_error(res)
}
_name :: proc(fd: Handle, allocator := context.allocator) -> string {
// NOTE: Not sure how portable this really is
PROC_FD_PATH :: "/proc/self/fd/"
buf: [32]u8
copy(buf[:], PROC_FD_PATH)
strconv.itoa(buf[len(PROC_FD_PATH):], int(fd))
realpath: string
err: Error
if realpath, err = _read_link_cstr(cstring(&buf[0]), allocator); err != nil || realpath[0] != '/' {
return ""
_fd :: proc(f: ^File) -> uintptr {
if f == nil {
return ~uintptr(0)
}
return realpath
return uintptr(f.impl.fd)
}
_seek :: proc(fd: Handle, offset: i64, whence: Seek_From) -> (ret: i64, err: Error) {
res := unix.sys_lseek(int(fd), offset, int(whence))
_name :: proc(f: ^File) -> string {
return f.impl.name if f != nil else ""
}
_seek :: proc(f: ^File, offset: i64, whence: Seek_From) -> (ret: i64, err: Error) {
res := unix.sys_lseek(f.impl.fd, offset, int(whence))
if res < 0 {
return -1, _get_platform_error(int(res))
}
return res, nil
}
_read :: proc(fd: Handle, p: []byte) -> (n: int, err: Error) {
_read :: proc(f: ^File, p: []byte) -> (n: int, err: Error) {
if len(p) == 0 {
return 0, nil
}
n = unix.sys_read(int(fd), &p[0], len(p))
n = unix.sys_read(f.impl.fd, &p[0], len(p))
if n < 0 {
return -1, _get_platform_error(int(unix.get_errno(n)))
return -1, _get_platform_error(n)
}
return n, nil
}
_read_at :: proc(fd: Handle, p: []byte, offset: i64) -> (n: int, err: Error) {
_read_at :: proc(f: ^File, p: []byte, offset: i64) -> (n: int, err: Error) {
if offset < 0 {
return 0, .Invalid_Offset
}
b, offset := p, offset
for len(b) > 0 {
m := unix.sys_pread(int(fd), &b[0], len(b), offset)
m := unix.sys_pread(f.impl.fd, &b[0], len(b), offset)
if m < 0 {
return -1, _get_platform_error(m)
}
@@ -118,30 +137,30 @@ _read_at :: proc(fd: Handle, p: []byte, offset: i64) -> (n: int, err: Error) {
return
}
_read_from :: proc(fd: Handle, r: io.Reader) -> (n: i64, err: Error) {
_read_from :: proc(f: ^File, r: io.Reader) -> (n: i64, err: Error) {
//TODO
return
}
_write :: proc(fd: Handle, p: []byte) -> (n: int, err: Error) {
_write :: proc(f: ^File, p: []byte) -> (n: int, err: Error) {
if len(p) == 0 {
return 0, nil
}
n = unix.sys_write(int(fd), &p[0], uint(len(p)))
n = unix.sys_write(f.impl.fd, &p[0], uint(len(p)))
if n < 0 {
return -1, _get_platform_error(n)
}
return int(n), nil
}
_write_at :: proc(fd: Handle, p: []byte, offset: i64) -> (n: int, err: Error) {
_write_at :: proc(f: ^File, p: []byte, offset: i64) -> (n: int, err: Error) {
if offset < 0 {
return 0, .Invalid_Offset
}
b, offset := p, offset
for len(b) > 0 {
m := unix.sys_pwrite(int(fd), &b[0], len(b), offset)
m := unix.sys_pwrite(f.impl.fd, &b[0], len(b), offset)
if m < 0 {
return -1, _get_platform_error(m)
}
@@ -152,30 +171,30 @@ _write_at :: proc(fd: Handle, p: []byte, offset: i64) -> (n: int, err: Error) {
return
}
_write_to :: proc(fd: Handle, w: io.Writer) -> (n: i64, err: Error) {
_write_to :: proc(f: ^File, w: io.Writer) -> (n: i64, err: Error) {
//TODO
return
}
_file_size :: proc(fd: Handle) -> (n: i64, err: Error) {
s: OS_Stat = ---
res := unix.sys_fstat(int(fd), &s)
_file_size :: proc(f: ^File) -> (n: i64, err: Error) {
s: _Stat = ---
res := unix.sys_fstat(f.impl.fd, &s)
if res < 0 {
return -1, _get_platform_error(res)
}
return s.size, nil
}
_sync :: proc(fd: Handle) -> Error {
return _ok_or_error(unix.sys_fsync(int(fd)))
_sync :: proc(f: ^File) -> Error {
return _ok_or_error(unix.sys_fsync(f.impl.fd))
}
_flush :: proc(fd: Handle) -> Error {
return _ok_or_error(unix.sys_fsync(int(fd)))
_flush :: proc(f: ^File) -> Error {
return _ok_or_error(unix.sys_fsync(f.impl.fd))
}
_truncate :: proc(fd: Handle, size: i64) -> Error {
return _ok_or_error(unix.sys_ftruncate(int(fd), size))
_truncate :: proc(f: ^File, size: i64) -> Error {
return _ok_or_error(unix.sys_ftruncate(f.impl.fd, size))
}
_remove :: proc(name: string) -> Error {
@@ -184,13 +203,13 @@ _remove :: proc(name: string) -> Error {
delete(name_cstr)
}
handle_i := unix.sys_open(name_cstr, int(File_Flags.Read))
if handle_i < 0 {
return _get_platform_error(handle_i)
fd := unix.sys_open(name_cstr, int(File_Flags.Read))
if fd < 0 {
return _get_platform_error(fd)
}
defer unix.sys_close(handle_i)
defer unix.sys_close(fd)
if _is_dir(Handle(handle_i)) {
if _is_dir_fd(fd) {
return _ok_or_error(unix.sys_rmdir(name_cstr))
}
return _ok_or_error(unix.sys_unlink(name_cstr))
@@ -242,7 +261,7 @@ _read_link_cstr :: proc(name_cstr: cstring, allocator := context.allocator) -> (
rc := unix.sys_readlink(name_cstr, &(buf[0]), bufsz)
if rc < 0 {
delete(buf)
return "", _get_platform_error(int(unix.get_errno(rc)))
return "", _get_platform_error(rc)
} else if rc == int(bufsz) {
bufsz *= 2
delete(buf)
@@ -269,18 +288,40 @@ _unlink :: proc(name: string) -> Error {
return _ok_or_error(unix.sys_unlink(name_cstr))
}
_chdir :: proc(fd: Handle) -> Error {
return _ok_or_error(unix.sys_fchdir(int(fd)))
_chdir :: proc(name: string) -> Error {
name_cstr, allocated := _name_to_cstring(name)
defer if allocated {
delete(name_cstr)
}
return _ok_or_error(unix.sys_chdir(name_cstr))
}
_chmod :: proc(fd: Handle, mode: File_Mode) -> Error {
return _ok_or_error(unix.sys_fchmod(int(fd), int(mode)))
_fchdir :: proc(f: ^File) -> Error {
return _ok_or_error(unix.sys_fchdir(f.impl.fd))
}
_chown :: proc(fd: Handle, uid, gid: int) -> Error {
return _ok_or_error(unix.sys_fchown(int(fd), uid, gid))
_chmod :: proc(name: string, mode: File_Mode) -> Error {
name_cstr, allocated := _name_to_cstring(name)
defer if allocated {
delete(name_cstr)
}
return _ok_or_error(unix.sys_chmod(name_cstr, int(mode)))
}
_fchmod :: proc(f: ^File, mode: File_Mode) -> Error {
return _ok_or_error(unix.sys_fchmod(f.impl.fd, int(mode)))
}
// NOTE: will throw error without super user priviledges
_chown :: proc(name: string, uid, gid: int) -> Error {
name_cstr, allocated := _name_to_cstring(name)
defer if allocated {
delete(name_cstr)
}
return _ok_or_error(unix.sys_chown(name_cstr, uid, gid))
}
// NOTE: will throw error without super user priviledges
_lchown :: proc(name: string, uid, gid: int) -> Error {
name_cstr, allocated := _name_to_cstring(name)
defer if allocated {
@@ -289,6 +330,11 @@ _lchown :: proc(name: string, uid, gid: int) -> Error {
return _ok_or_error(unix.sys_lchown(name_cstr, uid, gid))
}
// NOTE: will throw error without super user priviledges
_fchown :: proc(f: ^File, uid, gid: int) -> Error {
return _ok_or_error(unix.sys_fchown(f.impl.fd, uid, gid))
}
_chtimes :: proc(name: string, atime, mtime: time.Time) -> Error {
name_cstr, allocated := _name_to_cstring(name)
defer if allocated {
@@ -301,6 +347,14 @@ _chtimes :: proc(name: string, atime, mtime: time.Time) -> Error {
return _ok_or_error(unix.sys_utimensat(_AT_FDCWD, name_cstr, &times, 0))
}
_fchtimes :: proc(f: ^File, atime, mtime: time.Time) -> Error {
times := [2]Unix_File_Time {
{ atime._nsec, 0 },
{ mtime._nsec, 0 },
}
return _ok_or_error(unix.sys_utimensat(f.impl.fd, nil, &times, 0))
}
_exists :: proc(name: string) -> bool {
name_cstr, allocated := _name_to_cstring(name)
defer if allocated {
@@ -309,18 +363,38 @@ _exists :: proc(name: string) -> bool {
return unix.sys_access(name_cstr, F_OK) == 0
}
_is_file :: proc(fd: Handle) -> bool {
s: OS_Stat
res := unix.sys_fstat(int(fd), &s)
_is_file :: proc(name: string) -> bool {
name_cstr, allocated := _name_to_cstring(name)
defer if allocated {
delete(name_cstr)
}
s: _Stat
res := unix.sys_stat(name_cstr, &s)
return S_ISREG(s.mode)
}
_is_file_fd :: proc(fd: int) -> bool {
s: _Stat
res := unix.sys_fstat(fd, &s)
if res < 0 { // error
return false
}
return S_ISREG(s.mode)
}
_is_dir :: proc(fd: Handle) -> bool {
s: OS_Stat
res := unix.sys_fstat(int(fd), &s)
_is_dir :: proc(name: string) -> bool {
name_cstr, allocated := _name_to_cstring(name)
defer if allocated {
delete(name_cstr)
}
s: _Stat
res := unix.sys_stat(name_cstr, &s)
return S_ISDIR(s.mode)
}
_is_dir_fd :: proc(fd: int) -> bool {
s: _Stat
res := unix.sys_fstat(fd, &s)
if res < 0 { // error
return false
}
@@ -330,7 +404,7 @@ _is_dir :: proc(fd: Handle) -> bool {
// Ideally we want to use the temp_allocator. PATH_MAX on Linux is commonly
// defined as 512, however, it is well known that paths can exceed that limit.
// So, in theory you could have a path larger than the entire temp_allocator's
// buffer. Therefor any large paths will use context.allocator.
// buffer. Therefor, any large paths will use context.allocator.
_name_to_cstring :: proc(name: string) -> (cname: cstring, allocated: bool) {
if len(name) > _CSTRING_NAME_HEAP_THRESHOLD {
cname = strings.clone_to_cstring(name)

View File

@@ -2,12 +2,20 @@ package os2
import "core:io"
file_to_stream :: proc(fd: Handle) -> (s: io.Stream) {
s.stream_data = rawptr(uintptr(fd))
to_stream :: proc(f: ^File) -> (s: io.Stream) {
s.stream_data = f
s.stream_vtable = _file_stream_vtable
return
}
to_writer :: proc(f: ^File) -> (s: io.Writer) {
return {to_stream(f)}
}
to_reader :: proc(f: ^File) -> (s: io.Reader) {
return {to_stream(f)}
}
@(private)
error_to_io_error :: proc(ferr: Error) -> io.Error {
if ferr == nil {
@@ -20,66 +28,66 @@ error_to_io_error :: proc(ferr: Error) -> io.Error {
@(private)
_file_stream_vtable := &io.Stream_VTable{
impl_read = proc(s: io.Stream, p: []byte) -> (n: int, err: io.Error) {
fd := Handle(uintptr(s.stream_data))
f := (^File)(s.stream_data)
ferr: Error
n, ferr = read(fd, p)
n, ferr = read(f, p)
err = error_to_io_error(ferr)
return
},
impl_read_at = proc(s: io.Stream, p: []byte, offset: i64) -> (n: int, err: io.Error) {
fd := Handle(uintptr(s.stream_data))
f := (^File)(s.stream_data)
ferr: Error
n, ferr = read_at(fd, p, offset)
n, ferr = read_at(f, p, offset)
err = error_to_io_error(ferr)
return
},
impl_write_to = proc(s: io.Stream, w: io.Writer) -> (n: i64, err: io.Error) {
fd := Handle(uintptr(s.stream_data))
f := (^File)(s.stream_data)
ferr: Error
n, ferr = write_to(fd, w)
n, ferr = write_to(f, w)
err = error_to_io_error(ferr)
return
},
impl_write = proc(s: io.Stream, p: []byte) -> (n: int, err: io.Error) {
fd := Handle(uintptr(s.stream_data))
f := (^File)(s.stream_data)
ferr: Error
n, ferr = write(fd, p)
n, ferr = write(f, p)
err = error_to_io_error(ferr)
return
},
impl_write_at = proc(s: io.Stream, p: []byte, offset: i64) -> (n: int, err: io.Error) {
fd := Handle(uintptr(s.stream_data))
f := (^File)(s.stream_data)
ferr: Error
n, ferr = write_at(fd, p, offset)
n, ferr = write_at(f, p, offset)
err = error_to_io_error(ferr)
return
},
impl_read_from = proc(s: io.Stream, r: io.Reader) -> (n: i64, err: io.Error) {
fd := Handle(uintptr(s.stream_data))
f := (^File)(s.stream_data)
ferr: Error
n, ferr = read_from(fd, r)
n, ferr = read_from(f, r)
err = error_to_io_error(ferr)
return
},
impl_seek = proc(s: io.Stream, offset: i64, whence: io.Seek_From) -> (i64, io.Error) {
fd := Handle(uintptr(s.stream_data))
n, ferr := seek(fd, offset, Seek_From(whence))
f := (^File)(s.stream_data)
n, ferr := seek(f, offset, Seek_From(whence))
err := error_to_io_error(ferr)
return n, err
},
impl_size = proc(s: io.Stream) -> i64 {
fd := Handle(uintptr(s.stream_data))
sz, _ := file_size(fd)
f := (^File)(s.stream_data)
sz, _ := file_size(f)
return sz
},
impl_flush = proc(s: io.Stream) -> io.Error {
fd := Handle(uintptr(s.stream_data))
ferr := flush(fd)
f := (^File)(s.stream_data)
ferr := flush(f)
return error_to_io_error(ferr)
},
impl_close = proc(s: io.Stream) -> io.Error {
fd := Handle(uintptr(s.stream_data))
ferr := close(fd)
f := (^File)(s.stream_data)
ferr := close(f)
return error_to_io_error(ferr)
},
}

View File

@@ -4,25 +4,25 @@ import "core:mem"
import "core:strconv"
import "core:unicode/utf8"
write_string :: proc(fd: Handle, s: string) -> (n: int, err: Error) {
return write(fd, transmute([]byte)s)
write_string :: proc(f: ^File, s: string) -> (n: int, err: Error) {
return write(f, transmute([]byte)s)
}
write_byte :: proc(fd: Handle, b: byte) -> (n: int, err: Error) {
return write(fd, []byte{b})
write_byte :: proc(f: ^File, b: byte) -> (n: int, err: Error) {
return write(f, []byte{b})
}
write_rune :: proc(fd: Handle, r: rune) -> (n: int, err: Error) {
write_rune :: proc(f: ^File, r: rune) -> (n: int, err: Error) {
if r < utf8.RUNE_SELF {
return write_byte(fd, byte(r))
return write_byte(f, byte(r))
}
b: [4]byte
b, n = utf8.encode_rune(r)
return write(fd, b[:n])
return write(f, b[:n])
}
write_encoded_rune :: proc(fd: Handle, r: rune) -> (n: int, err: Error) {
write_encoded_rune :: proc(f: ^File, r: rune) -> (n: int, err: Error) {
wrap :: proc(m: int, merr: Error, n: ^int, err: ^Error) -> bool {
n^ += m
if merr != nil {
@@ -32,102 +32,78 @@ write_encoded_rune :: proc(fd: Handle, r: rune) -> (n: int, err: Error) {
return false
}
if wrap(write_byte(fd, '\''), &n, &err) { return }
if wrap(write_byte(f, '\''), &n, &err) { return }
switch r {
case '\a': if wrap(write_string(fd, "\\a"), &n, &err) { return }
case '\b': if wrap(write_string(fd, "\\b"), &n, &err) { return }
case '\e': if wrap(write_string(fd, "\\e"), &n, &err) { return }
case '\f': if wrap(write_string(fd, "\\f"), &n, &err) { return }
case '\n': if wrap(write_string(fd, "\\n"), &n, &err) { return }
case '\r': if wrap(write_string(fd, "\\r"), &n, &err) { return }
case '\t': if wrap(write_string(fd, "\\t"), &n, &err) { return }
case '\v': if wrap(write_string(fd, "\\v"), &n, &err) { return }
case '\a': if wrap(write_string(f, "\\a"), &n, &err) { return }
case '\b': if wrap(write_string(f, "\\b"), &n, &err) { return }
case '\e': if wrap(write_string(f, "\\e"), &n, &err) { return }
case '\f': if wrap(write_string(f, "\\f"), &n, &err) { return }
case '\n': if wrap(write_string(f, "\\n"), &n, &err) { return }
case '\r': if wrap(write_string(f, "\\r"), &n, &err) { return }
case '\t': if wrap(write_string(f, "\\t"), &n, &err) { return }
case '\v': if wrap(write_string(f, "\\v"), &n, &err) { return }
case:
if r < 32 {
if wrap(write_string(fd, "\\x"), &n, &err) { return }
if wrap(write_string(f, "\\x"), &n, &err) { return }
b: [2]byte
s := strconv.append_bits(b[:], u64(r), 16, true, 64, strconv.digits, nil)
switch len(s) {
case 0: if wrap(write_string(fd, "00"), &n, &err) { return }
case 1: if wrap(write_rune(fd, '0'), &n, &err) { return }
case 2: if wrap(write_string(fd, s), &n, &err) { return }
case 0: if wrap(write_string(f, "00"), &n, &err) { return }
case 1: if wrap(write_rune(f, '0'), &n, &err) { return }
case 2: if wrap(write_string(f, s), &n, &err) { return }
}
} else {
if wrap(write_rune(fd, r), &n, &err) { return }
if wrap(write_rune(f, r), &n, &err) { return }
}
}
_ = wrap(write_byte(fd, '\''), &n, &err)
_ = wrap(write_byte(f, '\''), &n, &err)
return
}
write_ptr :: proc(fd: Handle, data: rawptr, len: int) -> (n: int, err: Error) {
write_ptr :: proc(f: ^File, data: rawptr, len: int) -> (n: int, err: Error) {
s := transmute([]byte)mem.Raw_Slice{data, len}
return write(fd, s)
return write(f, s)
}
read_ptr :: proc(fd: Handle, data: rawptr, len: int) -> (n: int, err: Error) {
read_ptr :: proc(f: ^File, data: rawptr, len: int) -> (n: int, err: Error) {
s := transmute([]byte)mem.Raw_Slice{data, len}
return read(fd, s)
return read(f, s)
}
read_at_least :: proc(fd: Handle, buf: []byte, min: int) -> (n: int, err: Error) {
if len(buf) < min {
return 0, .Short_Buffer
read_entire_file :: proc(name: string, allocator := context.allocator) -> (data: []byte, err: Error) {
f, ferr := open(name)
if ferr != nil {
return nil, ferr
}
for n < min && err == nil {
nn: int
nn, err = read(fd, buf[n:])
n += nn
defer close(f)
size: int
if size64, err := file_size(f); err == nil {
if i64(int(size64)) != size64 {
size = int(size64)
}
}
if n >= min {
err = nil
size += 1 // for EOF
// TODO(bill): Is this correct logic?
total: int
data = make([]byte, size, allocator) or_return
for {
n: int
n, err = read(f, data[total:])
total += n
if err != nil {
if err == .EOF {
err = nil
}
data = data[:total]
return
}
}
return
}
read_full :: proc(fd: Handle, buf: []byte) -> (n: int, err: Error) {
return read_at_least(fd, buf, len(buf))
}
file_size_from_path :: proc(path: string) -> (length: i64, err: Error) {
fd := open(path, O_RDONLY, 0) or_return
defer close(fd)
return file_size(fd)
}
read_entire_file :: proc{
read_entire_file_from_path,
read_entire_file_from_handle,
}
read_entire_file_from_path :: proc(name: string, allocator := context.allocator) -> (data: []byte, err: Error) {
fd := open(name, {.Read}) or_return
defer close(fd)
return read_entire_file_from_handle(fd, allocator)
}
read_entire_file_from_handle :: proc(fd: Handle, allocator := context.allocator) -> (data: []byte, err: Error) {
length := file_size(fd) or_return
if length <= 0 {
return nil, nil
}
if i64(int(length)) != length {
return nil, .Short_Buffer
}
data = make([]byte, int(length), allocator)
if data == nil {
return nil, .Short_Buffer
}
defer if err != nil {
delete(data, allocator)
}
bytes_read := read_full(fd, data) or_return
return data[:bytes_read], nil
}
write_entire_file :: proc(name: string, data: []byte, perm: File_Mode, truncate := true) -> Error {
@@ -135,9 +111,11 @@ write_entire_file :: proc(name: string, data: []byte, perm: File_Mode, truncate
if truncate {
flags |= O_TRUNC
}
f := open(name, flags, perm) or_return
_, err := write(f, data)
f, err := open(name, flags, perm)
if err != nil {
return err
}
_, err = write(f, data)
if cerr := close(f); cerr != nil && err == nil {
err = cerr
}

View File

@@ -2,323 +2,455 @@
package os2
import "core:io"
import "core:mem"
import "core:sync"
import "core:runtime"
import "core:strings"
import "core:time"
import "core:unicode/utf16"
import win32 "core:sys/windows"
_get_platform_error :: proc() -> Error {
// TODO(bill): map some of these errors correctly
err := win32.GetLastError()
if err == 0 {
return nil
}
return Platform_Error{i32(err)}
INVALID_HANDLE :: ~uintptr(0)
S_IWRITE :: 0o200
_ERROR_BAD_NETPATH :: 53
MAX_RW :: 1<<30
_file_allocator :: proc() -> runtime.Allocator {
return heap_allocator()
}
_ok_or_error :: proc(ok: win32.BOOL) -> Error {
return nil if ok else _get_platform_error()
_temp_allocator :: proc() -> runtime.Allocator {
// TODO(bill): make this not depend on the context allocator
return context.temp_allocator
}
_std_handle :: proc(kind: Std_Handle_Kind) -> Handle {
get_handle :: proc(h: win32.DWORD) -> Handle {
fd := win32.GetStdHandle(h)
when size_of(uintptr) == 8 {
win32.SetHandleInformation(fd, win32.HANDLE_FLAG_INHERIT, 0)
}
return Handle(fd)
}
switch kind {
case .stdin: return get_handle(win32.STD_INPUT_HANDLE)
case .stdout: return get_handle(win32.STD_OUTPUT_HANDLE)
case .stderr: return get_handle(win32.STD_ERROR_HANDLE)
}
unreachable()
_File_Kind :: enum u8 {
File,
Console,
Pipe,
}
_open :: proc(path: string, flags: File_Flags, perm: File_Mode) -> (handle: Handle, err: Error) {
handle = INVALID_HANDLE
if len(path) == 0 {
_File :: struct {
fd: rawptr,
name: string,
wname: win32.wstring,
kind: _File_Kind,
allocator: runtime.Allocator,
rw_mutex: sync.RW_Mutex, // read write calls
p_mutex: sync.Mutex, // pread pwrite calls
}
_handle :: proc(f: ^File) -> win32.HANDLE {
return win32.HANDLE(_fd(f))
}
_open_internal :: proc(name: string, flags: File_Flags, perm: File_Mode) -> (handle: uintptr, err: Error) {
if len(name) == 0 {
err = .Not_Exist
return
}
path := _fix_long_path(name)
access: u32
switch flags & O_RDONLY|O_WRONLY|O_RDWR {
case O_RDONLY: access = win32.FILE_GENERIC_READ
case O_WRONLY: access = win32.FILE_GENERIC_WRITE
case O_RDWR: access = win32.FILE_GENERIC_READ | win32.FILE_GENERIC_WRITE
switch flags & {.Read, .Write} {
case {.Read}: access = win32.FILE_GENERIC_READ
case {.Write}: access = win32.FILE_GENERIC_WRITE
case {.Read, .Write}: access = win32.FILE_GENERIC_READ | win32.FILE_GENERIC_WRITE
}
if .Append in flags {
access &~= win32.FILE_GENERIC_WRITE
access |= win32.FILE_APPEND_DATA
}
if .Create in flags {
access |= win32.FILE_GENERIC_WRITE
}
share_mode := win32.FILE_SHARE_READ|win32.FILE_SHARE_WRITE
sa: ^win32.SECURITY_ATTRIBUTES = nil
sa_inherit := win32.SECURITY_ATTRIBUTES{nLength = size_of(win32.SECURITY_ATTRIBUTES), bInheritHandle = true}
if .Close_On_Exec in flags {
sa = &sa_inherit
if .Append in flags {
access &~= win32.FILE_GENERIC_WRITE
access |= win32.FILE_APPEND_DATA
}
share_mode := u32(win32.FILE_SHARE_READ | win32.FILE_SHARE_WRITE)
sa: ^win32.SECURITY_ATTRIBUTES
if .Close_On_Exec not_in flags {
sa = &win32.SECURITY_ATTRIBUTES{}
sa.nLength = size_of(win32.SECURITY_ATTRIBUTES)
sa.bInheritHandle = true
}
create_mode: u32
create_mode: u32 = win32.OPEN_EXISTING
switch {
case flags&(O_CREATE|O_EXCL) == (O_CREATE | O_EXCL):
case flags & {.Create, .Excl} == {.Create, .Excl}:
create_mode = win32.CREATE_NEW
case flags&(O_CREATE|O_TRUNC) == (O_CREATE | O_TRUNC):
case flags & {.Create, .Trunc} == {.Create, .Trunc}:
create_mode = win32.CREATE_ALWAYS
case flags&O_CREATE == O_CREATE:
case flags & {.Create} == {.Create}:
create_mode = win32.OPEN_ALWAYS
case flags&O_TRUNC == O_TRUNC:
case flags & {.Trunc} == {.Trunc}:
create_mode = win32.TRUNCATE_EXISTING
case:
create_mode = win32.OPEN_EXISTING
}
wide_path := win32.utf8_to_wstring(path)
handle = Handle(win32.CreateFileW(wide_path, access, share_mode, sa, create_mode, win32.FILE_ATTRIBUTE_NORMAL|win32.FILE_FLAG_BACKUP_SEMANTICS, nil))
if handle == INVALID_HANDLE {
err = _get_platform_error()
attrs: u32 = win32.FILE_ATTRIBUTE_NORMAL
if perm & S_IWRITE == 0 {
attrs = win32.FILE_ATTRIBUTE_READONLY
if create_mode == win32.CREATE_ALWAYS {
// NOTE(bill): Open has just asked to create a file in read-only mode.
// If the file already exists, to make it akin to a *nix open call,
// the call preserves the existing permissions.
h := win32.CreateFileW(path, access, share_mode, sa, win32.TRUNCATE_EXISTING, win32.FILE_ATTRIBUTE_NORMAL, nil)
if h == win32.INVALID_HANDLE {
switch e := win32.GetLastError(); e {
case win32.ERROR_FILE_NOT_FOUND, _ERROR_BAD_NETPATH, win32.ERROR_PATH_NOT_FOUND:
// file does not exist, create the file
case 0:
return uintptr(h), nil
case:
return 0, Platform_Error(e)
}
}
}
}
return
h := win32.CreateFileW(path, access, share_mode, sa, create_mode, attrs, nil)
if h == win32.INVALID_HANDLE {
return 0, _get_platform_error()
}
return uintptr(h), nil
}
_close :: proc(fd: Handle) -> Error {
if fd == 0 {
return .Invalid_Argument
_open :: proc(name: string, flags: File_Flags, perm: File_Mode) -> (f: ^File, err: Error) {
flags := flags if flags != nil else {.Read}
handle := _open_internal(name, flags + {.Close_On_Exec}, perm) or_return
return _new_file(handle, name), nil
}
_new_file :: proc(handle: uintptr, name: string) -> ^File {
if handle == INVALID_HANDLE {
return nil
}
hnd := win32.HANDLE(fd)
f := new(File, _file_allocator())
file_info: win32.BY_HANDLE_FILE_INFORMATION
_ok_or_error(win32.GetFileInformationByHandle(hnd, &file_info)) or_return
f.impl.allocator = _file_allocator()
f.impl.fd = rawptr(fd)
f.impl.name = strings.clone(name, f.impl.allocator)
f.impl.wname = win32.utf8_to_wstring(name, f.impl.allocator)
if file_info.dwFileAttributes & win32.FILE_ATTRIBUTE_DIRECTORY != 0 {
handle := _handle(f)
kind := _File_Kind.File
if m: u32; win32.GetConsoleMode(handle, &m) {
kind = .Console
}
if win32.GetFileType(handle) == win32.FILE_TYPE_PIPE {
kind = .Pipe
}
f.impl.kind = kind
return f
}
_fd :: proc(f: ^File) -> uintptr {
if f == nil {
return INVALID_HANDLE
}
return uintptr(f.impl.fd)
}
_destroy :: proc(f: ^File) -> Error {
if f == nil {
return nil
}
return _ok_or_error(win32.CloseHandle(hnd))
a := f.impl.allocator
free(f.impl.wname, a)
delete(f.impl.name, a)
free(f, a)
return nil
}
_name :: proc(fd: Handle, allocator := context.allocator) -> string {
FILE_NAME_NORMALIZED :: 0x0
handle := win32.HANDLE(fd)
buf_len := win32.GetFinalPathNameByHandleW(handle, nil, 0, FILE_NAME_NORMALIZED)
if buf_len == 0 {
return ""
_close :: proc(f: ^File) -> Error {
if f == nil {
return nil
}
buf := make([]u16, buf_len, context.temp_allocator)
n := win32.GetFinalPathNameByHandleW(handle, raw_data(buf), buf_len, FILE_NAME_NORMALIZED)
return win32.utf16_to_utf8(buf[:n], allocator)
if !win32.CloseHandle(win32.HANDLE(f.impl.fd)) {
return .Closed
}
return _destroy(f)
}
_seek :: proc(fd: Handle, offset: i64, whence: Seek_From) -> (ret: i64, err: Error) {
new_offset: win32.LARGE_INTEGER
move_method: win32.DWORD
_name :: proc(f: ^File) -> string {
return f.impl.name if f != nil else ""
}
_seek :: proc(f: ^File, offset: i64, whence: Seek_From) -> (ret: i64, err: Error) {
handle := _handle(f)
if handle == win32.INVALID_HANDLE {
return 0, .Invalid_File
}
if f.impl.kind == .Pipe {
return 0, .Invalid_File
}
sync.guard(&f.impl.rw_mutex)
w: u32
switch whence {
case .Start: move_method = win32.FILE_BEGIN
case .Current: move_method = win32.FILE_CURRENT
case .End: move_method = win32.FILE_END
case .Start: w = win32.FILE_BEGIN
case .Current: w = win32.FILE_CURRENT
case .End: w = win32.FILE_END
}
ok := win32.SetFilePointerEx(win32.HANDLE(fd), win32.LARGE_INTEGER(offset), &new_offset, move_method)
ret = i64(new_offset)
if !ok {
err = .Invalid_Whence
hi := i32(offset>>32)
lo := i32(offset)
dw_ptr := win32.SetFilePointer(handle, lo, &hi, w)
if dw_ptr == win32.INVALID_SET_FILE_POINTER {
return 0, _get_platform_error()
}
return
return i64(hi)<<32 + i64(dw_ptr), nil
}
MAX_RW :: 1<<30
_read :: proc(f: ^File, p: []byte) -> (n: int, err: Error) {
read_console :: proc(handle: win32.HANDLE, b: []byte) -> (n: int, err: Error) {
if len(b) == 0 {
return 0, nil
}
@(private="file")
_read_console :: proc(handle: win32.HANDLE, b: []byte) -> (n: int, err: Error) {
if len(b) == 0 {
return 0, nil
}
// TODO(bill): should this be moved to `_File` instead?
BUF_SIZE :: 386
buf16: [BUF_SIZE]u16
buf8: [4*BUF_SIZE]u8
BUF_SIZE :: 386
buf16: [BUF_SIZE]u16
buf8: [4*BUF_SIZE]u8
for n < len(b) && err == nil {
max_read := u32(min(BUF_SIZE, len(b)/4))
single_read_length: u32
err = _ok_or_error(win32.ReadConsoleW(handle, &buf16[0], max_read, &single_read_length, nil))
buf8_len := utf16.decode_to_utf8(buf8[:], buf16[:single_read_length])
src := buf8[:buf8_len]
ctrl_z := false
for i := 0; i < len(src) && n+i < len(b); i += 1 {
x := src[i]
if x == 0x1a { // ctrl-z
ctrl_z = true
for n < len(b) && err == nil {
min_read := max(len(b)/4, 1 if len(b) > 0 else 0)
max_read := u32(min(BUF_SIZE, min_read))
if max_read == 0 {
break
}
single_read_length: u32
ok := win32.ReadConsoleW(handle, &buf16[0], max_read, &single_read_length, nil)
if !ok {
err = _get_platform_error()
}
buf8_len := utf16.decode_to_utf8(buf8[:], buf16[:single_read_length])
src := buf8[:buf8_len]
ctrl_z := false
for i := 0; i < len(src) && n+i < len(b); i += 1 {
x := src[i]
if x == 0x1a { // ctrl-z
ctrl_z = true
break
}
b[n] = x
n += 1
}
if ctrl_z || single_read_length < max_read {
break
}
// NOTE(bill): if the last two values were a newline, then it is expected that
// this is the end of the input
if n >= 2 && single_read_length == max_read && string(b[n-2:n]) == "\r\n" {
break
}
b[n] = x
n += 1
}
if ctrl_z || single_read_length < len(buf16) {
break
}
return
}
return
}
_read :: proc(fd: Handle, p: []byte) -> (n: int, err: Error) {
if len(p) == 0 {
return 0, nil
}
handle := win32.HANDLE(fd)
m: u32
is_console := win32.GetConsoleMode(handle, &m)
handle := _handle(f)
single_read_length: win32.DWORD
total_read: int
length := len(p)
to_read := min(win32.DWORD(length), MAX_RW)
sync.shared_guard(&f.impl.rw_mutex) // multiple readers
e: win32.BOOL
if is_console {
n, err := _read_console(handle, p[total_read:][:to_read])
total_read += n
if err != nil {
return int(total_read), err
if sync.guard(&f.impl.p_mutex) {
to_read := min(win32.DWORD(length), MAX_RW)
ok: win32.BOOL
if f.impl.kind == .Console {
n, err := read_console(handle, p[total_read:][:to_read])
total_read += n
if err != nil {
return int(total_read), err
}
} else {
ok = win32.ReadFile(handle, &p[total_read], to_read, &single_read_length, nil)
}
if single_read_length > 0 && ok {
total_read += int(single_read_length)
} else {
err = _get_platform_error()
}
} else {
e = win32.ReadFile(handle, &p[total_read], to_read, &single_read_length, nil)
}
if single_read_length <= 0 || !e {
return int(total_read), _get_platform_error()
}
total_read += int(single_read_length)
return int(total_read), nil
}
_read_at :: proc(f: ^File, p: []byte, offset: i64) -> (n: int, err: Error) {
pread :: proc(f: ^File, data: []byte, offset: i64) -> (n: int, err: Error) {
buf := data
if len(buf) > MAX_RW {
buf = buf[:MAX_RW]
_read_at :: proc(fd: Handle, p: []byte, offset: i64) -> (n: int, err: Error) {
if offset < 0 {
return 0, .Invalid_Offset
}
curr_offset := seek(f, offset, .Current) or_return
defer seek(f, curr_offset, .Start)
o := win32.OVERLAPPED{
OffsetHigh = u32(offset>>32),
Offset = u32(offset),
}
// TODO(bill): Determine the correct behaviour for consoles
h := _handle(f)
done: win32.DWORD
if !win32.ReadFile(h, raw_data(buf), u32(len(buf)), &done, &o) {
err = _get_platform_error()
done = 0
}
n = int(done)
return
}
b, offset := p, offset
for len(b) > 0 {
m := _pread(fd, b, offset) or_return
sync.guard(&f.impl.p_mutex)
p, offset := p, offset
for len(p) > 0 {
m := pread(f, p, offset) or_return
n += m
b = b[m:]
p = p[m:]
offset += i64(m)
}
return
}
_read_from :: proc(fd: Handle, r: io.Reader) -> (n: i64, err: Error) {
_read_from :: proc(f: ^File, r: io.Reader) -> (n: i64, err: Error) {
// TODO(bill)
return
}
_pread :: proc(fd: Handle, data: []byte, offset: i64) -> (n: int, err: Error) {
buf := data
if len(buf) > MAX_RW {
buf = buf[:MAX_RW]
}
curr_offset := seek(fd, offset, .Current) or_return
defer seek(fd, curr_offset, .Start)
o := win32.OVERLAPPED{
OffsetHigh = u32(offset>>32),
Offset = u32(offset),
_write :: proc(f: ^File, p: []byte) -> (n: int, err: Error) {
if len(p) == 0 {
return
}
// TODO(bill): Determine the correct behaviour for consoles
single_write_length: win32.DWORD
total_write: i64
length := i64(len(p))
h := win32.HANDLE(fd)
done: win32.DWORD
_ok_or_error(win32.ReadFile(h, raw_data(buf), u32(len(buf)), &done, &o)) or_return
return int(done), nil
handle := _handle(f)
sync.guard(&f.impl.rw_mutex)
for total_write < length {
remaining := length - total_write
to_write := win32.DWORD(min(i32(remaining), MAX_RW))
e := win32.WriteFile(handle, &p[total_write], to_write, &single_write_length, nil)
if single_write_length <= 0 || !e {
n = int(total_write)
err = _get_platform_error()
return
}
total_write += i64(single_write_length)
}
return int(total_write), nil
}
_pwrite :: proc(fd: Handle, data: []byte, offset: i64) -> (n: int, err: Error) {
buf := data
if len(buf) > MAX_RW {
buf = buf[:MAX_RW]
_write_at :: proc(f: ^File, p: []byte, offset: i64) -> (n: int, err: Error) {
pwrite :: proc(f: ^File, data: []byte, offset: i64) -> (n: int, err: Error) {
buf := data
if len(buf) > MAX_RW {
buf = buf[:MAX_RW]
}
curr_offset := seek(fd, offset, .Current) or_return
defer seek(fd, curr_offset, .Start)
}
curr_offset := seek(f, offset, .Current) or_return
defer seek(f, curr_offset, .Start)
o := win32.OVERLAPPED{
OffsetHigh = u32(offset>>32),
Offset = u32(offset),
o := win32.OVERLAPPED{
OffsetHigh = u32(offset>>32),
Offset = u32(offset),
}
h := _handle(f)
done: win32.DWORD
if !win32.WriteFile(h, raw_data(buf), u32(len(buf)), &done, &o) {
err = _get_platform_error()
done = 0
}
n = int(done)
return
}
h := win32.HANDLE(fd)
done: win32.DWORD
_ok_or_error(win32.WriteFile(h, raw_data(buf), u32(len(buf)), &done, &o)) or_return
return int(done), nil
}
_write :: proc(fd: Handle, p: []byte) -> (n: int, err: Error) {
return
}
_write_at :: proc(fd: Handle, p: []byte, offset: i64) -> (n: int, err: Error) {
if offset < 0 {
return 0, .Invalid_Offset
}
b, offset := p, offset
for len(b) > 0 {
m := _pwrite(fd, b, offset) or_return
sync.guard(&f.impl.p_mutex)
p, offset := p, offset
for len(p) > 0 {
m := pwrite(f, p, offset) or_return
n += m
b = b[m:]
p = p[m:]
offset += i64(m)
}
return
}
_write_to :: proc(fd: Handle, w: io.Writer) -> (n: i64, err: Error) {
_write_to :: proc(f: ^File, w: io.Writer) -> (n: i64, err: Error) {
// TODO(bill)
return
}
_file_size :: proc(fd: Handle) -> (n: i64, err: Error) {
_file_size :: proc(f: ^File) -> (n: i64, err: Error) {
length: win32.LARGE_INTEGER
err = _ok_or_error(win32.GetFileSizeEx(win32.HANDLE(fd), &length))
return i64(length), err
handle := _handle(f)
if !win32.GetFileSizeEx(handle, &length) {
err = _get_platform_error()
}
n = i64(length)
return
}
_sync :: proc(fd: Handle) -> Error {
_sync :: proc(f: ^File) -> Error {
return _flush(f)
}
_flush :: proc(f: ^File) -> Error {
handle := _handle(f)
if !win32.FlushFileBuffers(handle) {
return _get_platform_error()
}
return nil
}
_flush :: proc(fd: Handle) -> Error {
return _ok_or_error(win32.FlushFileBuffers(win32.HANDLE(fd)))
}
_truncate :: proc(fd: Handle, size: i64) -> Error {
offset := seek(fd, size, .Start) or_return
defer seek(fd, offset, .Start)
return _ok_or_error(win32.SetEndOfFile(win32.HANDLE(fd)))
_truncate :: proc(f: ^File, size: i64) -> Error {
if f == nil {
return nil
}
curr_off := seek(f, 0, .Current) or_return
defer seek(f, curr_off, .Start)
seek(f, size, .Start) or_return
handle := _handle(f)
if !win32.SetEndOfFile(handle) {
return _get_platform_error()
}
return nil
}
_remove :: proc(name: string) -> Error {
p := win32.utf8_to_wstring(_fix_long_path(name))
err := _ok_or_error(win32.DeleteFileW(p))
p := _fix_long_path(name)
err, err1: Error
if !win32.DeleteFileW(p) {
err = _get_platform_error()
}
if err == nil {
return nil
}
err1 := _ok_or_error(win32.RemoveDirectoryW(p))
if !win32.RemoveDirectoryW(p) {
err1 = _get_platform_error()
}
if err1 == nil {
return nil
}
@@ -332,7 +464,10 @@ _remove :: proc(name: string) -> Error {
err = err1
} else if a & win32.FILE_ATTRIBUTE_READONLY != 0 {
if win32.SetFileAttributesW(p, a &~ win32.FILE_ATTRIBUTE_READONLY) {
err = _ok_or_error(win32.DeleteFileW(p))
err = nil
if !win32.DeleteFileW(p) {
err = _get_platform_error()
}
}
}
}
@@ -342,80 +477,253 @@ _remove :: proc(name: string) -> Error {
}
_rename :: proc(old_path, new_path: string) -> Error {
from := win32.utf8_to_wstring(old_path, context.temp_allocator)
to := win32.utf8_to_wstring(new_path, context.temp_allocator)
return _ok_or_error(win32.MoveFileExW(from, to, win32.MOVEFILE_REPLACE_EXISTING))
from := _fix_long_path(old_path)
to := _fix_long_path(new_path)
if win32.MoveFileExW(from, to, win32.MOVEFILE_REPLACE_EXISTING) {
return nil
}
return _get_platform_error()
}
_link :: proc(old_name, new_name: string) -> Error {
n := win32.utf8_to_wstring(_fix_long_path(new_name))
o := win32.utf8_to_wstring(_fix_long_path(old_name))
return _ok_or_error(win32.CreateHardLinkW(n, o, nil))
o := _fix_long_path(old_name)
n := _fix_long_path(new_name)
if win32.CreateHardLinkW(n, o, nil) {
return nil
}
return _get_platform_error()
}
_symlink :: proc(old_name, new_name: string) -> Error {
return nil
return .Unsupported
}
_read_link :: proc(name: string) -> (string, Error) {
_open_sym_link :: proc(p: [^]u16) -> (handle: win32.HANDLE, err: Error) {
attrs := u32(win32.FILE_FLAG_BACKUP_SEMANTICS)
attrs |= win32.FILE_FLAG_OPEN_REPARSE_POINT
handle = win32.CreateFileW(p, 0, 0, nil, win32.OPEN_EXISTING, attrs, nil)
if handle == win32.INVALID_HANDLE {
return nil, _get_platform_error()
}
return
}
_normalize_link_path :: proc(p: []u16, allocator: runtime.Allocator) -> (str: string, err: Error) {
has_prefix :: proc(p: []u16, str: string) -> bool {
if len(p) < len(str) {
return false
}
// assume ascii
for i in 0..<len(str) {
if p[i] != u16(str[i]) {
return false
}
}
return true
}
has_unc_prefix :: proc(p: []u16) -> bool {
return has_prefix(p, `\??\`)
}
if !has_unc_prefix(p) {
return win32.utf16_to_utf8(p, allocator)
}
ws := p[4:]
switch {
case len(ws) >= 2 && ws[1] == ':':
return win32.utf16_to_utf8(ws, allocator)
case has_prefix(ws, `UNC\`):
ws[3] = '\\' // override data in buffer
return win32.utf16_to_utf8(ws[3:], allocator)
}
handle := _open_sym_link(raw_data(p)) or_return
defer win32.CloseHandle(handle)
n := win32.GetFinalPathNameByHandleW(handle, nil, 0, win32.VOLUME_NAME_DOS)
if n == 0 {
return "", _get_platform_error()
}
buf := make([]u16, n+1, _temp_allocator())
n = win32.GetFinalPathNameByHandleW(handle, raw_data(buf), u32(len(buf)), win32.VOLUME_NAME_DOS)
if n == 0 {
return "", _get_platform_error()
}
ws = buf[:n]
if has_unc_prefix(ws) {
ws = ws[4:]
if len(ws) > 3 && has_prefix(ws, `UNC`) {
ws[2] = '\\'
return win32.utf16_to_utf8(ws[2:], allocator)
}
return win32.utf16_to_utf8(ws, allocator)
}
return "", .Invalid_Path
}
_read_link :: proc(name: string, allocator: runtime.Allocator) -> (s: string, err: Error) {
MAXIMUM_REPARSE_DATA_BUFFER_SIZE :: 16 * 1024
@thread_local
rdb_buf: [MAXIMUM_REPARSE_DATA_BUFFER_SIZE]byte
p := _fix_long_path(name)
handle := _open_sym_link(p) or_return
defer win32.CloseHandle(handle)
bytes_returned: u32
if !win32.DeviceIoControl(handle, win32.FSCTL_GET_REPARSE_POINT, nil, 0, &rdb_buf[0], len(rdb_buf)-1, &bytes_returned, nil) {
err = _get_platform_error()
return
}
mem.zero_slice(rdb_buf[:min(bytes_returned+1, len(rdb_buf))])
rdb := (^win32.REPARSE_DATA_BUFFER)(&rdb_buf[0])
switch rdb.ReparseTag {
case win32.IO_REPARSE_TAG_SYMLINK:
rb := (^win32.SYMBOLIC_LINK_REPARSE_BUFFER)(&rdb.rest)
pb := win32.wstring(&rb.PathBuffer)
pb[rb.SubstituteNameOffset+rb.SubstituteNameLength] = 0
p := pb[rb.SubstituteNameOffset:][:rb.SubstituteNameLength]
if rb.Flags & win32.SYMLINK_FLAG_RELATIVE != 0 {
return win32.utf16_to_utf8(p, allocator)
}
return _normalize_link_path(p, allocator)
case win32.IO_REPARSE_TAG_MOUNT_POINT:
rb := (^win32.MOUNT_POINT_REPARSE_BUFFER)(&rdb.rest)
pb := win32.wstring(&rb.PathBuffer)
pb[rb.SubstituteNameOffset+rb.SubstituteNameLength] = 0
p := pb[rb.SubstituteNameOffset:][:rb.SubstituteNameLength]
return _normalize_link_path(p, allocator)
}
// Path wasn't a symlink/junction but another reparse point kind
return "", nil
}
_unlink :: proc(path: string) -> Error {
wpath := win32.utf8_to_wstring(path, context.temp_allocator)
return _ok_or_error(win32.DeleteFileW(wpath))
}
_chdir :: proc(fd: Handle) -> Error {
_fchdir :: proc(f: ^File) -> Error {
if f == nil {
return nil
}
if !win32.SetCurrentDirectoryW(f.impl.wname) {
return _get_platform_error()
}
return nil
}
_chmod :: proc(fd: Handle, mode: File_Mode) -> Error {
_fchmod :: proc(f: ^File, mode: File_Mode) -> Error {
if f == nil {
return nil
}
d: win32.BY_HANDLE_FILE_INFORMATION
if !win32.GetFileInformationByHandle(_handle(f), &d) {
return _get_platform_error()
}
attrs := d.dwFileAttributes
if mode & S_IWRITE != 0 {
attrs &~= win32.FILE_ATTRIBUTE_READONLY
} else {
attrs |= win32.FILE_ATTRIBUTE_READONLY
}
info: win32.FILE_BASIC_INFO
info.FileAttributes = attrs
if !win32.SetFileInformationByHandle(_handle(f), .FileBasicInfo, &info, size_of(d)) {
return _get_platform_error()
}
return nil
}
_chown :: proc(fd: Handle, uid, gid: int) -> Error {
_fchown :: proc(f: ^File, uid, gid: int) -> Error {
return .Unsupported
}
_chdir :: proc(name: string) -> Error {
p := _fix_long_path(name)
if !win32.SetCurrentDirectoryW(p) {
return _get_platform_error()
}
return nil
}
_chmod :: proc(name: string, mode: File_Mode) -> Error {
f := open(name, {.Write}) or_return
defer close(f)
return _fchmod(f, mode)
}
_chown :: proc(name: string, uid, gid: int) -> Error {
return .Unsupported
}
_lchown :: proc(name: string, uid, gid: int) -> Error {
return nil
return .Unsupported
}
_chtimes :: proc(name: string, atime, mtime: time.Time) -> Error {
f := open(name, {.Write}) or_return
defer close(f)
return _fchtimes(f, atime, mtime)
}
_fchtimes :: proc(f: ^File, atime, mtime: time.Time) -> Error {
if f == nil {
return nil
}
d: win32.BY_HANDLE_FILE_INFORMATION
if !win32.GetFileInformationByHandle(_handle(f), &d) {
return _get_platform_error()
}
to_windows_time :: #force_inline proc(t: time.Time) -> win32.LARGE_INTEGER {
// a 64-bit value representing the number of 100-nanosecond intervals since January 1, 1601 (UTC)
return win32.LARGE_INTEGER(time.time_to_unix_nano(t) * 100 + 116444736000000000)
}
atime, mtime := atime, mtime
if time.time_to_unix_nano(atime) < time.time_to_unix_nano(mtime) {
atime = mtime
}
info: win32.FILE_BASIC_INFO
info.LastAccessTime = to_windows_time(atime)
info.LastWriteTime = to_windows_time(mtime)
if !win32.SetFileInformationByHandle(_handle(f), .FileBasicInfo, &info, size_of(d)) {
return _get_platform_error()
}
return nil
}
_exists :: proc(path: string) -> bool {
wpath := win32.utf8_to_wstring(path, context.temp_allocator)
return bool(win32.PathFileExistsW(wpath))
wpath := _fix_long_path(path)
attribs := win32.GetFileAttributesW(wpath)
return i32(attribs) != win32.INVALID_FILE_ATTRIBUTES
}
_is_file :: proc(fd: Handle) -> bool {
hnd := win32.HANDLE(fd)
file_info: win32.BY_HANDLE_FILE_INFORMATION
if ok := win32.GetFileInformationByHandle(hnd, &file_info); !ok {
return false
_is_file :: proc(path: string) -> bool {
wpath := _fix_long_path(path)
attribs := win32.GetFileAttributesW(wpath)
if i32(attribs) != win32.INVALID_FILE_ATTRIBUTES {
return attribs & win32.FILE_ATTRIBUTE_DIRECTORY == 0
}
no_flags :: win32.FILE_ATTRIBUTE_DIRECTORY | win32.FILE_ATTRIBUTE_DEVICE
yes_flags :: win32.FILE_ATTRIBUTE_NORMAL
return (file_info.dwFileAttributes & no_flags == 0) && (file_info.dwFileAttributes & yes_flags != 0)
return false
}
_is_dir :: proc(fd: Handle) -> bool {
hnd := win32.HANDLE(fd)
file_info: win32.BY_HANDLE_FILE_INFORMATION
if ok := win32.GetFileInformationByHandle(hnd, &file_info); !ok {
return false
_is_dir :: proc(path: string) -> bool {
wpath := _fix_long_path(path)
attribs := win32.GetFileAttributesW(wpath)
if i32(attribs) != win32.INVALID_FILE_ATTRIBUTES {
return attribs & win32.FILE_ATTRIBUTE_DIRECTORY != 0
}
return file_info.dwFileAttributes & win32.FILE_ATTRIBUTE_DIRECTORY != 0
return false
}

View File

@@ -102,7 +102,6 @@ MMAP_PROT :: unix.PROT_READ | unix.PROT_WRITE
@thread_local _local_region: ^Region
//_local_region: ^Region
global_regions: ^Region

View File

@@ -1,5 +1,7 @@
package os2
import "core:runtime"
Path_Separator :: _Path_Separator // OS-Specific
Path_List_Separator :: _Path_List_Separator // OS-Specific
@@ -21,7 +23,7 @@ remove_all :: proc(path: string) -> Error {
getwd :: proc(allocator := context.allocator) -> (dir: string, err: Error) {
getwd :: proc(allocator: runtime.Allocator) -> (dir: string, err: Error) {
return _getwd(allocator)
}
setwd :: proc(dir: string) -> (err: Error) {

View File

@@ -2,6 +2,8 @@
package os2
import "core:strings"
import "core:strconv"
import "core:runtime"
import "core:sys/unix"
_Path_Separator :: '/'
@@ -37,31 +39,31 @@ _mkdir :: proc(path: string, perm: File_Mode) -> Error {
}
_mkdir_all :: proc(path: string, perm: File_Mode) -> Error {
_mkdirat :: proc(dfd: Handle, path: []u8, perm: int, has_created: ^bool) -> Error {
_mkdirat :: proc(dfd: int, path: []u8, perm: int, has_created: ^bool) -> Error {
if len(path) == 0 {
return _ok_or_error(unix.sys_close(int(dfd)))
return _ok_or_error(unix.sys_close(dfd))
}
i: int
for /**/; i < len(path) - 1 && path[i] != '/'; i += 1 {}
path[i] = 0
new_dfd := unix.sys_openat(int(dfd), cstring(&path[0]), _OPENDIR_FLAGS)
new_dfd := unix.sys_openat(dfd, cstring(&path[0]), _OPENDIR_FLAGS)
switch new_dfd {
case -ENOENT:
if res := unix.sys_mkdirat(int(dfd), cstring(&path[0]), perm); res < 0 {
if res := unix.sys_mkdirat(dfd, cstring(&path[0]), perm); res < 0 {
return _get_platform_error(res)
}
has_created^ = true
if new_dfd = unix.sys_openat(int(dfd), cstring(&path[0]), _OPENDIR_FLAGS); new_dfd < 0 {
if new_dfd = unix.sys_openat(dfd, cstring(&path[0]), _OPENDIR_FLAGS); new_dfd < 0 {
return _get_platform_error(new_dfd)
}
fallthrough
case 0:
if res := unix.sys_close(int(dfd)); res < 0 {
if res := unix.sys_close(dfd); res < 0 {
return _get_platform_error(res)
}
// skip consecutive '/'
for i += 1; i < len(path) && path[i] == '/'; i += 1 {}
return _mkdirat(Handle(new_dfd), path[i:], perm, has_created)
return _mkdirat(new_dfd, path[i:], perm, has_created)
case:
return _get_platform_error(new_dfd)
}
@@ -101,7 +103,7 @@ _mkdir_all :: proc(path: string, perm: File_Mode) -> Error {
}
has_created: bool
_mkdirat(Handle(dfd), path_bytes, int(perm & 0o777), &has_created) or_return
_mkdirat(dfd, path_bytes, int(perm & 0o777), &has_created) or_return
if has_created {
return nil
}
@@ -120,13 +122,13 @@ dirent64 :: struct {
_remove_all :: proc(path: string) -> Error {
DT_DIR :: 4
_remove_all_dir :: proc(dfd: Handle) -> Error {
_remove_all_dir :: proc(dfd: int) -> Error {
n := 64
buf := make([]u8, n)
defer delete(buf)
loop: for {
getdents_res := unix.sys_getdents64(int(dfd), &buf[0], n)
getdents_res := unix.sys_getdents64(dfd, &buf[0], n)
switch getdents_res {
case -EINVAL:
delete(buf)
@@ -161,15 +163,15 @@ _remove_all :: proc(path: string) -> Error {
switch d.d_type {
case DT_DIR:
handle_i := unix.sys_openat(int(dfd), d_name_cstr, _OPENDIR_FLAGS)
if handle_i < 0 {
return _get_platform_error(handle_i)
new_dfd := unix.sys_openat(dfd, d_name_cstr, _OPENDIR_FLAGS)
if new_dfd < 0 {
return _get_platform_error(new_dfd)
}
defer unix.sys_close(handle_i)
_remove_all_dir(Handle(handle_i)) or_return
unlink_res = unix.sys_unlinkat(int(dfd), d_name_cstr, int(unix.AT_REMOVEDIR))
defer unix.sys_close(new_dfd)
_remove_all_dir(new_dfd) or_return
unlink_res = unix.sys_unlinkat(dfd, d_name_cstr, int(unix.AT_REMOVEDIR))
case:
unlink_res = unix.sys_unlinkat(int(dfd), d_name_cstr)
unlink_res = unix.sys_unlinkat(dfd, d_name_cstr)
}
if unlink_res < 0 {
@@ -185,21 +187,20 @@ _remove_all :: proc(path: string) -> Error {
delete(path_cstr)
}
handle_i := unix.sys_open(path_cstr, _OPENDIR_FLAGS)
switch handle_i {
fd := unix.sys_open(path_cstr, _OPENDIR_FLAGS)
switch fd {
case -ENOTDIR:
return _ok_or_error(unix.sys_unlink(path_cstr))
case -4096..<0:
return _get_platform_error(handle_i)
return _get_platform_error(fd)
}
fd := Handle(handle_i)
defer close(fd)
defer unix.sys_close(fd)
_remove_all_dir(fd) or_return
return _ok_or_error(unix.sys_rmdir(path_cstr))
}
_getwd :: proc(allocator := context.allocator) -> (string, Error) {
_getwd :: proc(allocator: runtime.Allocator) -> (string, Error) {
// NOTE(tetra): I would use PATH_MAX here, but I was not able to find
// an authoritative value for it across all systems.
// The largest value I could find was 4096, so might as well use the page size.
@@ -227,3 +228,20 @@ _setwd :: proc(dir: string) -> Error {
}
return _ok_or_error(unix.sys_chdir(dir_cstr))
}
_get_full_path :: proc(fd: int, allocator := context.allocator) -> string {
PROC_FD_PATH :: "/proc/self/fd/"
buf: [32]u8
copy(buf[:], PROC_FD_PATH)
strconv.itoa(buf[len(PROC_FD_PATH):], fd)
fullpath: string
err: Error
if fullpath, err = _read_link_cstr(cstring(&buf[0]), allocator); err != nil || fullpath[0] != '/' {
return ""
}
return fullpath
}

View File

@@ -1,6 +1,10 @@
//+private
package os2
import win32 "core:sys/windows"
import "core:runtime"
import "core:strings"
_Path_Separator :: '\\'
_Path_List_Separator :: ';'
@@ -9,11 +13,58 @@ _is_path_separator :: proc(c: byte) -> bool {
}
_mkdir :: proc(name: string, perm: File_Mode) -> Error {
if !win32.CreateDirectoryW(_fix_long_path(name), nil) {
return _get_platform_error()
}
return nil
}
_mkdir_all :: proc(path: string, perm: File_Mode) -> Error {
// TODO(bill): _mkdir_all for windows
fix_root_directory :: proc(p: string) -> (s: string, allocated: bool, err: runtime.Allocator_Error) {
if len(p) == len(`\\?\c:`) {
if is_path_separator(p[0]) && is_path_separator(p[1]) && p[2] == '?' && is_path_separator(p[3]) && p[5] == ':' {
s = strings.concatenate_safe({p, `\`}, _file_allocator()) or_return
allocated = true
return
}
}
return p, false, nil
}
dir, err := stat(path, _temp_allocator())
if err == nil {
if dir.is_dir {
return nil
}
return .Exist
}
i := len(path)
for i > 0 && is_path_separator(path[i-1]) {
i -= 1
}
j := i
for j > 0 && !is_path_separator(path[j-1]) {
j -= 1
}
if j > 1 {
new_path, allocated := fix_root_directory(path[:j-1]) or_return
defer if allocated {
delete(new_path, _file_allocator())
}
mkdir_all(new_path, perm) or_return
}
err = mkdir(path, perm)
if err != nil {
dir1, err1 := lstat(path, _temp_allocator())
if err1 == nil && dir1.is_dir {
return nil
}
return err
}
return nil
}
@@ -22,10 +73,90 @@ _remove_all :: proc(path: string) -> Error {
return nil
}
_getwd :: proc(allocator := context.allocator) -> (dir: string, err: Error) {
_getwd :: proc(allocator: runtime.Allocator) -> (dir: string, err: Error) {
// TODO(bill)
return "", nil
}
_setwd :: proc(dir: string) -> (err: Error) {
// TODO(bill)
return nil
}
can_use_long_paths: bool
@(init)
init_long_path_support :: proc() {
// TODO(bill): init_long_path_support
// ADD THIS SHIT
// registry_path := win32.L(`Computer\HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Control\FileSystem\LongPathsEnabled`)
can_use_long_paths = false
}
_fix_long_path_slice :: proc(path: string) -> []u16 {
return win32.utf8_to_utf16(_fix_long_path_internal(path))
}
_fix_long_path :: proc(path: string) -> win32.wstring {
return win32.utf8_to_wstring(_fix_long_path_internal(path))
}
_fix_long_path_internal :: proc(path: string) -> string {
if can_use_long_paths {
return path
}
// When using win32 to create a directory, the path
// cannot be too long that you cannot append an 8.3
// file name, because MAX_PATH is 260, 260-12 = 248
if len(path) < 248 {
return path
}
// UNC paths do not need to be modified
if len(path) >= 2 && path[:2] == `\\` {
return path
}
if !_is_abs(path) { // relative path
return path
}
PREFIX :: `\\?`
path_buf := make([]byte, len(PREFIX)+len(path)+1, _temp_allocator())
copy(path_buf, PREFIX)
n := len(path)
r, w := 0, len(PREFIX)
for r < n {
switch {
case is_path_separator(path[r]):
r += 1
case path[r] == '.' && (r+1 == n || is_path_separator(path[r+1])):
// \.\
r += 1
case r+1 < n && path[r] == '.' && path[r+1] == '.' && (r+2 == n || is_path_separator(path[r+2])):
// Skip \..\ paths
return path
case:
path_buf[w] = '\\'
w += 1
for r < n && !is_path_separator(path[r]) {
path_buf[w] = path[r]
r += 1
w += 1
}
}
}
// Root directories require a trailing \
if w == len(`\\?\c:`) {
path_buf[w] = '\\'
w += 1
}
return string(path_buf[:w])
}

View File

@@ -1,5 +1,5 @@
package os2
pipe :: proc() -> (r, w: Handle, err: Error) {
pipe :: proc() -> (r, w: ^File, err: Error) {
return _pipe()
}

View File

@@ -1,7 +1,7 @@
//+private
package os2
_pipe :: proc() -> (r, w: Handle, err: Error) {
return INVALID_HANDLE, INVALID_HANDLE, nil
_pipe :: proc() -> (r, w: ^File, err: Error) {
return nil, nil, nil
}

View File

@@ -3,15 +3,11 @@ package os2
import win32 "core:sys/windows"
_pipe :: proc() -> (r, w: Handle, err: Error) {
sa: win32.SECURITY_ATTRIBUTES
sa.nLength = size_of(win32.SECURITY_ATTRIBUTES)
sa.bInheritHandle = true
_pipe :: proc() -> (r, w: ^File, err: Error) {
p: [2]win32.HANDLE
if !win32.CreatePipe(&p[0], &p[1], &sa, 0) {
return 0, 0, Platform_Error{i32(win32.GetLastError())}
if !win32.CreatePipe(&p[0], &p[1], nil, 0) {
return nil, nil, _get_platform_error()
}
return Handle(p[0]), Handle(p[1]), nil
return new_file(uintptr(p[0]), ""), new_file(uintptr(p[1]), ""), nil
}

View File

@@ -46,7 +46,7 @@ Process :: struct {
Process_Attributes :: struct {
dir: string,
env: []string,
files: []Handle,
files: []^File,
sys: ^Process_Attributes_OS_Specific,
}

View File

@@ -1,6 +1,7 @@
package os2
import "core:time"
import "core:runtime"
File_Info :: struct {
fullpath: string,
@@ -13,26 +14,26 @@ File_Info :: struct {
access_time: time.Time,
}
file_info_slice_delete :: proc(infos: []File_Info, allocator := context.allocator) {
file_info_slice_delete :: proc(infos: []File_Info, allocator: runtime.Allocator) {
for i := len(infos)-1; i >= 0; i -= 1 {
file_info_delete(infos[i], allocator)
}
delete(infos, allocator)
}
file_info_delete :: proc(fi: File_Info, allocator := context.allocator) {
file_info_delete :: proc(fi: File_Info, allocator: runtime.Allocator) {
delete(fi.fullpath, allocator)
}
fstat :: proc(fd: Handle, allocator := context.allocator) -> (File_Info, Error) {
return _fstat(fd, allocator)
fstat :: proc(f: ^File, allocator: runtime.Allocator) -> (File_Info, Error) {
return _fstat(f, allocator)
}
stat :: proc(name: string, allocator := context.allocator) -> (File_Info, Error) {
stat :: proc(name: string, allocator: runtime.Allocator) -> (File_Info, Error) {
return _stat(name, allocator)
}
lstat :: proc(name: string, allocator := context.allocator) -> (File_Info, Error) {
lstat :: proc(name: string, allocator: runtime.Allocator) -> (File_Info, Error) {
return _lstat(name, allocator)
}

View File

@@ -2,6 +2,7 @@
package os2
import "core:time"
import "core:runtime"
import "core:sys/unix"
import "core:path/filepath"
@@ -59,7 +60,7 @@ Unix_File_Time :: struct {
}
@private
OS_Stat :: struct {
_Stat :: struct {
device_id: u64, // ID of device containing file
serial: u64, // File serial number
nlink: u64, // Number of hard links
@@ -82,16 +83,20 @@ OS_Stat :: struct {
}
_fstat :: proc(fd: Handle, allocator := context.allocator) -> (File_Info, Error) {
s: OS_Stat
result := unix.sys_fstat(int(fd), &s)
_fstat :: proc(f: ^File, allocator := context.allocator) -> (File_Info, Error) {
return _fstat_internal(f.impl.fd, allocator)
}
_fstat_internal :: proc(fd: int, allocator: runtime.Allocator) -> (File_Info, Error) {
s: _Stat
result := unix.sys_fstat(fd, &s)
if result < 0 {
return {}, _get_platform_error(result)
}
// TODO: As of Linux 4.11, the new statx syscall can retrieve creation_time
fi := File_Info {
fullpath = _name(fd, allocator),
fullpath = _get_full_path(fd, allocator),
name = "",
size = s.size,
mode = 0,
@@ -117,7 +122,7 @@ _stat :: proc(name: string, allocator := context.allocator) -> (File_Info, Error
return {}, _get_platform_error(fd)
}
defer unix.sys_close(fd)
return _fstat(Handle(fd), allocator)
return _fstat_internal(fd, allocator)
}
_lstat :: proc(name: string, allocator := context.allocator) -> (File_Info, Error) {
@@ -130,14 +135,14 @@ _lstat :: proc(name: string, allocator := context.allocator) -> (File_Info, Erro
return {}, _get_platform_error(fd)
}
defer unix.sys_close(fd)
return _fstat(Handle(fd), allocator)
return _fstat_internal(fd, allocator)
}
_same_file :: proc(fi1, fi2: File_Info) -> bool {
return fi1.fullpath == fi2.fullpath
}
_stat_internal :: proc(name: string) -> (s: OS_Stat, res: int) {
_stat_internal :: proc(name: string) -> (s: _Stat, res: int) {
name_cstr, allocated := _name_to_cstring(name)
defer if allocated {
delete(name_cstr)

View File

@@ -1,21 +1,22 @@
//+private
package os2
import "core:runtime"
import "core:time"
import "core:strings"
import win32 "core:sys/windows"
_fstat :: proc(fd: Handle, allocator := context.allocator) -> (File_Info, Error) {
if fd == 0 {
return {}, .Invalid_Argument
_fstat :: proc(f: ^File, allocator: runtime.Allocator) -> (File_Info, Error) {
if f == nil || f.impl.fd == nil {
return {}, nil
}
context.allocator = allocator
path, err := _cleanpath_from_handle(fd)
path, err := _cleanpath_from_handle(f, allocator)
if err != nil {
return {}, err
}
h := win32.HANDLE(fd)
h := _handle(f)
switch win32.GetFileType(h) {
case win32.FILE_TYPE_PIPE, win32.FILE_TYPE_CHAR:
fi: File_Info
@@ -25,58 +26,52 @@ _fstat :: proc(fd: Handle, allocator := context.allocator) -> (File_Info, Error)
return fi, nil
}
return _file_info_from_get_file_information_by_handle(path, h)
return _file_info_from_get_file_information_by_handle(path, h, allocator)
}
_stat :: proc(name: string, allocator := context.allocator) -> (File_Info, Error) {
return internal_stat(name, win32.FILE_FLAG_BACKUP_SEMANTICS)
_stat :: proc(name: string, allocator: runtime.Allocator) -> (File_Info, Error) {
return internal_stat(name, win32.FILE_FLAG_BACKUP_SEMANTICS, allocator)
}
_lstat :: proc(name: string, allocator := context.allocator) -> (File_Info, Error) {
return internal_stat(name, win32.FILE_FLAG_BACKUP_SEMANTICS|win32.FILE_FLAG_OPEN_REPARSE_POINT)
_lstat :: proc(name: string, allocator: runtime.Allocator) -> (File_Info, Error) {
return internal_stat(name, win32.FILE_FLAG_BACKUP_SEMANTICS|win32.FILE_FLAG_OPEN_REPARSE_POINT, allocator)
}
_same_file :: proc(fi1, fi2: File_Info) -> bool {
return fi1.fullpath == fi2.fullpath
}
full_path_from_name :: proc(name: string, allocator := context.allocator) -> (path: string, err: Error) {
context.allocator = allocator
full_path_from_name :: proc(name: string, allocator: runtime.Allocator) -> (path: string, err: Error) {
name := name
if name == "" {
name = "."
}
p := win32.utf8_to_utf16(name, context.temp_allocator)
buf := make([dynamic]u16, 100)
for {
n := win32.GetFullPathNameW(raw_data(p), u32(len(buf)), raw_data(buf), nil)
if n == 0 {
delete(buf)
return "", _get_platform_error()
}
if n <= u32(len(buf)) {
return win32.utf16_to_utf8(buf[:n]), nil
}
resize(&buf, len(buf)*2)
}
p := win32.utf8_to_utf16(name, _temp_allocator())
return
n := win32.GetFullPathNameW(raw_data(p), 0, nil, nil)
if n == 0 {
return "", _get_platform_error()
}
buf := make([]u16, n+1, _temp_allocator())
n = win32.GetFullPathNameW(raw_data(p), u32(len(buf)), raw_data(buf), nil)
if n == 0 {
return "", _get_platform_error()
}
return win32.utf16_to_utf8(buf[:n], allocator)
}
internal_stat :: proc(name: string, create_file_attributes: u32, allocator := context.allocator) -> (fi: File_Info, e: Error) {
internal_stat :: proc(name: string, create_file_attributes: u32, allocator: runtime.Allocator) -> (fi: File_Info, e: Error) {
if len(name) == 0 {
return {}, .Not_Exist
}
context.allocator = allocator
wname := win32.utf8_to_wstring(_fix_long_path(name), context.temp_allocator)
wname := _fix_long_path(name)
fa: win32.WIN32_FILE_ATTRIBUTE_DATA
ok := win32.GetFileAttributesExW(wname, win32.GetFileExInfoStandard, &fa)
if ok && fa.dwFileAttributes & win32.FILE_ATTRIBUTE_REPARSE_POINT == 0 {
// Not a symlink
return _file_info_from_win32_file_attribute_data(&fa, name)
return _file_info_from_win32_file_attribute_data(&fa, name, allocator)
}
err := 0 if ok else win32.GetLastError()
@@ -90,7 +85,7 @@ internal_stat :: proc(name: string, create_file_attributes: u32, allocator := co
}
win32.FindClose(sh)
return _file_info_from_win32_find_data(&fd, name)
return _file_info_from_win32_find_data(&fd, name, allocator)
}
h := win32.CreateFileW(wname, 0, 0, nil, win32.OPEN_EXISTING, create_file_attributes, nil)
@@ -99,7 +94,7 @@ internal_stat :: proc(name: string, create_file_attributes: u32, allocator := co
return
}
defer win32.CloseHandle(h)
return _file_info_from_get_file_information_by_handle(name, h)
return _file_info_from_get_file_information_by_handle(name, h, allocator)
}
@@ -124,56 +119,40 @@ _cleanpath_strip_prefix :: proc(buf: []u16) -> []u16 {
}
_cleanpath_from_handle :: proc(fd: Handle) -> (string, Error) {
if fd == 0 {
return "", .Invalid_Argument
_cleanpath_from_handle :: proc(f: ^File, allocator: runtime.Allocator) -> (string, Error) {
if f == nil || f.impl.fd == nil {
return "", nil
}
h := win32.HANDLE(fd)
h := _handle(f)
MAX_PATH := win32.DWORD(260) + 1
buf: []u16
for {
buf = make([]u16, MAX_PATH, context.temp_allocator)
err := win32.GetFinalPathNameByHandleW(h, raw_data(buf), MAX_PATH, 0)
switch err {
case win32.ERROR_PATH_NOT_FOUND, win32.ERROR_INVALID_PARAMETER:
return "", Platform_Error{i32(err)}
case win32.ERROR_NOT_ENOUGH_MEMORY:
MAX_PATH = MAX_PATH*2 + 1
continue
}
break
n := win32.GetFinalPathNameByHandleW(h, nil, 0, 0)
if n == 0 {
return "", _get_platform_error()
}
return _cleanpath_from_buf(buf), nil
buf := make([]u16, max(n, 260)+1, _temp_allocator())
n = win32.GetFinalPathNameByHandleW(h, raw_data(buf), u32(len(buf)), 0)
return _cleanpath_from_buf(buf[:n], allocator)
}
_cleanpath_from_handle_u16 :: proc(fd: Handle) -> ([]u16, Error) {
if fd == 0 {
return nil, .Invalid_Argument
_cleanpath_from_handle_u16 :: proc(f: ^File) -> ([]u16, Error) {
if f == nil || f.impl.fd == nil {
return nil, nil
}
h := win32.HANDLE(fd)
h := _handle(f)
MAX_PATH := win32.DWORD(260) + 1
buf: []u16
for {
buf = make([]u16, MAX_PATH, context.temp_allocator)
err := win32.GetFinalPathNameByHandleW(h, raw_data(buf), MAX_PATH, 0)
switch err {
case win32.ERROR_PATH_NOT_FOUND, win32.ERROR_INVALID_PARAMETER:
return nil, Platform_Error{i32(err)}
case win32.ERROR_NOT_ENOUGH_MEMORY:
MAX_PATH = MAX_PATH*2 + 1
continue
}
break
n := win32.GetFinalPathNameByHandleW(h, nil, 0, 0)
if n == 0 {
return nil, _get_platform_error()
}
return _cleanpath_strip_prefix(buf), nil
buf := make([]u16, max(n, 260)+1, _temp_allocator())
n = win32.GetFinalPathNameByHandleW(h, raw_data(buf), u32(len(buf)), 0)
return _cleanpath_strip_prefix(buf[:n]), nil
}
_cleanpath_from_buf :: proc(buf: []u16) -> string {
_cleanpath_from_buf :: proc(buf: []u16, allocator: runtime.Allocator) -> (string, runtime.Allocator_Error) {
buf := buf
buf = _cleanpath_strip_prefix(buf)
return win32.utf16_to_utf8(buf, context.allocator)
return win32.utf16_to_utf8(buf, allocator)
}
@@ -215,15 +194,15 @@ file_type_mode :: proc(h: win32.HANDLE) -> File_Mode {
_file_mode_from_file_attributes :: proc(FileAttributes: win32.DWORD, h: win32.HANDLE, ReparseTag: win32.DWORD) -> (mode: File_Mode) {
if FileAttributes & win32.FILE_ATTRIBUTE_READONLY != 0 {
_file_mode_from_file_attributes :: proc(file_attributes: win32.DWORD, h: win32.HANDLE, ReparseTag: win32.DWORD) -> (mode: File_Mode) {
if file_attributes & win32.FILE_ATTRIBUTE_READONLY != 0 {
mode |= 0o444
} else {
mode |= 0o666
}
is_sym := false
if FileAttributes & win32.FILE_ATTRIBUTE_REPARSE_POINT == 0 {
if file_attributes & win32.FILE_ATTRIBUTE_REPARSE_POINT == 0 {
is_sym = false
} else {
is_sym = ReparseTag == win32.IO_REPARSE_TAG_SYMLINK || ReparseTag == win32.IO_REPARSE_TAG_MOUNT_POINT
@@ -232,7 +211,7 @@ _file_mode_from_file_attributes :: proc(FileAttributes: win32.DWORD, h: win32.HA
if is_sym {
mode |= File_Mode_Sym_Link
} else {
if FileAttributes & win32.FILE_ATTRIBUTE_DIRECTORY != 0 {
if file_attributes & win32.FILE_ATTRIBUTE_DIRECTORY != 0 {
mode |= 0o111 | File_Mode_Dir
}
@@ -245,7 +224,7 @@ _file_mode_from_file_attributes :: proc(FileAttributes: win32.DWORD, h: win32.HA
}
_file_info_from_win32_file_attribute_data :: proc(d: ^win32.WIN32_FILE_ATTRIBUTE_DATA, name: string) -> (fi: File_Info, e: Error) {
_file_info_from_win32_file_attribute_data :: proc(d: ^win32.WIN32_FILE_ATTRIBUTE_DATA, name: string, allocator: runtime.Allocator) -> (fi: File_Info, e: Error) {
fi.size = i64(d.nFileSizeHigh)<<32 + i64(d.nFileSizeLow)
fi.mode |= _file_mode_from_file_attributes(d.dwFileAttributes, nil, 0)
@@ -255,14 +234,14 @@ _file_info_from_win32_file_attribute_data :: proc(d: ^win32.WIN32_FILE_ATTRIBUTE
fi.modification_time = time.unix(0, win32.FILETIME_as_unix_nanoseconds(d.ftLastWriteTime))
fi.access_time = time.unix(0, win32.FILETIME_as_unix_nanoseconds(d.ftLastAccessTime))
fi.fullpath, e = full_path_from_name(name)
fi.fullpath, e = full_path_from_name(name, allocator)
fi.name = basename(fi.fullpath)
return
}
_file_info_from_win32_find_data :: proc(d: ^win32.WIN32_FIND_DATAW, name: string) -> (fi: File_Info, e: Error) {
_file_info_from_win32_find_data :: proc(d: ^win32.WIN32_FIND_DATAW, name: string, allocator: runtime.Allocator) -> (fi: File_Info, e: Error) {
fi.size = i64(d.nFileSizeHigh)<<32 + i64(d.nFileSizeLow)
fi.mode |= _file_mode_from_file_attributes(d.dwFileAttributes, nil, 0)
@@ -272,14 +251,14 @@ _file_info_from_win32_find_data :: proc(d: ^win32.WIN32_FIND_DATAW, name: string
fi.modification_time = time.unix(0, win32.FILETIME_as_unix_nanoseconds(d.ftLastWriteTime))
fi.access_time = time.unix(0, win32.FILETIME_as_unix_nanoseconds(d.ftLastAccessTime))
fi.fullpath, e = full_path_from_name(name)
fi.fullpath, e = full_path_from_name(name, allocator)
fi.name = basename(fi.fullpath)
return
}
_file_info_from_get_file_information_by_handle :: proc(path: string, h: win32.HANDLE) -> (File_Info, Error) {
_file_info_from_get_file_information_by_handle :: proc(path: string, h: win32.HANDLE, allocator: runtime.Allocator) -> (File_Info, Error) {
d: win32.BY_HANDLE_FILE_INFORMATION
if !win32.GetFileInformationByHandle(h, &d) {
return {}, _get_platform_error()
@@ -290,7 +269,7 @@ _file_info_from_get_file_information_by_handle :: proc(path: string, h: win32.HA
if !win32.GetFileInformationByHandleEx(h, .FileAttributeTagInfo, &ti, size_of(ti)) {
err := win32.GetLastError()
if err != win32.ERROR_INVALID_PARAMETER {
return {}, Platform_Error{i32(err)}
return {}, Platform_Error(err)
}
// Indicate this is a symlink on FAT file systems
ti.ReparseTag = 0
@@ -312,58 +291,83 @@ _file_info_from_get_file_information_by_handle :: proc(path: string, h: win32.HA
return fi, nil
}
_is_abs :: proc(path: string) -> bool {
if len(path) > 0 && path[0] == '/' {
return true
reserved_names := [?]string{
"CON", "PRN", "AUX", "NUL",
"COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9",
"LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9",
}
_is_reserved_name :: proc(path: string) -> bool {
if len(path) == 0 {
return false
}
if len(path) > 2 {
switch path[0] {
case 'A'..='Z', 'a'..='z':
return path[1] == ':' && is_path_separator(path[2])
for reserved in reserved_names {
if strings.equal_fold(path, reserved) {
return true
}
}
return false
}
_fix_long_path :: proc(path: string) -> string {
if len(path) < 248 {
return path
}
_is_UNC :: proc(path: string) -> bool {
return _volume_name_len(path) > 2
}
if len(path) >= 2 && path[:2] == `\\` {
return path
}
if !_is_abs(path) {
return path
}
_volume_name_len :: proc(path: string) -> int {
if ODIN_OS == .Windows {
if len(path) < 2 {
return 0
}
c := path[0]
if path[1] == ':' {
switch c {
case 'a'..='z', 'A'..='Z':
return 2
}
}
prefix :: `\\?`
path_buf := make([]byte, len(prefix)+len(path)+len(`\`), context.temp_allocator)
copy(path_buf, prefix)
n := len(path)
r, w := 0, len(prefix)
for r < n {
switch {
case is_path_separator(path[r]):
r += 1
case path[r] == '.' && (r+1 == n || is_path_separator(path[r+1])):
r += 1
case r+1 < n && path[r] == '.' && path[r+1] == '.' && (r+2 == n || is_path_separator(path[r+2])):
return path
case:
path_buf[w] = '\\'
w += 1
for ; r < n && !is_path_separator(path[r]); r += 1 {
path_buf[w] = path[r]
w += 1
// URL: https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx
if l := len(path); l >= 5 && _is_path_separator(path[0]) && _is_path_separator(path[1]) &&
!_is_path_separator(path[2]) && path[2] != '.' {
for n := 3; n < l-1; n += 1 {
if _is_path_separator(path[n]) {
n += 1
if !_is_path_separator(path[n]) {
if path[n] == '.' {
break
}
}
for ; n < l; n += 1 {
if _is_path_separator(path[n]) {
break
}
}
return n
}
break
}
}
}
if w == len(`\\?\c:`) {
path_buf[w] = '\\'
w += 1
}
return string(path_buf[:w])
return 0
}
_is_abs :: proc(path: string) -> bool {
if _is_reserved_name(path) {
return true
}
l := _volume_name_len(path)
if l == 0 {
return false
}
path := path
path = path[l:]
if path == "" {
return false
}
return is_path_separator(path[0])
}

View File

@@ -1,14 +1,15 @@
package os2
import "core:runtime"
create_temp :: proc(dir, pattern: string) -> (Handle, Error) {
create_temp :: proc(dir, pattern: string) -> (^File, Error) {
return _create_temp(dir, pattern)
}
mkdir_temp :: proc(dir, pattern: string, allocator := context.allocator) -> (string, Error) {
return _mkdir_temp(dir, pattern)
mkdir_temp :: proc(dir, pattern: string, allocator: runtime.Allocator) -> (string, Error) {
return _mkdir_temp(dir, pattern, allocator)
}
temp_dir :: proc(allocator := context.allocator) -> string {
temp_dir :: proc(allocator: runtime.Allocator) -> (string, Error) {
return _temp_dir(allocator)
}

View File

@@ -1,18 +1,20 @@
//+private
package os2
import "core:runtime"
_create_temp :: proc(dir, pattern: string) -> (Handle, Error) {
_create_temp :: proc(dir, pattern: string) -> (^File, Error) {
//TODO
return 0, nil
return nil, nil
}
_mkdir_temp :: proc(dir, pattern: string, allocator := context.allocator) -> (string, Error) {
_mkdir_temp :: proc(dir, pattern: string, allocator: runtime.Allocator) -> (string, Error) {
//TODO
return "", nil
}
_temp_dir :: proc(allocator := context.allocator) -> string {
_temp_dir :: proc(allocator: runtime.Allocator) -> (string, Error) {
//TODO
return ""
return "", nil
}

View File

@@ -1,29 +1,29 @@
//+private
package os2
import "core:runtime"
import win32 "core:sys/windows"
_create_temp :: proc(dir, pattern: string) -> (Handle, Error) {
return 0, nil
_create_temp :: proc(dir, pattern: string) -> (^File, Error) {
return nil, nil
}
_mkdir_temp :: proc(dir, pattern: string, allocator := context.allocator) -> (string, Error) {
_mkdir_temp :: proc(dir, pattern: string, allocator: runtime.Allocator) -> (string, Error) {
return "", nil
}
_temp_dir :: proc(allocator := context.allocator) -> string {
b := make([dynamic]u16, u32(win32.MAX_PATH), context.temp_allocator)
for {
n := win32.GetTempPathW(u32(len(b)), raw_data(b))
if n > u32(len(b)) {
resize(&b, int(n))
continue
}
if n == 3 && b[1] == ':' && b[2] == '\\' {
} else if n > 0 && b[n-1] == '\\' {
n -= 1
}
return win32.utf16_to_utf8(b[:n], allocator)
_temp_dir :: proc(allocator: runtime.Allocator) -> (string, runtime.Allocator_Error) {
n := win32.GetTempPathW(0, nil)
if n == 0 {
return "", nil
}
b := make([]u16, max(win32.MAX_PATH, n), _temp_allocator())
n = win32.GetTempPathW(u32(len(b)), raw_data(b))
if n == 3 && b[1] == ':' && b[2] == '\\' {
} else if n > 0 && b[n-1] == '\\' {
n -= 1
}
return win32.utf16_to_utf8(b[:n], allocator)
}

View File

@@ -1,66 +1,75 @@
package os2
import "core:strings"
import "core:runtime"
user_cache_dir :: proc(allocator := context.allocator) -> (dir: string, is_defined: bool) {
user_cache_dir :: proc(allocator: runtime.Allocator) -> (dir: string, err: Error) {
found: bool
#partial switch ODIN_OS {
case .Windows:
dir = get_env("LocalAppData") or_return
if dir != "" {
dir = strings.clone(dir, allocator)
dir, found = get_env("LocalAppData")
if found {
dir = strings.clone_safe(dir, allocator) or_return
}
case .Darwin:
dir = get_env("HOME") or_return
if dir != "" {
dir = strings.concatenate({dir, "/Library/Caches"}, allocator)
dir, found = get_env("HOME")
if found {
dir = strings.concatenate_safe({dir, "/Library/Caches"}, allocator) or_return
}
case: // All other UNIX systems
dir = get_env("XDG_CACHE_HOME") or_return
if dir == "" {
dir = get_env("HOME") or_return
if dir == "" {
dir, found = get_env("XDG_CACHE_HOME")
if found {
dir, found = get_env("HOME")
if !found {
return
}
dir = strings.concatenate({dir, "/.cache"}, allocator)
dir = strings.concatenate_safe({dir, "/.cache"}, allocator) or_return
}
}
is_defined = dir != ""
if !found || dir == "" {
err = .Invalid_Path
}
return
}
user_config_dir :: proc(allocator := context.allocator) -> (dir: string, is_defined: bool) {
user_config_dir :: proc(allocator: runtime.Allocator) -> (dir: string, err: Error) {
found: bool
#partial switch ODIN_OS {
case .Windows:
dir = get_env("AppData") or_return
if dir != "" {
dir = strings.clone(dir, allocator)
dir, found = get_env("AppData")
if found {
dir = strings.clone_safe(dir, allocator) or_return
}
case .Darwin:
dir = get_env("HOME") or_return
if dir != "" {
dir = strings.concatenate({dir, "/Library/Application Support"}, allocator)
dir, found = get_env("HOME")
if found {
dir = strings.concatenate_safe({dir, "/Library/Application Support"}, allocator) or_return
}
case: // All other UNIX systems
dir = get_env("XDG_CACHE_HOME") or_return
if dir == "" {
dir = get_env("HOME") or_return
if dir == "" {
dir, found = get_env("XDG_CACHE_HOME")
if !found {
dir, found = get_env("HOME")
if !found {
return
}
dir = strings.concatenate({dir, "/.config"}, allocator)
dir = strings.concatenate_safe({dir, "/.config"}, allocator) or_return
}
}
is_defined = dir != ""
if !found || dir == "" {
err = .Invalid_Path
}
return
}
user_home_dir :: proc() -> (dir: string, is_defined: bool) {
user_home_dir :: proc() -> (dir: string, err: Error) {
env := "HOME"
#partial switch ODIN_OS {
case .Windows:
env = "USERPROFILE"
}
v := get_env(env) or_return
return v, true
if v, found := get_env(env); found {
return v, nil
}
return "", .Invalid_Path
}

View File

@@ -163,9 +163,6 @@ O_SYNC :: 0x0080
O_ASYNC :: 0x0040
O_CLOEXEC :: 0x1000000
SEEK_SET :: 0
SEEK_CUR :: 1
SEEK_END :: 2
SEEK_DATA :: 3
SEEK_HOLE :: 4
SEEK_MAX :: SEEK_HOLE
@@ -279,7 +276,7 @@ foreign libc {
@(link_name="__error") __error :: proc() -> ^int ---
@(link_name="open") _unix_open :: proc(path: cstring, flags: i32, mode: u16) -> Handle ---
@(link_name="close") _unix_close :: proc(handle: Handle) ---
@(link_name="close") _unix_close :: proc(handle: Handle) -> c.int ---
@(link_name="read") _unix_read :: proc(handle: Handle, buffer: rawptr, count: int) -> int ---
@(link_name="write") _unix_write :: proc(handle: Handle, buffer: rawptr, count: int) -> int ---
@(link_name="lseek") _unix_lseek :: proc(fs: Handle, offset: int, whence: int) -> int ---
@@ -298,13 +295,13 @@ foreign libc {
@(link_name="closedir") _unix_closedir :: proc(dirp: Dir) -> c.int ---
@(link_name="rewinddir") _unix_rewinddir :: proc(dirp: Dir) ---
@(link_name="fcntl") _unix_fcntl :: proc(fd: Handle, cmd: c.int, buf: ^byte) -> c.int ---
@(link_name="__fcntl") _unix__fcntl :: proc(fd: Handle, cmd: c.int, buf: ^byte) -> c.int ---
@(link_name="rename") _unix_rename :: proc(old: cstring, new: cstring) -> c.int ---
@(link_name="remove") _unix_remove :: proc(path: cstring) -> c.int ---
@(link_name="fchmod") _unix_fchmod :: proc(fildes: Handle, mode: u16) -> c.int ---
@(link_name="fchmod") _unix_fchmod :: proc(fd: Handle, mode: u16) -> c.int ---
@(link_name="malloc") _unix_malloc :: proc(size: int) -> rawptr ---
@(link_name="calloc") _unix_calloc :: proc(num, size: int) -> rawptr ---
@@ -364,12 +361,12 @@ when ODIN_OS == .Darwin && ODIN_ARCH == .arm64 {
return handle, 0
}
fchmod :: proc(fildes: Handle, mode: u16) -> Errno {
return cast(Errno)_unix_fchmod(fildes, mode)
fchmod :: proc(fd: Handle, mode: u16) -> Errno {
return cast(Errno)_unix_fchmod(fd, mode)
}
close :: proc(fd: Handle) {
_unix_close(fd)
close :: proc(fd: Handle) -> bool {
return _unix_close(fd) == 0
}
write :: proc(fd: Handle, data: []u8) -> (int, Errno) {
@@ -480,12 +477,12 @@ is_dir :: proc {is_dir_path, is_dir_handle}
rename :: proc(old: string, new: string) -> bool {
old_cstr := strings.clone_to_cstring(old, context.temp_allocator)
new_cstr := strings.clone_to_cstring(new, context.temp_allocator)
return _unix_rename(old_cstr, new_cstr) != -1
return _unix_rename(old_cstr, new_cstr) != -1
}
remove :: proc(path: string) -> bool {
path_cstr := strings.clone_to_cstring(path, context.temp_allocator)
return _unix_remove(path_cstr) != -1
return _unix_remove(path_cstr) != -1
}
@private
@@ -549,7 +546,7 @@ _rewinddir :: proc(dirp: Dir) {
_readdir :: proc(dirp: Dir) -> (entry: Dirent, err: Errno, end_of_stream: bool) {
result: ^Dirent
rc := _unix_readdir_r(dirp, &entry, &result)
if rc != 0 {
err = Errno(get_last_error())
return
@@ -589,7 +586,7 @@ _readlink :: proc(path: string) -> (string, Errno) {
absolute_path_from_handle :: proc(fd: Handle) -> (string, Errno) {
buf : [256]byte
res := _unix_fcntl(fd, F_GETPATH, &buf[0])
res := _unix__fcntl(fd, F_GETPATH, &buf[0])
if res != 0 {
return "", Errno(get_last_error())
}

View File

@@ -123,9 +123,6 @@ O_ASYNC :: 0x02000
O_CLOEXEC :: 0x80000
SEEK_SET :: 0
SEEK_CUR :: 1
SEEK_END :: 2
SEEK_DATA :: 3
SEEK_HOLE :: 4
SEEK_MAX :: SEEK_HOLE

View File

@@ -167,9 +167,6 @@ O_ASYNC :: 0x02000
O_CLOEXEC :: 0x80000
SEEK_SET :: 0
SEEK_CUR :: 1
SEEK_END :: 2
SEEK_DATA :: 3
SEEK_HOLE :: 4
SEEK_MAX :: SEEK_HOLE
@@ -418,6 +415,7 @@ foreign libc {
@(link_name="realloc") _unix_realloc :: proc(ptr: rawptr, size: c.size_t) -> rawptr ---
@(link_name="getenv") _unix_getenv :: proc(cstring) -> cstring ---
@(link_name="putenv") _unix_putenv :: proc(cstring) -> c.int ---
@(link_name="realpath") _unix_realpath :: proc(path: cstring, resolved_path: rawptr) -> rawptr ---
@(link_name="exit") _unix_exit :: proc(status: c.int) -> ! ---
@@ -582,6 +580,11 @@ is_dir_path :: proc(path: string, follow_links: bool = true) -> bool {
is_file :: proc {is_file_path, is_file_handle}
is_dir :: proc {is_dir_path, is_dir_handle}
exists :: proc(path: string) -> bool {
cpath := strings.clone_to_cstring(path, context.temp_allocator)
res := _unix_access(cpath, O_RDONLY)
return res == 0
}
// NOTE(bill): Uses startup to initialize it
@@ -767,13 +770,28 @@ heap_free :: proc(ptr: rawptr) {
_unix_free(ptr)
}
getenv :: proc(name: string) -> (string, bool) {
path_str := strings.clone_to_cstring(name, context.temp_allocator)
lookup_env :: proc(key: string, allocator := context.allocator) -> (value: string, found: bool) {
path_str := strings.clone_to_cstring(key, context.temp_allocator)
// NOTE(tetra): Lifetime of 'cstr' is unclear, but _unix_free(cstr) segfaults.
cstr := _unix_getenv(path_str)
if cstr == nil {
return "", false
}
return string(cstr), true
return strings.clone(string(cstr), allocator), true
}
get_env :: proc(key: string, allocator := context.allocator) -> (value: string) {
value, _ = lookup_env(key, allocator)
return
}
set_env :: proc(key, value: string) -> Errno {
s := strings.concatenate({key, "=", value, "\x00"}, context.temp_allocator)
res := _unix_putenv(strings.unsafe_string_to_cstring(s))
if res < 0 {
return Errno(get_last_error())
}
return ERROR_NONE
}
get_current_directory :: proc() -> string {

View File

@@ -125,10 +125,6 @@ O_EXCL :: 0x00800
O_NOCTTY :: 0x08000
O_CLOEXEC :: 0x10000
SEEK_SET :: 0
SEEK_CUR :: 1
SEEK_END :: 2
RTLD_LAZY :: 0x001
RTLD_NOW :: 0x002
RTLD_LOCAL :: 0x000

View File

@@ -119,7 +119,6 @@ lstat :: proc(name: string, allocator := context.allocator) -> (fi: File_Info, e
}
stat :: proc(name: string, allocator := context.allocator) -> (fi: File_Info, err: Errno) {
context.allocator = allocator
s: OS_Stat

View File

@@ -20,7 +20,7 @@ full_path_from_name :: proc(name: string, allocator := context.allocator) -> (pa
return "", Errno(win32.GetLastError())
}
if n <= u32(len(buf)) {
return win32.utf16_to_utf8(buf[:n], allocator), ERROR_NONE
return win32.utf16_to_utf8(buf[:n], allocator) or_else "", ERROR_NONE
}
resize(&buf, len(buf)*2)
}
@@ -136,7 +136,7 @@ cleanpath_from_handle :: proc(fd: Handle) -> (string, Errno) {
if err != 0 {
return "", err
}
return win32.utf16_to_utf8(buf, context.allocator), err
return win32.utf16_to_utf8(buf, context.allocator) or_else "", err
}
@(private)
cleanpath_from_handle_u16 :: proc(fd: Handle) -> ([]u16, Errno) {
@@ -157,7 +157,7 @@ cleanpath_from_handle_u16 :: proc(fd: Handle) -> ([]u16, Errno) {
cleanpath_from_buf :: proc(buf: []u16) -> string {
buf := buf
buf = cleanpath_strip_prefix(buf)
return win32.utf16_to_utf8(buf, context.allocator)
return win32.utf16_to_utf8(buf, context.allocator) or_else ""
}
@(private)

View File

@@ -4,6 +4,8 @@ package filepath
import "core:strings"
SEPARATOR_CHARS :: `/\`
// is_separator checks whether the byte is a valid separator character
is_separator :: proc(c: byte) -> bool {
switch c {
@@ -69,6 +71,16 @@ volume_name_len :: proc(path: string) -> int {
return 0
}
/*
Gets the file name and extension from a path.
i.e:
'path/to/name.tar.gz' -> 'name.tar.gz'
'path/to/name.txt' -> 'name.txt'
'path/to/name' -> 'name'
Returns "." if the path is an empty string.
*/
base :: proc(path: string) -> string {
if path == "" {
return "."
@@ -94,6 +106,118 @@ base :: proc(path: string) -> string {
return path
}
/*
Gets the name of a file from a path.
The stem of a file is such that stem(path) + ext(path) = base(path).
Only the last dot is considered when splitting the file extension.
See `short_stem`.
i.e:
'name.tar.gz' -> 'name.tar'
'name.txt' -> 'name'
Returns an empty string if there is no stem. e.g: '.gitignore'.
Returns an empty string if there's a trailing path separator.
*/
stem :: proc(path: string) -> string {
if len(path) > 0 && is_separator(path[len(path) - 1]) {
// NOTE(tetra): Trailing separator
return ""
}
// NOTE(tetra): Get the basename
path := path
if i := strings.last_index_any(path, SEPARATOR_CHARS); i != -1 {
path = path[i+1:]
}
if i := strings.last_index_byte(path, '.'); i != -1 {
return path[:i]
}
return path
}
/*
Gets the name of a file from a path.
The short stem is such that short_stem(path) + long_ext(path) = base(path).
The first dot is used to split off the file extension, unlike `stem` which uses the last dot.
i.e:
'name.tar.gz' -> 'name'
'name.txt' -> 'name'
Returns an empty string if there is no stem. e.g: '.gitignore'.
Returns an empty string if there's a trailing path separator.
*/
short_stem :: proc(path: string) -> string {
s := stem(path)
if i := strings.index_byte(s, '.'); i != -1 {
return s[:i]
}
return s
}
/*
Gets the file extension from a path, including the dot.
The file extension is such that stem(path) + ext(path) = base(path).
Only the last dot is considered when splitting the file extension.
See `long_ext`.
i.e:
'name.tar.gz' -> '.gz'
'name.txt' -> '.txt'
Returns an empty string if there is no dot.
Returns an empty string if there is a trailing path separator.
*/
ext :: proc(path: string) -> string {
for i := len(path)-1; i >= 0 && !is_separator(path[i]); i -= 1 {
if path[i] == '.' {
return path[i:]
}
}
return ""
}
/*
Gets the file extension from a path, including the dot.
The long file extension is such that short_stem(path) + long_ext(path) = base(path).
The first dot is used to split off the file extension, unlike `ext` which uses the last dot.
i.e:
'name.tar.gz' -> '.tar.gz'
'name.txt' -> '.txt'
Returns an empty string if there is no dot.
Returns an empty string if there is a trailing path separator.
*/
long_ext :: proc(path: string) -> string {
if len(path) > 0 && is_separator(path[len(path) - 1]) {
// NOTE(tetra): Trailing separator
return ""
}
// NOTE(tetra): Get the basename
path := path
if i := strings.last_index_any(path, SEPARATOR_CHARS); i != -1 {
path = path[i+1:]
}
if i := strings.index_byte(path, '.'); i != -1 {
return path[i:]
}
return ""
}
clean :: proc(path: string, allocator := context.allocator) -> string {
context.allocator = allocator
@@ -189,15 +313,6 @@ to_slash :: proc(path: string, allocator := context.allocator) -> (new_path: str
return strings.replace_all(path, SEPARATOR_STRING, "/", allocator)
}
ext :: proc(path: string) -> string {
for i := len(path)-1; i >= 0 && !is_separator(path[i]); i -= 1 {
if path[i] == '.' {
return path[i:]
}
}
return ""
}
Relative_Error :: enum {
None,

View File

@@ -68,7 +68,7 @@ temp_full_path :: proc(name: string) -> (path: string, err: os.Errno) {
return "", os.Errno(win32.GetLastError())
}
if n <= u32(len(buf)) {
return win32.utf16_to_utf8(buf[:n], ta), os.ERROR_NONE
return win32.utf16_to_utf8(buf[:n], ta) or_else "", os.ERROR_NONE
}
resize(&buf, len(buf)*2)
}

View File

@@ -401,6 +401,7 @@ Raw_Cstring :: struct {
Linux,
Essence,
FreeBSD,
OpenBSD,
WASI,
JS,
Freestanding,
@@ -414,6 +415,7 @@ Odin_OS_Type :: type_of(ODIN_OS)
Unknown,
amd64,
i386,
arm32,
arm64,
wasm32,
wasm64,
@@ -565,7 +567,7 @@ __init_context :: proc "contextless" (c: ^Context) {
return
}
// NOTE(bill): Do not initialize these procedures with a call as they are not defined with the "contexless" calling convention
// NOTE(bill): Do not initialize these procedures with a call as they are not defined with the "contextless" calling convention
c.allocator.procedure = default_allocator_proc
c.allocator.data = nil

View File

@@ -5,6 +5,16 @@ import "core:intrinsics"
@builtin
Maybe :: union($T: typeid) #maybe {T}
@builtin
container_of :: #force_inline proc "contextless" (ptr: $P/^$Field_Type, $T: typeid, $field_name: string) -> ^T
where intrinsics.type_has_field(T, field_name),
intrinsics.type_field_type(T, field_name) == Field_Type {
offset :: offset_of_by_string(T, field_name)
return (^T)(uintptr(ptr) - offset) if ptr != nil else nil
}
@thread_local global_default_temp_allocator_data: Default_Temp_Allocator
@builtin
@@ -621,13 +631,15 @@ assert :: proc(condition: bool, message := "", loc := #caller_location) {
// to improve performance to make the CPU not
// execute speculatively, making it about an order of
// magnitude faster
proc(message: string, loc: Source_Code_Location) {
@(cold)
internal :: proc(message: string, loc: Source_Code_Location) {
p := context.assertion_failure_proc
if p == nil {
p = default_assertion_failure_proc
}
p("runtime assertion", message, loc)
}(message, loc)
}
internal(message, loc)
}
}

View File

@@ -4,7 +4,7 @@ when ODIN_NO_CRT && ODIN_OS == .Windows {
foreign import lib "system:NtDll.lib"
@(private="file")
@(default_calling_convention="std")
@(default_calling_convention="stdcall")
foreign lib {
RtlMoveMemory :: proc(dst, src: rawptr, length: int) ---
RtlFillMemory :: proc(dst: rawptr, length: int, fill: i32) ---

View File

@@ -10,6 +10,53 @@ _ :: builtin
_ :: bits
_ :: mem
/*
Turn a pointer and a length into a slice.
*/
from_ptr :: proc "contextless" (ptr: ^$T, count: int) -> []T {
return ([^]T)(ptr)[:count]
}
/*
Turn a pointer and a length into a byte slice.
*/
bytes_from_ptr :: proc "contextless" (ptr: rawptr, byte_count: int) -> []byte {
return ([^]byte)(ptr)[:byte_count]
}
/*
Turn a slice into a byte slice.
See `slice.reinterpret` to go the other way.
*/
to_bytes :: proc "contextless" (s: []$T) -> []byte {
return ([^]byte)(raw_data(s))[:len(s) * size_of(T)]
}
/*
Turn a slice of one type, into a slice of another type.
Only converts the type and length of the slice itself.
The length is rounded down to the nearest whole number of items.
```
large_items := []i64{1, 2, 3, 4}
small_items := slice.reinterpret([]i32, large_items)
assert(len(small_items) == 8)
```
```
small_items := []byte{1, 0, 0, 0, 0, 0, 0, 0,
2, 0, 0, 0}
large_items := slice.reinterpret([]i64, small_items)
assert(len(large_items) == 1) // only enough bytes to make 1 x i64; two would need at least 8 bytes.
```
*/
reinterpret :: proc "contextless" ($T: typeid/[]$U, s: []$V) -> []U {
bytes := to_bytes(s)
n := len(bytes) / size_of(U)
return ([^]U)(raw_data(bytes))[:n]
}
swap :: proc(array: $T/[]$E, a, b: int) {
when size_of(E) > 8 {

View File

@@ -150,7 +150,7 @@ _quick_sort_general :: proc(data: $T/[]$E, a, b, max_depth: int, call: $P, $KIND
a, b, max_depth := a, b, max_depth
if b-a > 12 { // only use shell sort for lengths <= 12
for b-a > 12 { // only use shell sort for lengths <= 12
if max_depth == 0 {
heap_sort(data, a, b, call)
return

View File

@@ -124,11 +124,11 @@ reset_builder :: proc(b: ^Builder) {
used in `fmt.bprint*`
bytes: [8]byte // <-- gets filled
builder := strings.builder_from_slice(bytes[:])
builder := strings.builder_from_bytes(bytes[:])
strings.write_byte(&builder, 'a') -> "a"
strings.write_byte(&builder, 'b') -> "ab"
*/
builder_from_slice :: proc(backing: []byte) -> Builder {
builder_from_bytes :: proc(backing: []byte) -> Builder {
s := transmute(mem.Raw_Slice)backing
d := mem.Raw_Dynamic_Array{
data = s.data,
@@ -140,6 +140,7 @@ builder_from_slice :: proc(backing: []byte) -> Builder {
buf = transmute([dynamic]byte)d,
}
}
builder_from_slice :: builder_from_bytes
// cast the builder byte buffer to a string and return it
to_string :: proc(b: Builder) -> string {

Some files were not shown because too many files have changed in this diff Show More