Merge branch 'master' into slice_scanner

This commit is contained in:
Andrea Piseri
2021-12-28 16:12:15 +01:00
17 changed files with 233 additions and 1412 deletions

View File

@@ -1,216 +0,0 @@
package container
import "core:mem"
import "core:runtime"
Array :: struct($T: typeid) {
data: ^T,
len: int,
cap: int,
allocator: mem.Allocator,
}
ARRAY_DEFAULT_CAPACITY :: 16
/*
array_init :: proc {
array_init_none,
array_init_len,
array_init_len_cap,
}
array_init
array_delete
array_len
array_cap
array_space
array_slice
array_get
array_get_ptr
array_set
array_reserve
array_resize
array_push = array_append :: proc{
array_push_back,
array_push_back_elems,
}
array_push_front
array_pop_back
array_pop_front
array_consume
array_trim
array_clear
array_clone
array_set_capacity
array_grow
*/
array_init_none :: proc(a: ^$A/Array, allocator := context.allocator) {
array_init_len_cap(a, 0, ARRAY_DEFAULT_CAPACITY, allocator)
}
array_init_len :: proc(a: ^$A/Array, len: int, allocator := context.allocator) {
array_init_len_cap(a, len, len, allocator)
}
array_init_len_cap :: proc(a: ^$A/Array($T), len: int, cap: int, allocator := context.allocator) {
a.allocator = allocator
a.data = (^T)(mem.alloc(size_of(T)*cap, align_of(T), a.allocator))
a.len = len
a.cap = cap
}
array_init :: proc{array_init_none, array_init_len, array_init_len_cap}
array_delete :: proc(a: $A/Array) {
mem.free(a.data, a.allocator)
}
array_len :: proc(a: $A/Array) -> int {
return a.len
}
array_cap :: proc(a: $A/Array) -> int {
return a.cap
}
array_space :: proc(a: $A/Array) -> int {
return a.cap - a.len
}
array_slice :: proc(a: $A/Array($T)) -> []T {
s := mem.Raw_Slice{a.data, a.len}
return transmute([]T)s
}
array_cap_slice :: proc(a: $A/Array($T)) -> []T {
s := mem.Raw_Slice{a.data, a.cap}
return transmute([]T)s
}
array_get :: proc(a: $A/Array($T), index: int, loc := #caller_location) -> T {
runtime.bounds_check_error_loc(loc, index, array_len(a))
return (^T)(uintptr(a.data) + size_of(T)*uintptr(index))^
}
array_get_ptr :: proc(a: $A/Array($T), index: int, loc := #caller_location) -> ^T {
runtime.bounds_check_error_loc(loc, index, array_len(a))
return (^T)(uintptr(a.data) + size_of(T)*uintptr(index))
}
array_set :: proc(a: ^$A/Array($T), index: int, item: T, loc := #caller_location) {
runtime.bounds_check_error_loc(loc, index, array_len(a^))
(^T)(uintptr(a.data) + size_of(T)*uintptr(index))^ = item
}
// Ensures the backing storage can hold at least `capacity` items.
// Reserving never shrinks: growth happens only when the requested capacity
// exceeds the current capacity. (The original compared against `a.len`,
// which let a reserve smaller than the current capacity shrink the array.)
array_reserve :: proc(a: ^$A/Array, capacity: int) {
	if capacity > a.cap {
		array_set_capacity(a, capacity)
	}
}
array_resize :: proc(a: ^$A/Array, length: int) {
if length > a.len {
array_set_capacity(a, length)
}
a.len = length
}
array_push_back :: proc(a: ^$A/Array($T), item: T) {
if array_space(a^) == 0 {
array_grow(a)
}
a.len += 1
array_set(a, a.len-1, item)
}
array_push_front :: proc(a: ^$A/Array($T), item: T) {
if array_space(a^) == 0 {
array_grow(a)
}
a.len += 1
data := array_slice(a^)
copy(data[1:], data[:])
data[0] = item
}
array_pop_back :: proc(a: ^$A/Array($T), loc := #caller_location) -> T {
assert(condition=a.len > 0, loc=loc)
item := array_get(a^, a.len-1)
a.len -= 1
return item
}
array_pop_front :: proc(a: ^$A/Array($T), loc := #caller_location) -> T {
assert(condition=a.len > 0, loc=loc)
item := array_get(a^, 0)
s := array_slice(a^)
copy(s[:], s[1:])
a.len -= 1
return item
}
array_consume :: proc(a: ^$A/Array($T), count: int, loc := #caller_location) {
assert(condition=a.len >= count, loc=loc)
a.len -= count
}
array_trim :: proc(a: ^$A/Array($T)) {
array_set_capacity(a, a.len)
}
array_clear :: proc(a: ^$A/Array($T)) {
array_resize(a, 0)
}
array_clone :: proc(a: $A/Array($T), allocator := context.allocator) -> A {
res: A
array_init(&res, array_len(a), array_len(a), allocator)
copy(array_slice(res), array_slice(a))
return res
}
array_push_back_elems :: proc(a: ^$A/Array($T), items: ..T) {
if array_space(a^) < len(items) {
array_grow(a, a.len + len(items))
}
offset := a.len
data := array_cap_slice(a^)
n := copy(data[a.len:], items)
a.len += n
}
array_push :: proc{array_push_back, array_push_back_elems}
array_append :: proc{array_push_back, array_push_back_elems}
array_set_capacity :: proc(a: ^$A/Array($T), new_capacity: int) {
if new_capacity == a.cap {
return
}
if new_capacity < a.len {
array_resize(a, new_capacity)
}
new_data: ^T
if new_capacity > 0 {
if a.allocator.procedure == nil {
a.allocator = context.allocator
}
new_data = (^T)(mem.alloc(size_of(T)*new_capacity, align_of(T), a.allocator))
if new_data != nil {
mem.copy(new_data, a.data, size_of(T)*a.len)
}
}
mem.free(a.data, a.allocator)
a.data = new_data
a.cap = new_capacity
}
array_grow :: proc(a: ^$A/Array, min_capacity: int = 0) {
new_capacity := max(array_len(a^)*2 + 8, min_capacity)
array_set_capacity(a, new_capacity)
}

View File

@@ -0,0 +1,124 @@
package dynamic_bit_array
import "core:intrinsics"
/*
Note that these constants are dependent on the backing being a u64.
*/
@(private="file")
INDEX_SHIFT :: 6
@(private="file")
INDEX_MASK :: 63
Bit_Array :: struct {
bits: [dynamic]u64,
bias: int,
}
/*
In:
- ba: ^Bit_Array - a pointer to the Bit Array
- index: The bit index. Can be an enum member.
Out:
- res: The bit you're interested in.
- ok: Whether the index was valid. Returns `false` if the index is smaller than the bias.
The `ok` return value may be ignored.
*/
get :: proc(ba: ^Bit_Array, #any_int index: uint, allocator := context.allocator) -> (res: bool, ok: bool) {
	// Check `ba` for nil *before* touching `ba.bias`; the original read
	// `ba.bias` first, dereferencing a possibly-nil pointer.
	if ba == nil || int(index) < ba.bias { return false, false }
	idx := int(index) - ba.bias
	context.allocator = allocator
	leg_index := idx >> INDEX_SHIFT
	bit_index := idx & INDEX_MASK
	/*
		If we `get` a bit that doesn't fit in the Bit Array, it's naturally `false`.
		This early-out prevents unnecessary resizing.
	*/
	if leg_index + 1 > len(ba.bits) { return false, true }
	val := u64(1 << uint(bit_index))
	res = ba.bits[leg_index] & val == val
	return res, true
}
/*
In:
- ba: ^Bit_Array - a pointer to the Bit Array
- index: The bit index. Can be an enum member.
Out:
- ok: Whether or not we managed to set requested bit.
`set` automatically resizes the Bit Array to accommodate the requested index if needed.
*/
set :: proc(ba: ^Bit_Array, #any_int index: uint, allocator := context.allocator) -> (ok: bool) {
	// Check `ba` for nil *before* touching `ba.bias`; the original read
	// `ba.bias` first, dereferencing a possibly-nil pointer.
	if ba == nil || int(index) < ba.bias { return false }
	idx := int(index) - ba.bias
	context.allocator = allocator
	leg_index := idx >> INDEX_SHIFT
	bit_index := idx & INDEX_MASK
	resize_if_needed(ba, leg_index) or_return
	ba.bits[leg_index] |= 1 << uint(bit_index)
	return true
}
/*
A helper function to create a Bit Array with optional bias, in case your smallest index is non-zero (including negative).
*/
create :: proc(max_index: int, min_index := 0, allocator := context.allocator) -> (res: Bit_Array, ok: bool) #optional_ok {
	context.allocator = allocator
	size_in_bits := max_index - min_index
	if size_in_bits < 1 { return {}, false }
	// `resize_if_needed` takes a *leg* index, not a bit count. The highest
	// valid bit is `size_in_bits - 1`, so its leg sizes the backing array.
	// (The original computed `legs` but then passed `size_in_bits`,
	// over-allocating the backing storage by a factor of 64.)
	legs := (size_in_bits - 1) >> INDEX_SHIFT
	res = Bit_Array{
		bias = min_index,
	}
	return res, resize_if_needed(&res, legs)
}
/*
Sets all bits to `false`, keeping the backing storage so the Bit Array can be reused.
*/
clear :: proc(ba: ^Bit_Array) {
	if ba == nil { return }
	// Zero the legs in place. The original assigned `ba.bits = {}`, which
	// replaced the dynamic array and leaked its existing allocation.
	for _, i in ba.bits {
		ba.bits[i] = 0
	}
}
/*
Releases the memory used by the Bit Array.
*/
destroy :: proc(ba: ^Bit_Array) {
if ba == nil { return }
delete(ba.bits)
}
/*
Resizes the Bit Array. For internal use.
If you want to reserve the memory for a given-sized Bit Array up front, you can use `create`.
*/
@(private="file")
resize_if_needed :: proc(ba: ^Bit_Array, legs: int, allocator := context.allocator) -> (ok: bool) {
if ba == nil { return false }
context.allocator = allocator
if legs + 1 > len(ba.bits) {
resize(&ba.bits, legs + 1)
}
return len(ba.bits) > legs
}

View File

@@ -0,0 +1,52 @@
package dynamic_bit_array
/*
The Bit Array can be used in several ways:
-- By default you don't need to instantiate a Bit Array:
package test
import "core:fmt"
import "core:container/bit_array"
main :: proc() {
using bit_array
bits: Bit_Array
// returns `true`
fmt.println(set(&bits, 42))
// returns `false`, `false`, because this Bit Array wasn't created to allow negative indices.
was_set, was_retrieved := get(&bits, -1)
fmt.println(was_set, was_retrieved)
}
-- A Bit Array can optionally allow for negative indices, if the minimum value was given during creation:
package test
import "core:fmt"
import "core:container/bit_array"
main :: proc() {
Foo :: enum int {
Negative_Test = -42,
Bar = 420,
Leaves = 69105,
}
using bit_array
bits := create(int(max(Foo)), int(min(Foo)))
defer destroy(&bits)
fmt.printf("Set(Bar): %v\n", set(&bits, Foo.Bar))
fmt.printf("Get(Bar): %v, %v\n", get(&bits, Foo.Bar))
fmt.printf("Set(Negative_Test): %v\n", set(&bits, Foo.Negative_Test))
fmt.printf("Get(Leaves): %v, %v\n", get(&bits, Foo.Leaves))
fmt.printf("Get(Negative_Test): %v, %v\n", get(&bits, Foo.Negative_Test))
fmt.printf("Freed.\n")
}
*/

View File

@@ -1,80 +0,0 @@
package container
import "core:mem"
Bloom_Hash_Proc :: #type proc(data: []byte) -> u32
Bloom_Hash :: struct {
hash_proc: Bloom_Hash_Proc,
next: ^Bloom_Hash,
}
Bloom_Filter :: struct {
allocator: mem.Allocator,
hash: ^Bloom_Hash,
bits: []byte,
}
bloom_filter_init :: proc(b: ^Bloom_Filter, size: int, allocator := context.allocator) {
b.allocator = allocator
b.bits = make([]byte, size, allocator)
}
bloom_filter_destroy :: proc(b: ^Bloom_Filter) {
context.allocator = b.allocator
delete(b.bits)
for b.hash != nil {
hash := b.hash
b.hash = b.hash.next
free(hash)
}
}
bloom_filter_add_hash_proc :: proc(b: ^Bloom_Filter, hash_proc: Bloom_Hash_Proc) {
context.allocator = b.allocator
h := new(Bloom_Hash)
h.hash_proc = hash_proc
head := &b.hash
for head^ != nil {
head = &(head^.next)
}
head^ = h
}
// Sets the filter bits for `item` under every registered hash proc.
bloom_filter_add :: proc(b: ^Bloom_Filter, item: []byte) {
	#no_bounds_check for h := b.hash; h != nil; h = h.next {
		hash := h.hash_proc(item)
		hash %= u32(len(b.bits) * 8)
		// byte index = hash >> 3, bit within the byte = hash & 7.
		// (The original masked with 3, so bits 3..7 of each byte were never used,
		// raising the false-positive rate.)
		b.bits[hash >> 3] |= 1 << (hash & 7)
	}
}
bloom_filter_add_string :: proc(b: ^Bloom_Filter, item: string) {
bloom_filter_add(b, transmute([]byte)item)
}
bloom_filter_add_raw :: proc(b: ^Bloom_Filter, data: rawptr, size: int) {
item := mem.slice_ptr((^byte)(data), size)
bloom_filter_add(b, item)
}
// Reports whether `item` is possibly in the set: `false` is definitive,
// `true` may be a false positive.
bloom_filter_test :: proc(b: ^Bloom_Filter, item: []byte) -> bool {
	#no_bounds_check for h := b.hash; h != nil; h = h.next {
		hash := h.hash_proc(item)
		hash %= u32(len(b.bits) * 8)
		// Must mirror bloom_filter_add's bit layout: bit within the byte is
		// hash & 7, not hash & 3.
		if (b.bits[hash >> 3] & (1 << (hash & 7)) == 0) {
			return false
		}
	}
	return true
}
bloom_filter_test_string :: proc(b: ^Bloom_Filter, item: string) -> bool {
return bloom_filter_test(b, transmute([]byte)item)
}
bloom_filter_test_raw :: proc(b: ^Bloom_Filter, data: rawptr, size: int) -> bool {
item := mem.slice_ptr((^byte)(data), size)
return bloom_filter_test(b, item)
}

View File

@@ -1,377 +0,0 @@
package container
import "core:intrinsics"
_ :: intrinsics
Map :: struct($Key, $Value: typeid) where intrinsics.type_is_valid_map_key(Key) {
hash: Array(int),
entries: Array(Map_Entry(Key, Value)),
}
Map_Entry :: struct($Key, $Value: typeid) where intrinsics.type_is_valid_map_key(Key) {
hash: uintptr,
next: int,
key: Key,
value: Value,
}
/*
map_init :: proc{
map_init_none,
map_init_cap,
}
map_delete
map_has
map_get
map_get_default
map_get_ptr
map_set
map_remove
map_reserve
map_clear
// Multi Map
multi_map_find_first
multi_map_find_next
multi_map_count
multi_map_get :: proc{
multi_map_get_array,
multi_map_get_slice,
};
multi_map_get_as_slice
multi_map_insert
multi_map_remove
multi_map_remove_all
*/
map_init :: proc{map_init_none, map_init_cap}
map_init_none :: proc(m: ^$M/Map($Key, $Value), allocator := context.allocator) {
m.hash.allocator = allocator
m.entries.allocator = allocator
}
map_init_cap :: proc(m: ^$M/Map($Key, $Value), cap: int, allocator := context.allocator) {
m.hash.allocator = allocator
m.entries.allocator = allocator
map_reserve(m, cap)
}
map_delete :: proc(m: $M/Map($Key, $Value)) {
array_delete(m.hash)
array_delete(m.entries)
}
map_has :: proc(m: $M/Map($Key, $Value), key: Key) -> bool {
return _map_find_or_fail(m, key) >= 0
}
map_get :: proc(m: $M/Map($Key, $Value), key: Key) -> (res: Value, ok: bool) #optional_ok {
i := _map_find_or_fail(m, key)
if i < 0 {
return {}, false
}
return array_get(m.entries, i).value, true
}
map_get_default :: proc(m: $M/Map($Key, $Value), key: Key, default: Value) -> (res: Value, ok: bool) #optional_ok {
i := _map_find_or_fail(m, key)
if i < 0 {
return default, false
}
return array_get(m.entries, i).value, true
}
// Returns a pointer to the value stored for `key`, or nil if absent.
map_get_ptr :: proc(m: $M/Map($Key, $Value), key: Key) -> ^Value {
	i := _map_find_or_fail(m, key)
	if i < 0 {
		return nil
	}
	// Take the address of the value field; the original returned the field
	// itself (type Value), which does not match the ^Value return type.
	return &array_get_ptr(m.entries, i).value
}
map_set :: proc(m: ^$M/Map($Key, $Value), key: Key, value: Value) {
if array_len(m.hash) == 0 {
_map_grow(m)
}
i := _map_find_or_make(m, key)
array_get_ptr(m.entries, i).value = value
if _map_full(m^) {
_map_grow(m)
}
}
map_remove :: proc(m: ^$M/Map($Key, $Value), key: Key) {
fr := _map_find_key(m^, key)
if fr.entry_index >= 0 {
_map_erase(m, fr)
}
}
map_reserve :: proc(m: ^$M/Map($Key, $Value), new_size: int) {
nm: M
map_init(&nm, m.hash.allocator)
array_resize(&nm.hash, new_size)
array_reserve(&nm.entries, array_len(m.entries))
for i in 0..<new_size {
array_set(&nm.hash, i, -1)
}
for i in 0..<array_len(m.entries) {
e := array_get(m.entries, i)
multi_map_insert(&nm, e.key, e.value)
}
map_delete(m^)
m^ = nm
}
map_clear :: proc(m: ^$M/Map($Key, $Value)) {
array_clear(&m.hash)
array_clear(&m.entries)
}
multi_map_find_first :: proc(m: $M/Map($Key, $Value), key: Key) -> ^Map_Entry(Key, Value) {
i := _map_find_or_fail(m, key)
if i < 0 {
return nil
}
return array_get_ptr(m.entries, i)
}
multi_map_find_next :: proc(m: $M/Map($Key, $Value), e: ^Map_Entry(Key, Value)) -> ^Map_Entry(Key, Value) {
i := e.next
for i >= 0 {
it := array_get_ptr(m.entries, i)
if it.hash == e.hash && it.key == e.key {
return it
}
i = it.next
}
return nil
}
multi_map_count :: proc(m: $M/Map($Key, $Value), key: Key) -> int {
n := 0
e := multi_map_find_first(m, key)
for e != nil {
n += 1
e = multi_map_find_next(m, e)
}
return n
}
multi_map_get :: proc{multi_map_get_array, multi_map_get_slice}
multi_map_get_array :: proc(m: $M/Map($Key, $Value), key: Key, items: ^Array(Value)) {
if items == nil {
return
}
e := multi_map_find_first(m, key)
for e != nil {
array_append(items, e.value)
e = multi_map_find_next(m, e)
}
}
multi_map_get_slice :: proc(m: $M/Map($Key, $Value), key: Key, items: []Value) {
e := multi_map_find_first(m, key)
i := 0
for e != nil && i < len(items) {
items[i] = e.value
i += 1
e = multi_map_find_next(m, e)
}
}
multi_map_get_as_slice :: proc(m: $M/Map($Key, $Value), key: Key) -> []Value {
items: Array(Value)
array_init(&items, 0)
e := multi_map_find_first(m, key)
for e != nil {
array_append(&items, e.value)
e = multi_map_find_next(m, e)
}
return array_slice(items)
}
multi_map_insert :: proc(m: ^$M/Map($Key, $Value), key: Key, value: Value) {
if array_len(m.hash) == 0 {
_map_grow(m)
}
i := _map_make(m, key)
array_get_ptr(m.entries, i).value = value
if _map_full(m^) {
_map_grow(m)
}
}
multi_map_remove :: proc(m: ^$M/Map($Key, $Value), e: ^Map_Entry(Key, Value)) {
fr := _map_find_entry(m, e)
if fr.entry_index >= 0 {
_map_erase(m, fr)
}
}
// Removes every entry stored under `key`.
multi_map_remove_all :: proc(m: ^$M/Map($Key, $Value), key: Key) {
	// `map_has` is the membership test declared in this package; the
	// original called a nonexistent `map_exist`.
	for map_has(m^, key) {
		map_remove(m, key)
	}
}
/// Internal
Map_Find_Result :: struct {
hash_index: int,
entry_prev: int,
entry_index: int,
}
_map_add_entry :: proc(m: ^$M/Map($Key, $Value), key: Key) -> int where intrinsics.type_is_valid_map_key(Key) {
hasher := intrinsics.type_hasher_proc(Key)
e: Map_Entry(Key, Value)
e.key = key
e.hash = hasher(&e.key, 0)
e.next = -1
idx := array_len(m.entries)
array_push(&m.entries, e)
return idx
}
// Unlinks the entry described by `fr` from its hash chain and removes it
// from the entries array by swapping the last entry into its slot.
_map_erase :: proc(m: ^$M/Map, fr: Map_Find_Result) {
	// Unlink the erased entry from its bucket chain.
	if fr.entry_prev < 0 {
		array_set(&m.hash, fr.hash_index, array_get(m.entries, fr.entry_index).next)
	} else {
		array_get_ptr(m.entries, fr.entry_prev).next = array_get(m.entries, fr.entry_index).next
	}
	if fr.entry_index == array_len(m.entries)-1 {
		array_pop_back(&m.entries)
		return
	}
	// Move the last entry into the vacated slot, then re-point whatever
	// referenced it (bucket head or predecessor) at its new index.
	array_set(&m.entries, fr.entry_index, array_get(m.entries, array_len(m.entries)-1))
	last := _map_find_key(m^, array_get(m.entries, fr.entry_index).key)
	if last.entry_prev < 0 {
		// No predecessor: the bucket head pointed at the moved entry.
		// (The original had these branches swapped, indexing the entries
		// array with a negative index.)
		array_set(&m.hash, last.hash_index, fr.entry_index)
	} else {
		array_get_ptr(m.entries, last.entry_prev).next = fr.entry_index
	}
	// Drop the now-duplicated last entry so the array actually shrinks.
	array_pop_back(&m.entries)
}
_map_find_key :: proc(m: $M/Map($Key, $Value), key: Key) -> Map_Find_Result where intrinsics.type_is_valid_map_key(Key) {
fr: Map_Find_Result
fr.hash_index = -1
fr.entry_prev = -1
fr.entry_index = -1
if array_len(m.hash) == 0 {
return fr
}
hasher := intrinsics.type_hasher_proc(Key)
key := key
hash := hasher(&key, 0)
fr.hash_index = int(hash % uintptr(array_len(m.hash)))
fr.entry_index = array_get(m.hash, fr.hash_index)
for fr.entry_index >= 0 {
it := array_get_ptr(m.entries, fr.entry_index)
if it.hash == hash && it.key == key {
return fr
}
fr.entry_prev = fr.entry_index
fr.entry_index = it.next
}
return fr
}
_map_find_entry :: proc(m: ^$M/Map($Key, $Value), e: ^Map_Entry(Key, Value)) -> Map_Find_Result {
fr: Map_Find_Result
fr.hash_index = -1
fr.entry_prev = -1
fr.entry_index = -1
if array_len(m.hash) == 0 {
return fr
}
fr.hash_index = int(e.hash % uintptr(array_len(m.hash)))
fr.entry_index = array_get(m.hash, fr.hash_index)
for fr.entry_index >= 0 {
it := array_get_ptr(m.entries, fr.entry_index)
if it == e {
return fr
}
fr.entry_prev = fr.entry_index
fr.entry_index = it.next
}
return fr
}
_map_find_or_fail :: proc(m: $M/Map($Key, $Value), key: Key) -> int {
return _map_find_key(m, key).entry_index
}
_map_find_or_make :: proc(m: ^$M/Map($Key, $Value), key: Key) -> int {
fr := _map_find_key(m^, key)
if fr.entry_index >= 0 {
return fr.entry_index
}
i := _map_add_entry(m, key)
if fr.entry_prev < 0 {
array_set(&m.hash, fr.hash_index, i)
} else {
array_get_ptr(m.entries, fr.entry_prev).next = i
}
return i
}
_map_make :: proc(m: ^$M/Map($Key, $Value), key: Key) -> int {
fr := _map_find_key(m^, key)
i := _map_add_entry(m, key)
if fr.entry_prev < 0 {
array_set(&m.hash, fr.hash_index, i)
} else {
array_get_ptr(m.entries, fr.entry_prev).next = i
}
array_get_ptr(m.entries, i).next = fr.entry_index
return i
}
_map_full :: proc(m: $M/Map($Key, $Value)) -> bool {
// TODO(bill): Determine good max load factor
return array_len(m.entries) >= (array_len(m.hash) / 4)*3
}
_map_grow :: proc(m: ^$M/Map($Key, $Value)) {
new_size := array_len(m.entries) * 4 + 7 // TODO(bill): Determine good grow rate
map_reserve(m, new_size)
}

View File

@@ -1,121 +0,0 @@
package container
Priority_Queue :: struct($T: typeid) {
data: Array(T),
len: int,
priority: proc(item: T) -> int,
}
// Initializes an empty priority queue using priority proc `f`.
// (The original called `queue_init_len`, a Queue procedure with an
// incompatible signature.)
priority_queue_init_none :: proc(q: ^$Q/Priority_Queue($T), f: proc(item: T) -> int, allocator := context.allocator) {
	priority_queue_init_len(q, f, 0, allocator)
}
// Initializes a priority queue with `len` zero-valued items.
// (The original called the Queue procedure `queue_init_len_cap` and
// ignored its `len` argument, always passing 0.)
priority_queue_init_len :: proc(q: ^$Q/Priority_Queue($T), f: proc(item: T) -> int, len: int, allocator := context.allocator) {
	priority_queue_init_len_cap(q, f, len, max(len, 16), allocator)
}
priority_queue_init_len_cap :: proc(q: ^$Q/Priority_Queue($T), f: proc(item: T) -> int, len: int, cap: int, allocator := context.allocator) {
array_init(&q.data, len, cap, allocator)
q.len = len
q.priority = f
}
priority_queue_init :: proc{priority_queue_init_none, priority_queue_init_len, priority_queue_init_len_cap}
priority_queue_delete :: proc(q: $Q/Priority_Queue($T)) {
array_delete(q.data)
}
priority_queue_clear :: proc(q: ^$Q/Priority_Queue($T)) {
q.len = 0
}
priority_queue_len :: proc(q: $Q/Priority_Queue($T)) -> int {
return q.len
}
priority_queue_cap :: proc(q: $Q/Priority_Queue($T)) -> int {
return array_cap(q.data)
}
priority_queue_space :: proc(q: $Q/Priority_Queue($T)) -> int {
return array_len(q.data) - q.len
}
// Grows the backing array so it can hold at least `capacity` items.
priority_queue_reserve :: proc(q: ^$Q/Priority_Queue($T), capacity: int) {
	if capacity > q.len {
		// The original referenced an undeclared `new_capacity` here.
		array_resize(&q.data, capacity)
	}
}
// Sets the queue length, growing the backing array when needed.
priority_queue_resize :: proc(q: ^$Q/Priority_Queue($T), length: int) {
	if length > q.len {
		// The original referenced an undeclared `new_capacity` here.
		array_resize(&q.data, length)
	}
	q.len = length
}
_priority_queue_grow :: proc(q: ^$Q/Priority_Queue($T), min_capacity: int = 0) {
new_capacity := max(array_len(q.data)*2 + 8, min_capacity)
array_resize(&q.data, new_capacity)
}
priority_queue_push :: proc(q: ^$Q/Priority_Queue($T), item: T) {
if array_len(q.data) - q.len == 0 {
_priority_queue_grow(q)
}
s := array_slice(q.data)
s[q.len] = item
i := q.len
for i > 0 {
p := (i - 1) / 2
if q.priority(s[p]) <= q.priority(item) {
break
}
s[i] = s[p]
i = p
}
q.len += 1
if q.len > 0 {
s[i] = item
}
}
priority_queue_pop :: proc(q: ^$Q/Priority_Queue($T)) -> T {
assert(q.len > 0)
s := array_slice(q.data)
min := s[0]
root := s[q.len-1]
q.len -= 1
i := 0
for i * 2 + 1 < q.len {
a := i * 2 + 1
b := i * 2 + 2
c := b < q.len && q.priority(s[b]) < q.priority(s[a]) ? b : a
if q.priority(s[c]) >= q.priority(root) {
break
}
s[i] = s[c]
i = c
}
if q.len > 0 {
s[i] = root
}
return min
}
priority_queue_peek :: proc(q: ^$Q/Priority_Queue($T)) -> T {
assert(q.len > 0)
s := array_slice(q.data)
return s[0]
}

View File

@@ -1,175 +0,0 @@
package container
Queue :: struct($T: typeid) {
data: Array(T),
len: int,
offset: int,
}
/*
queue_init :: proc{
queue_init_none,
queue_init_len,
queue_init_len_cap,
}
queue_delete
queue_clear
queue_len
queue_cap
queue_space
queue_get
queue_set
queue_reserve
queue_resize
queue_push :: proc{
queue_push_back,
queue_push_elems,
};
queue_push_front
queue_pop_front
queue_pop_back
queue_consume
*/
queue_init_none :: proc(q: ^$Q/Queue($T), allocator := context.allocator) {
queue_init_len(q, 0, allocator)
}
// Initializes a queue holding `len` zero-valued items.
// (The original ignored `len` and always initialized with length 0.)
queue_init_len :: proc(q: ^$Q/Queue($T), len: int, allocator := context.allocator) {
	queue_init_len_cap(q, len, max(len, 16), allocator)
}
queue_init_len_cap :: proc(q: ^$Q/Queue($T), len: int, cap: int, allocator := context.allocator) {
array_init(&q.data, len, cap, allocator)
q.len = len
q.offset = 0
}
queue_init :: proc{queue_init_none, queue_init_len, queue_init_len_cap}
queue_delete :: proc(q: $Q/Queue($T)) {
array_delete(q.data)
}
queue_clear :: proc(q: ^$Q/Queue($T)) {
q.len = 0
}
queue_len :: proc(q: $Q/Queue($T)) -> int {
return q.len
}
queue_cap :: proc(q: $Q/Queue($T)) -> int {
return array_cap(q.data)
}
queue_space :: proc(q: $Q/Queue($T)) -> int {
return array_len(q.data) - q.len
}
queue_get :: proc(q: $Q/Queue($T), index: int) -> T {
i := (index + q.offset) % array_len(q.data)
data := array_slice(q.data)
return data[i]
}
queue_set :: proc(q: ^$Q/Queue($T), index: int, item: T) {
i := (index + q.offset) % array_len(q.data)
data := array_slice(q.data)
data[i] = item
}
queue_reserve :: proc(q: ^$Q/Queue($T), capacity: int) {
if capacity > q.len {
_queue_increase_capacity(q, capacity)
}
}
queue_resize :: proc(q: ^$Q/Queue($T), length: int) {
if length > q.len {
_queue_increase_capacity(q, length)
}
q.len = length
}
queue_push_back :: proc(q: ^$Q/Queue($T), item: T) {
if queue_space(q^) == 0 {
_queue_grow(q)
}
queue_set(q, q.len, item)
q.len += 1
}
queue_push_front :: proc(q: ^$Q/Queue($T), item: T) {
if queue_space(q^) == 0 {
_queue_grow(q)
}
q.offset = (q.offset - 1 + array_len(q.data)) % array_len(q.data)
q.len += 1
queue_set(q, 0, item)
}
queue_pop_front :: proc(q: ^$Q/Queue($T)) -> T {
assert(q.len > 0)
item := queue_get(q^, 0)
q.offset = (q.offset + 1) % array_len(q.data)
q.len -= 1
if q.len == 0 {
q.offset = 0
}
return item
}
queue_pop_back :: proc(q: ^$Q/Queue($T)) -> T {
assert(q.len > 0)
item := queue_get(q^, q.len-1)
q.len -= 1
return item
}
// Discards `count` items from the front of the queue.
queue_consume :: proc(q: ^$Q/Queue($T), count: int) {
	if count == 0 { return } // also guards the modulo when the queue is empty
	// Wrap the ring offset with modulo; the original used bitwise `&`, which
	// is only correct for power-of-two lengths (and even then needs `len-1`).
	q.offset = (q.offset + count) % array_len(q.data)
	q.len -= count
}
queue_push_elems :: proc(q: ^$Q/Queue($T), items: ..T) {
if queue_space(q^) < len(items) {
_queue_grow(q, q.len + len(items))
}
size := array_len(q.data)
insert := (q.offset + q.len) % size
to_insert := len(items)
if insert + to_insert > size {
to_insert = size - insert
}
the_items := items[:]
data := array_slice(q.data)
q.len += copy(data[insert:][:to_insert], the_items)
the_items = the_items[to_insert:]
q.len += copy(data[:], the_items)
}
queue_push :: proc{queue_push_back, queue_push_elems}
_queue_increase_capacity :: proc(q: ^$Q/Queue($T), new_capacity: int) {
end := array_len(q.data)
array_resize(&q.data, new_capacity)
if q.offset + q.len > end {
end_items := q.len + end
data := array_slice(q.data)
copy(data[new_capacity-end_items:][:end_items], data[q.offset:][:end_items])
q.offset += new_capacity - end
}
}
_queue_grow :: proc(q: ^$Q/Queue($T), min_capacity: int = 0) {
new_capacity := max(array_len(q.data)*2 + 8, min_capacity)
_queue_increase_capacity(q, new_capacity)
}

View File

@@ -1,74 +0,0 @@
package container
Ring :: struct($T: typeid) {
next, prev: ^Ring(T),
value: T,
}
ring_init :: proc(r: ^$R/Ring) -> ^R {
r.prev, r.next = r, r
return r
}
ring_next :: proc(r: ^$R/Ring) -> ^R {
if r.next == nil {
return ring_init(r)
}
return r.next
}
ring_prev :: proc(r: ^$R/Ring) -> ^R {
if r.prev == nil {
return ring_init(r)
}
return r.prev
}
ring_move :: proc(r: ^$R/Ring, n: int) -> ^R {
r := r
if r.next == nil {
return ring_init(r)
}
switch {
case n < 0:
for _ in n..<0 {
r = r.prev
}
case n > 0:
for _ in 0..<n {
r = r.next
}
}
return r
}
ring_link :: proc(r, s: ^$R/Ring) -> ^R {
n := ring_next(r)
if s != nil {
p := ring_prev(s)
r.next = s
s.prev = r
n.prev = p
p.next = n
}
return n
}
ring_unlink :: proc(r: ^$R/Ring, n: int) -> ^R {
if n <= 0 {
return nil
}
return ring_link(r, ring_move(r, n+1))
}
// Reports the number of elements in ring `r`; a nil ring has length 0.
ring_len :: proc(r: ^$R/Ring) -> int {
	if r == nil {
		return 0
	}
	// Count `r` itself, then walk the ring until we come back around.
	count := 1
	for node := ring_next(r); node != r; node = node.next {
		count += 1
	}
	return count
}

View File

@@ -1,240 +0,0 @@
package container
Set :: struct {
hash: Array(int),
entries: Array(Set_Entry),
}
Set_Entry :: struct {
key: u64,
next: int,
}
/*
set_init :: proc{
set_init_none,
set_init_cap,
}
set_delete
set_in
set_not_in
set_add
set_remove
set_reserve
set_clear
*/
set_init :: proc{set_init_none, set_init_cap}
set_init_none :: proc(m: ^Set, allocator := context.allocator) {
m.hash.allocator = allocator
m.entries.allocator = allocator
}
set_init_cap :: proc(m: ^Set, cap: int, allocator := context.allocator) {
m.hash.allocator = allocator
m.entries.allocator = allocator
set_reserve(m, cap)
}
set_delete :: proc(m: Set) {
array_delete(m.hash)
array_delete(m.entries)
}
set_in :: proc(m: Set, key: u64) -> bool {
return _set_find_or_fail(m, key) >= 0
}
set_not_in :: proc(m: Set, key: u64) -> bool {
return _set_find_or_fail(m, key) < 0
}
set_add :: proc(m: ^Set, key: u64) {
if array_len(m.hash) == 0 {
_set_grow(m)
}
_ = _set_find_or_make(m, key)
if _set_full(m^) {
_set_grow(m)
}
}
set_remove :: proc(m: ^Set, key: u64) {
fr := _set_find_key(m^, key)
if fr.entry_index >= 0 {
_set_erase(m, fr)
}
}
set_reserve :: proc(m: ^Set, new_size: int) {
nm: Set
set_init(&nm, m.hash.allocator)
array_resize(&nm.hash, new_size)
array_reserve(&nm.entries, array_len(m.entries))
for i in 0..<new_size {
array_set(&nm.hash, i, -1)
}
for i in 0..<array_len(m.entries) {
e := array_get(m.entries, i)
set_add(&nm, e.key)
}
set_delete(m^)
m^ = nm
}
set_clear :: proc(m: ^Set) {
array_clear(&m.hash)
array_clear(&m.entries)
}
set_equal :: proc(a, b: Set) -> bool {
a_entries := array_slice(a.entries)
b_entries := array_slice(b.entries)
if len(a_entries) != len(b_entries) {
return false
}
for e in a_entries {
if set_not_in(b, e.key) {
return false
}
}
return true
}
/// Internal
_set_add_entry :: proc(m: ^Set, key: u64) -> int {
e: Set_Entry
e.key = key
e.next = -1
idx := array_len(m.entries)
array_push(&m.entries, e)
return idx
}
// Unlinks the entry described by `fr` from its hash chain and removes it
// from the entries array by swapping the last entry into its slot.
_set_erase :: proc(m: ^Set, fr: Map_Find_Result) {
	// Unlink the erased entry from its bucket chain.
	if fr.entry_prev < 0 {
		array_set(&m.hash, fr.hash_index, array_get(m.entries, fr.entry_index).next)
	} else {
		array_get_ptr(m.entries, fr.entry_prev).next = array_get(m.entries, fr.entry_index).next
	}
	if fr.entry_index == array_len(m.entries)-1 {
		array_pop_back(&m.entries)
		return
	}
	// Move the last entry into the vacated slot, then re-point whatever
	// referenced it (bucket head or predecessor) at its new index.
	array_set(&m.entries, fr.entry_index, array_get(m.entries, array_len(m.entries)-1))
	last := _set_find_key(m^, array_get(m.entries, fr.entry_index).key)
	if last.entry_prev < 0 {
		// No predecessor: the bucket head pointed at the moved entry.
		// (The original had these branches swapped, indexing the entries
		// array with a negative index.)
		array_set(&m.hash, last.hash_index, fr.entry_index)
	} else {
		array_get_ptr(m.entries, last.entry_prev).next = fr.entry_index
	}
	// Drop the now-duplicated last entry so the array actually shrinks.
	array_pop_back(&m.entries)
}
_set_find_key :: proc(m: Set, key: u64) -> Map_Find_Result {
fr: Map_Find_Result
fr.hash_index = -1
fr.entry_prev = -1
fr.entry_index = -1
if array_len(m.hash) == 0 {
return fr
}
fr.hash_index = int(key % u64(array_len(m.hash)))
fr.entry_index = array_get(m.hash, fr.hash_index)
for fr.entry_index >= 0 {
it := array_get_ptr(m.entries, fr.entry_index)
if it.key == key {
return fr
}
fr.entry_prev = fr.entry_index
fr.entry_index = it.next
}
return fr
}
_set_find_entry :: proc(m: ^Set, e: ^Set_Entry) -> Map_Find_Result {
fr: Map_Find_Result
fr.hash_index = -1
fr.entry_prev = -1
fr.entry_index = -1
if array_len(m.hash) == 0 {
return fr
}
fr.hash_index = int(e.key % u64(array_len(m.hash)))
fr.entry_index = array_get(m.hash, fr.hash_index)
for fr.entry_index >= 0 {
it := array_get_ptr(m.entries, fr.entry_index)
if it == e {
return fr
}
fr.entry_prev = fr.entry_index
fr.entry_index = it.next
}
return fr
}
_set_find_or_fail :: proc(m: Set, key: u64) -> int {
return _set_find_key(m, key).entry_index
}
_set_find_or_make :: proc(m: ^Set, key: u64) -> int {
fr := _set_find_key(m^, key)
if fr.entry_index >= 0 {
return fr.entry_index
}
i := _set_add_entry(m, key)
if fr.entry_prev < 0 {
array_set(&m.hash, fr.hash_index, i)
} else {
array_get_ptr(m.entries, fr.entry_prev).next = i
}
return i
}
_set_make :: proc(m: ^Set, key: u64) -> int {
fr := _set_find_key(m^, key)
i := _set_add_entry(m, key)
if fr.entry_prev < 0 {
array_set(&m.hash, fr.hash_index, i)
} else {
array_get_ptr(m.entries, fr.entry_prev).next = i
}
array_get_ptr(m.entries, i).next = fr.entry_index
return i
}
_set_full :: proc(m: Set) -> bool {
// TODO(bill): Determine good max load factor
return array_len(m.entries) >= (array_len(m.hash) / 4)*3
}
_set_grow :: proc(m: ^Set) {
new_size := array_len(m.entries) * 4 + 7 // TODO(bill): Determine good grow rate
set_reserve(m, new_size)
}

View File

@@ -1,95 +0,0 @@
package container
Small_Array :: struct($N: int, $T: typeid) where N >= 0 {
data: [N]T,
len: int,
}
small_array_len :: proc(a: $A/Small_Array) -> int {
return a.len
}
small_array_cap :: proc(a: $A/Small_Array) -> int {
return len(a.data)
}
small_array_space :: proc(a: $A/Small_Array) -> int {
return len(a.data) - a.len
}
small_array_slice :: proc(a: ^$A/Small_Array($N, $T)) -> []T {
return a.data[:a.len]
}
small_array_get :: proc(a: $A/Small_Array($N, $T), index: int, loc := #caller_location) -> T {
return a.data[index]
}
small_array_get_ptr :: proc(a: $A/Small_Array($N, $T), index: int, loc := #caller_location) -> ^T {
return &a.data[index]
}
small_array_set :: proc(a: ^$A/Small_Array($N, $T), index: int, item: T, loc := #caller_location) {
a.data[index] = item
}
small_array_resize :: proc(a: ^$A/Small_Array, length: int) {
a.len = min(length, len(a.data))
}
// Appends `item` if there is room; returns whether the push succeeded.
small_array_push_back :: proc(a: ^$A/Small_Array($N, $T), item: T) -> bool {
	if a.len >= len(a.data) {
		return false
	}
	a.data[a.len] = item
	a.len += 1
	return true
}
small_array_push_front :: proc(a: ^$A/Small_Array($N, $T), item: T) -> bool {
if a.len < len(a.data) {
a.len += 1
data := small_array_slice(a)
copy(data[1:], data[:])
data[0] = item
return true
}
return false
}
small_array_pop_back :: proc(a: ^$A/Small_Array($N, $T), loc := #caller_location) -> T {
assert(condition=a.len > 0, loc=loc)
item := a.data[a.len-1]
a.len -= 1
return item
}
small_array_pop_front :: proc(a: ^$A/Small_Array($N, $T), loc := #caller_location) -> T {
assert(condition=a.len > 0, loc=loc)
item := a.data[0]
s := small_array_slice(a)
copy(s[:], s[1:])
a.len -= 1
return item
}
small_array_consume :: proc(a: ^$A/Small_Array($N, $T), count: int, loc := #caller_location) {
assert(condition=a.len >= count, loc=loc)
a.len -= count
}
small_array_clear :: proc(a: ^$A/Small_Array($N, $T)) {
small_array_resize(a, 0)
}
small_array_push_back_elems :: proc(a: ^$A/Small_Array($N, $T), items: ..T) {
n := copy(a.data[a.len:], items[:])
a.len += n
}
small_array_push :: proc{small_array_push_back, small_array_push_back_elems}
small_array_append :: proc{small_array_push_back, small_array_push_back_elems}

View File

@@ -52,11 +52,11 @@ unmarshal_any :: proc(data: []byte, v: any, spec := DEFAULT_SPECIFICATION, alloc
if p.spec == .MJSON {
#partial switch p.curr_token.kind {
case .Ident, .String:
return unmarsal_object(&p, data, .EOF)
return unmarshal_object(&p, data, .EOF)
}
}
return unmarsal_value(&p, data)
return unmarshal_value(&p, data)
}
@@ -148,7 +148,7 @@ assign_float :: proc(val: any, f: $T) -> bool {
@(private)
unmarsal_string :: proc(p: ^Parser, val: any, str: string, ti: ^reflect.Type_Info) -> bool {
unmarshal_string_token :: proc(p: ^Parser, val: any, str: string, ti: ^reflect.Type_Info) -> bool {
val := val
switch dst in &val {
case string:
@@ -198,7 +198,7 @@ unmarsal_string :: proc(p: ^Parser, val: any, str: string, ti: ^reflect.Type_Inf
@(private)
unmarsal_value :: proc(p: ^Parser, v: any) -> (err: Unmarshal_Error) {
unmarshal_value :: proc(p: ^Parser, v: any) -> (err: Unmarshal_Error) {
UNSUPPORTED_TYPE := Unsupported_Type_Error{v.id, p.curr_token}
token := p.curr_token
@@ -257,7 +257,7 @@ unmarsal_value :: proc(p: ^Parser, v: any) -> (err: Unmarshal_Error) {
case .Ident:
advance_token(p)
if p.spec == .MJSON {
if unmarsal_string(p, any{v.data, ti.id}, token.text, ti) {
if unmarshal_string_token(p, any{v.data, ti.id}, token.text, ti) {
return nil
}
}
@@ -266,7 +266,7 @@ unmarsal_value :: proc(p: ^Parser, v: any) -> (err: Unmarshal_Error) {
case .String:
advance_token(p)
str := unquote_string(token, p.spec, p.allocator) or_return
if unmarsal_string(p, any{v.data, ti.id}, str, ti) {
if unmarshal_string_token(p, any{v.data, ti.id}, str, ti) {
return nil
}
delete(str, p.allocator)
@@ -274,10 +274,10 @@ unmarsal_value :: proc(p: ^Parser, v: any) -> (err: Unmarshal_Error) {
case .Open_Brace:
return unmarsal_object(p, v, .Close_Brace)
return unmarshal_object(p, v, .Close_Brace)
case .Open_Bracket:
return unmarsal_array(p, v)
return unmarshal_array(p, v)
case:
if p.spec != .JSON {
@@ -312,16 +312,16 @@ unmarsal_value :: proc(p: ^Parser, v: any) -> (err: Unmarshal_Error) {
@(private)
unmarsal_expect_token :: proc(p: ^Parser, kind: Token_Kind, loc := #caller_location) -> Token {
unmarshal_expect_token :: proc(p: ^Parser, kind: Token_Kind, loc := #caller_location) -> Token {
prev := p.curr_token
err := expect_token(p, kind)
assert(err == nil, "unmarsal_expect_token")
assert(err == nil, "unmarshal_expect_token")
return prev
}
@(private)
unmarsal_object :: proc(p: ^Parser, v: any, end_token: Token_Kind) -> (err: Unmarshal_Error) {
unmarshal_object :: proc(p: ^Parser, v: any, end_token: Token_Kind) -> (err: Unmarshal_Error) {
UNSUPPORTED_TYPE := Unsupported_Type_Error{v.id, p.curr_token}
if end_token == .Close_Brace {
@@ -342,7 +342,7 @@ unmarsal_object :: proc(p: ^Parser, v: any, end_token: Token_Kind) -> (err: Unma
key, _ := parse_object_key(p, p.allocator)
defer delete(key, p.allocator)
unmarsal_expect_token(p, .Colon)
unmarshal_expect_token(p, .Colon)
fields := reflect.struct_fields_zipped(ti.id)
@@ -378,7 +378,7 @@ unmarsal_object :: proc(p: ^Parser, v: any, end_token: Token_Kind) -> (err: Unma
field_ptr := rawptr(uintptr(v.data) + offset)
field := any{field_ptr, type.id}
unmarsal_value(p, field) or_return
unmarshal_value(p, field) or_return
if parse_comma(p) {
break struct_loop
@@ -407,11 +407,11 @@ unmarsal_object :: proc(p: ^Parser, v: any, end_token: Token_Kind) -> (err: Unma
map_loop: for p.curr_token.kind != end_token {
key, _ := parse_object_key(p, p.allocator)
unmarsal_expect_token(p, .Colon)
unmarshal_expect_token(p, .Colon)
mem.zero_slice(elem_backing)
if err := unmarsal_value(p, map_backing_value); err != nil {
if err := unmarshal_value(p, map_backing_value); err != nil {
delete(key, p.allocator)
return err
}
@@ -443,7 +443,7 @@ unmarsal_object :: proc(p: ^Parser, v: any, end_token: Token_Kind) -> (err: Unma
enumerated_array_loop: for p.curr_token.kind != end_token {
key, _ := parse_object_key(p, p.allocator)
unmarsal_expect_token(p, .Colon)
unmarshal_expect_token(p, .Colon)
defer delete(key, p.allocator)
index := -1
@@ -460,7 +460,7 @@ unmarsal_object :: proc(p: ^Parser, v: any, end_token: Token_Kind) -> (err: Unma
index_ptr := rawptr(uintptr(v.data) + uintptr(index*t.elem_size))
index_any := any{index_ptr, t.elem.id}
unmarsal_value(p, index_any) or_return
unmarshal_value(p, index_any) or_return
if parse_comma(p) {
break enumerated_array_loop
@@ -480,10 +480,10 @@ unmarsal_object :: proc(p: ^Parser, v: any, end_token: Token_Kind) -> (err: Unma
@(private)
unmarsal_count_array :: proc(p: ^Parser) -> (length: uintptr) {
unmarshal_count_array :: proc(p: ^Parser) -> (length: uintptr) {
p_backup := p^
p.allocator = mem.nil_allocator()
unmarsal_expect_token(p, .Open_Bracket)
unmarshal_expect_token(p, .Open_Bracket)
array_length_loop: for p.curr_token.kind != .Close_Bracket {
_, _ = parse_value(p)
length += 1
@@ -497,9 +497,9 @@ unmarsal_count_array :: proc(p: ^Parser) -> (length: uintptr) {
}
@(private)
unmarsal_array :: proc(p: ^Parser, v: any) -> (err: Unmarshal_Error) {
unmarshal_array :: proc(p: ^Parser, v: any) -> (err: Unmarshal_Error) {
assign_array :: proc(p: ^Parser, base: rawptr, elem: ^reflect.Type_Info, length: uintptr) -> Unmarshal_Error {
unmarsal_expect_token(p, .Open_Bracket)
unmarshal_expect_token(p, .Open_Bracket)
for idx: uintptr = 0; p.curr_token.kind != .Close_Bracket; idx += 1 {
assert(idx < length)
@@ -507,14 +507,14 @@ unmarsal_array :: proc(p: ^Parser, v: any) -> (err: Unmarshal_Error) {
elem_ptr := rawptr(uintptr(base) + idx*uintptr(elem.size))
elem := any{elem_ptr, elem.id}
unmarsal_value(p, elem) or_return
unmarshal_value(p, elem) or_return
if parse_comma(p) {
break
}
}
unmarsal_expect_token(p, .Close_Bracket)
unmarshal_expect_token(p, .Close_Bracket)
return nil
@@ -524,7 +524,7 @@ unmarsal_array :: proc(p: ^Parser, v: any) -> (err: Unmarshal_Error) {
ti := reflect.type_info_base(type_info_of(v.id))
length := unmarsal_count_array(p)
length := unmarshal_count_array(p)
#partial switch t in ti.variant {
case reflect.Type_Info_Slice:
@@ -578,4 +578,4 @@ unmarsal_array :: proc(p: ^Parser, v: any) -> (err: Unmarshal_Error) {
}
return UNSUPPORTED_TYPE
}
}

View File

@@ -711,6 +711,7 @@ Union_Type :: struct {
poly_params: ^Field_List,
align: ^Expr,
is_maybe: bool,
is_no_nil: bool,
where_token: tokenizer.Token,
where_clauses: []^Expr,
variants: []^Expr,

View File

@@ -888,6 +888,7 @@ parse_for_stmt :: proc(p: ^Parser) -> ^ast.Stmt {
error(p, body.pos, "the body of a 'do' must be on the same line as the 'for' token")
}
} else {
allow_token(p, .Semicolon)
body = parse_body(p)
}
@@ -2600,6 +2601,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
poly_params: ^ast.Field_List
align: ^ast.Expr
is_maybe: bool
is_no_nil: bool
if allow_token(p, .Open_Paren) {
param_count: int
@@ -2626,6 +2628,11 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
error(p, tag.pos, "duplicate union tag '#%s'", tag.text)
}
is_maybe = true
case "no_nil":
if is_no_nil {
error(p, tag.pos, "duplicate union tag '#%s'", tag.text)
}
is_no_nil = true
case:
error(p, tag.pos, "invalid union tag '#%s", tag.text)
}
@@ -2669,6 +2676,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
ut.where_token = where_token
ut.where_clauses = where_clauses
ut.is_maybe = is_maybe
ut.is_no_nil = is_no_nil
return ut

View File

@@ -273,7 +273,7 @@ get_ptr :: proc(array: $T/[]$E, index: int) -> (value: ^E, ok: bool) {
return
}
as_ptr :: proc(array: $T/[]$E) -> ^E {
as_ptr :: proc(array: $T/[]$E) -> [^]E {
return raw_data(array)
}

View File

@@ -965,6 +965,10 @@ namespace lbAbiArm64 {
}
return false;
}
unsigned is_homogenous_aggregate_small_enough(LLVMTypeRef *base_type_, unsigned member_count_) {
return (member_count_ <= 4);
}
lbArgType compute_return_type(LLVMContextRef c, LLVMTypeRef type, bool return_is_defined) {
LLVMTypeRef homo_base_type = {};
@@ -975,7 +979,16 @@ namespace lbAbiArm64 {
} else if (is_register(type)) {
return non_struct(c, type);
} else if (is_homogenous_aggregate(c, type, &homo_base_type, &homo_member_count)) {
return lb_arg_type_direct(type, LLVMArrayType(homo_base_type, homo_member_count), nullptr, nullptr);
if(is_homogenous_aggregate_small_enough(&homo_base_type, homo_member_count)) {
return lb_arg_type_direct(type, LLVMArrayType(homo_base_type, homo_member_count), nullptr, nullptr);
} else {
//TODO(Platin): do i need to create stuff that can handle the diffrent return type?
// else this needs a fix in llvm_backend_proc as we would need to cast it to the correct array type
//LLVMTypeRef array_type = LLVMArrayType(homo_base_type, homo_member_count);
LLVMAttributeRef attr = lb_create_enum_attribute_with_type(c, "sret", type);
return lb_arg_type_indirect(type, attr);
}
} else {
i64 size = lb_sizeof(type);
if (size <= 16) {

View File

@@ -439,13 +439,14 @@ i32 linker_stage(lbGenerator *gen) {
// so use ld instead.
// :UseLDForShared
linker = "ld";
link_settings = gb_string_appendc(link_settings, "-init '__$startup_runtime' ");
// Shared libraries are .dylib on MacOS and .so on Linux.
#if defined(GB_SYSTEM_OSX)
output_ext = STR_LIT(".dylib");
link_settings = gb_string_appendc(link_settings, "-init '___$startup_runtime' ");
link_settings = gb_string_appendc(link_settings, "-dylib -dynamic ");
#else
output_ext = STR_LIT(".so");
link_settings = gb_string_appendc(link_settings, "-init '__$startup_runtime' ");
link_settings = gb_string_appendc(link_settings, "-shared ");
#endif
} else {

View File

@@ -672,9 +672,9 @@ class WebGLInterface {
this.ctx.texImage3D(target, level, internalformat, width, height, depth, border, format, type, null);
}
},
TexSubImage3D: (target, level, xoffset, yoffset, width, height, depth, format, type, size, data) => {
TexSubImage3D: (target, level, xoffset, yoffset, zoffset, width, height, depth, format, type, size, data) => {
this.assertWebGL2();
this.ctx.texSubImage3D(target, level, xoffset, yoffset, width, height, depth, format, type, this.mem.loadBytes(data, size));
this.ctx.texSubImage3D(target, level, xoffset, yoffset, zoffset, width, height, depth, format, type, this.mem.loadBytes(data, size));
},
CompressedTexImage3D: (target, level, internalformat, width, height, depth, border, imageSize, data) => {
this.assertWebGL2();
@@ -684,12 +684,12 @@ class WebGLInterface {
this.ctx.compressedTexImage3D(target, level, internalformat, width, height, depth, border, null);
}
},
CompressedTexSubImage3D: (target, level, xoffset, yoffset, width, height, depth, format, imageSize, data) => {
CompressedTexSubImage3D: (target, level, xoffset, yoffset, zoffset, width, height, depth, format, imageSize, data) => {
this.assertWebGL2();
if (data) {
this.ctx.compressedTexSubImage3D(target, level, xoffset, yoffset, width, height, depth, format, this.mem.loadBytes(data, imageSize));
this.ctx.compressedTexSubImage3D(target, level, xoffset, yoffset, zoffset, width, height, depth, format, this.mem.loadBytes(data, imageSize));
} else {
this.ctx.compressedTexSubImage3D(target, level, xoffset, yoffset, width, height, depth, format, null);
this.ctx.compressedTexSubImage3D(target, level, xoffset, yoffset, zoffset, width, height, depth, format, null);
}
},
@@ -1031,4 +1031,4 @@ class WebGLInterface {
};
export {WebGLInterface};
export {WebGLInterface};