diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6742b56f3..c62fcd36b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,13 +17,16 @@ jobs: run: ./odin report timeout-minutes: 1 - name: Odin check - run: ./odin check examples/demo/demo.odin -vet + run: ./odin check examples/demo -vet timeout-minutes: 10 - name: Odin run - run: ./odin run examples/demo/demo.odin + run: ./odin run examples/demo timeout-minutes: 10 - name: Odin run -debug - run: ./odin run examples/demo/demo.odin -debug + run: ./odin run examples/demo -debug + timeout-minutes: 10 + - name: Odin check examples/all + run: ./odin check examples/all -strict-style timeout-minutes: 10 - name: Core library tests run: | @@ -54,13 +57,16 @@ jobs: run: ./odin report timeout-minutes: 1 - name: Odin check - run: ./odin check examples/demo/demo.odin -vet + run: ./odin check examples/demo -vet timeout-minutes: 10 - name: Odin run - run: ./odin run examples/demo/demo.odin + run: ./odin run examples/demo timeout-minutes: 10 - name: Odin run -debug - run: ./odin run examples/demo/demo.odin -debug + run: ./odin run examples/demo -debug + timeout-minutes: 10 + - name: Odin check examples/all + run: ./odin check examples/all -strict-style timeout-minutes: 10 - name: Core library tests run: | @@ -73,7 +79,7 @@ jobs: make timeout-minutes: 10 build_windows: - runs-on: windows-latest + runs-on: windows-2019 steps: - uses: actions/checkout@v1 - name: build Odin @@ -91,19 +97,25 @@ jobs: shell: cmd run: | call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat - odin check examples/demo/demo.odin -vet + odin check examples/demo -vet timeout-minutes: 10 - name: Odin run shell: cmd run: | call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat - odin run examples/demo/demo.odin + odin run examples/demo timeout-minutes: 10 - name: Odin run -debug shell: cmd run: | call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat - odin run examples/demo/demo.odin -debug + odin run examples/demo -debug + timeout-minutes: 10 + - name: Odin check examples/all + shell: cmd + run: | + call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat + odin check examples/all -strict-style timeout-minutes: 10 - name: Core library tests shell: cmd diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 2b33c45a8..7175843f5 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -7,7 +7,7 @@ on: jobs: build_windows: - runs-on: windows-latest + runs-on: windows-2019 steps: - uses: actions/checkout@v1 - name: build Odin diff --git a/.gitignore b/.gitignore index 0d606498e..abbdccecd 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,9 @@ # User-specific files (MonoDevelop/Xamarin Studio) *.userprefs +# For macOS +.DS_Store + # Build results [Dd]ebug/ [Dd]ebugPublic/ diff --git a/Makefile b/Makefile index a7aecbb2d..d3d3c6a2d 100644 --- a/Makefile +++ b/Makefile @@ -8,22 +8,34 @@ CC=clang OS=$(shell uname) ifeq ($(OS), Darwin) + ARCH=$(shell uname -m) - LLVM_CONFIG=llvm-config + LLVM_CONFIG= - # LLVM Version Setting - LLVM_VERSION_PATTERN="^11\." 
- LLVM_VERSION="11" + # allow for arm only llvm's with version 13 ifeq ($(ARCH), arm64) - LLVM_VERSION="13" - LLVM_VERSION_PATTERN="^13" - endif + LLVM_VERSIONS = "13.%.%" + else + # allow for x86 / amd64 all llvm versions begining from 11 + LLVM_VERSIONS = "13.%.%" "12.0.1" "11.1.0" + endif - ifneq ($(shell llvm-config --version | grep $(LLVM_VERSION_PATTERN)),) + LLVM_VERSION_PATTERN_SEPERATOR = )|( + LLVM_VERSION_PATTERNS_ESCAPED_DOT = $(subst .,\.,$(LLVM_VERSIONS)) + LLVM_VERSION_PATTERNS_REPLACE_PERCENT = $(subst %,.*,$(LLVM_VERSION_PATTERNS_ESCAPED_DOT)) + LLVM_VERSION_PATTERN_REMOVE_ELEMENTS = $(subst " ",$(LLVM_VERSION_PATTERN_SEPERATOR),$(LLVM_VERSION_PATTERNS_REPLACE_PERCENT)) + LLMV_VERSION_PATTERN_REMOVE_SINGLE_STR = $(subst ",,$(LLVM_VERSION_PATTERN_REMOVE_ELEMENTS)) + LLVM_VERSION_PATTERN = "^(($(LLMV_VERSION_PATTERN_REMOVE_SINGLE_STR)))" + + ifneq ($(shell llvm-config --version | grep -E $(LLVM_VERSION_PATTERN)),) LLVM_CONFIG=llvm-config else - $(error "Requirement: llvm-config must be version $(LLVM_VERSION)") - endif + ifeq ($(ARCH), arm64) + $(error "Requirement: llvm-config must be base version 13 for arm64") + else + $(error "Requirement: llvm-config must be base version greater than 11 for amd64/x86") + endif + endif LDFLAGS:=$(LDFLAGS) -liconv CFLAGS:=$(CFLAGS) $(shell $(LLVM_CONFIG) --cxxflags --ldflags) diff --git a/core/builtin/builtin.odin b/core/builtin/builtin.odin index 74283720f..259fdef37 100644 --- a/core/builtin/builtin.odin +++ b/core/builtin/builtin.odin @@ -2,7 +2,7 @@ package builtin nil :: nil; -false :: 0!==0; +false :: 0!=0; true :: 0==0; ODIN_OS :: ODIN_OS; diff --git a/core/bytes/bytes.odin b/core/bytes/bytes.odin index 1bf11e0b0..09a3ed259 100644 --- a/core/bytes/bytes.odin +++ b/core/bytes/bytes.odin @@ -218,61 +218,37 @@ split_after_n :: proc(s, sep: []byte, n: int, allocator := context.allocator) -> @private -_split_iterator :: proc(s: ^[]byte, sep: []byte, sep_save, n: int) -> (res: []byte, ok: bool) { - s, n := s, n - - if n == 0 { - return - } - - if sep == nil { +_split_iterator :: proc(s: ^[]byte, sep: []byte, sep_save: int) -> (res: []byte, ok: bool) { + if len(sep) == 0 { res = s[:] ok = true s^ = s[len(s):] return } - if n < 0 { - n = count(s^, sep) + 1 - } - - n -= 1 - - i := 0 - for ; i < n; i += 1 { - m := index(s^, sep) - if m < 0 { - break - } + m := index(s^, sep) + if m < 0 { + // not found + res = s[:] + ok = len(res) != 0 + s^ = s[len(s):] + } else { res = s[:m+sep_save] ok = true s^ = s[m+len(sep):] - return } - res = s[:] - ok = res != nil - s^ = s[len(s):] return } split_iterator :: proc(s: ^[]byte, sep: []byte) -> ([]byte, bool) { - return _split_iterator(s, sep, 0, -1) -} - -split_n_iterator :: proc(s: ^[]byte, sep: []byte, n: int) -> ([]byte, bool) { - return _split_iterator(s, sep, 0, n) + return _split_iterator(s, sep, 0) } split_after_iterator :: proc(s: ^[]byte, sep: []byte) -> ([]byte, bool) { - return _split_iterator(s, sep, len(sep), -1) + return _split_iterator(s, sep, len(sep)) } -split_after_n_iterator :: proc(s: ^[]byte, sep: []byte, n: int) -> ([]byte, bool) { - return _split_iterator(s, sep, len(sep), n) -} - - index_byte :: proc(s: []byte, c: byte) -> int { for i := 0; i < len(s); i += 1 { diff --git a/core/c/libc/stdio.odin b/core/c/libc/stdio.odin index 33be34625..9c4a1a708 100644 --- a/core/c/libc/stdio.odin +++ b/core/c/libc/stdio.odin @@ -149,7 +149,7 @@ foreign libc { putchar :: proc() -> int --- puts :: proc(s: cstring) -> int --- ungetc :: proc(c: int, stream: ^FILE) -> int --- - fread :: proc(ptr: 
rawptr, size: size_t, stream: ^FILE) -> size_t --- + fread :: proc(ptr: rawptr, size: size_t, nmemb: size_t, stream: ^FILE) -> size_t --- fwrite :: proc(ptr: rawptr, size: size_t, nmemb: size_t, stream: ^FILE) -> size_t --- // 7.21.9 File positioning functions diff --git a/core/compress/gzip/gzip.odin b/core/compress/gzip/gzip.odin index 1a72500bf..96e9c49a0 100644 --- a/core/compress/gzip/gzip.odin +++ b/core/compress/gzip/gzip.odin @@ -66,7 +66,8 @@ OS :: enum u8 { _Unknown = 14, Unknown = 255, } -OS_Name :: #partial [OS]string{ +OS_Name :: #sparse[OS]string{ + ._Unknown = "", .FAT = "FAT", .Amiga = "Amiga", .VMS = "VMS/OpenVMS", diff --git a/core/compress/zlib/zlib.odin b/core/compress/zlib/zlib.odin index 9ae980042..d4c0f332c 100644 --- a/core/compress/zlib/zlib.odin +++ b/core/compress/zlib/zlib.odin @@ -111,9 +111,9 @@ ZFAST_MASK :: ((1 << ZFAST_BITS) - 1) */ Huffman_Table :: struct { fast: [1 << ZFAST_BITS]u16, - firstcode: [16]u16, + firstcode: [17]u16, maxcode: [17]int, - firstsymbol: [16]u16, + firstsymbol: [17]u16, size: [288]u8, value: [288]u16, } @@ -244,7 +244,7 @@ allocate_huffman_table :: proc(allocator := context.allocator) -> (z: ^Huffman_T @(optimization_mode="speed") build_huffman :: proc(z: ^Huffman_Table, code_lengths: []u8) -> (err: Error) { sizes: [HUFFMAN_MAX_BITS+1]int - next_code: [HUFFMAN_MAX_BITS]int + next_code: [HUFFMAN_MAX_BITS+1]int k := int(0) @@ -256,14 +256,14 @@ build_huffman :: proc(z: ^Huffman_Table, code_lengths: []u8) -> (err: Error) { } sizes[0] = 0 - for i in 1..<(HUFFMAN_MAX_BITS+1) { + for i in 1 ..< HUFFMAN_MAX_BITS { if sizes[i] > (1 << uint(i)) { return E_Deflate.Huffman_Bad_Sizes } } code := int(0) - for i in 1.. (it: Bit_Array_Iterator) { + return Bit_Array_Iterator { array = ba } +} + +/* + In: + - it: ^Bit_Array_Iterator - the iterator struct that holds the state. + + Out: + - set: bool - the state of the bit at `index` + - index: int - the next bit of the Bit_Array referenced by `it`. + - ok: bool - `true` if the iterator returned a valid index, + `false` if there were no more bits +*/ +iterate_by_all :: proc (it: ^Bit_Array_Iterator) -> (set: bool, index: int, ok: bool) { + index = it.word_idx * NUM_BITS + int(it.bit_idx) + it.array.bias + if index > it.array.max_index { return false, 0, false } + + word := it.array.bits[it.word_idx] if len(it.array.bits) > it.word_idx else 0 + set = (word >> it.bit_idx & 1) == 1 + + it.bit_idx += 1 + if it.bit_idx >= NUM_BITS { + it.bit_idx = 0 + it.word_idx += 1 + } + + return set, index, true +} + +/* + In: + - it: ^Bit_Array_Iterator - the iterator struct that holds the state. + + Out: + - index: int - the next set bit of the Bit_Array referenced by `it`. + - ok: bool - `true` if the iterator returned a valid index, + `false` if there were no more bits set +*/ +iterate_by_set :: proc (it: ^Bit_Array_Iterator) -> (index: int, ok: bool) { + return iterate_internal_(it, true) +} + +/* + In: + - it: ^Bit_Array_Iterator - the iterator struct that holds the state. + + Out: + - index: int - the next unset bit of the Bit_Array referenced by `it`. + - ok: bool - `true` if the iterator returned a valid index, + `false` if there were no more unset bits +*/ +iterate_by_unset:: proc (it: ^Bit_Array_Iterator) -> (index: int, ok: bool) { + return iterate_internal_(it, false) +} + +@(private="file") +iterate_internal_ :: proc (it: ^Bit_Array_Iterator, $ITERATE_SET_BITS: bool) -> (index: int, ok: bool) { + word := it.array.bits[it.word_idx] if len(it.array.bits) > it.word_idx else 0 + when ! 
ITERATE_SET_BITS { word = ~word } + + // if the word is empty or we have already gone over all the bits in it, + // it.bit_idx is greater than the index of any set bit in the word, + // meaning that word >> it.bit_idx == 0. + for it.word_idx < len(it.array.bits) && word >> it.bit_idx == 0 { + it.word_idx += 1 + it.bit_idx = 0 + word = it.array.bits[it.word_idx] if len(it.array.bits) > it.word_idx else 0 + when ! ITERATE_SET_BITS { word = ~word } + } + + // if we are iterating the set bits, reaching the end of the array means we have no more bits to check + when ITERATE_SET_BITS { + if it.word_idx >= len(it.array.bits) { + return 0, false + } + } + + // reaching here means that the word has some set bits + it.bit_idx += uint(intrinsics.count_trailing_zeros(word >> it.bit_idx)) + index = it.word_idx * NUM_BITS + int(it.bit_idx) + it.array.bias + + it.bit_idx += 1 + if it.bit_idx >= NUM_BITS { + it.bit_idx = 0 + it.word_idx += 1 + } + return index, index <= it.array.max_index +} + + /* In: - ba: ^Bit_Array - a pointer to the Bit Array @@ -70,6 +178,7 @@ set :: proc(ba: ^Bit_Array, #any_int index: uint, allocator := context.allocator resize_if_needed(ba, leg_index) or_return + ba.max_index = max(idx, ba.max_index) ba.bits[leg_index] |= 1 << uint(bit_index) return true } @@ -87,8 +196,9 @@ create :: proc(max_index: int, min_index := 0, allocator := context.allocator) - res = Bit_Array{ bias = min_index, + max_index = max_index, } - return res, resize_if_needed(&res, size_in_bits) + return res, resize_if_needed(&res, legs) } /* @@ -121,4 +231,4 @@ resize_if_needed :: proc(ba: ^Bit_Array, legs: int, allocator := context.allocat resize(&ba.bits, legs + 1) } return len(ba.bits) > legs -} \ No newline at end of file +} diff --git a/core/container/lru/lru_cache.odin b/core/container/lru/lru_cache.odin new file mode 100644 index 000000000..f8e6f7b46 --- /dev/null +++ b/core/container/lru/lru_cache.odin @@ -0,0 +1,192 @@ +package container_lru + +import "core:runtime" +import "core:intrinsics" +_ :: runtime +_ :: intrinsics + +Node :: struct($Key, $Value: typeid) where intrinsics.type_is_valid_map_key(Key) { + prev, next: ^Node(Key, Value), + key: Key, + value: Value, +} + +// Cache is an LRU cache. It automatically removes entries as new entries are +// added if the capacity is reached. Entries are removed based on how recently +// they were used where the oldest entries are removed first. +Cache :: struct($Key, $Value: typeid) where intrinsics.type_is_valid_map_key(Key) { + head: ^Node(Key, Value), + tail: ^Node(Key, Value), + + entries: map[Key]^Node(Key, Value), + + count: int, + capacity: int, + + node_allocator: runtime.Allocator, + + on_remove: proc(key: Key, value: Value, user_data: rawptr), + on_remove_user_data: rawptr, +} + +// init initializes a Cache +init :: proc(c: ^$C/Cache($Key, $Value), capacity: int, entries_allocator := context.allocator, node_allocator := context.allocator) { + c.entries.allocator = entries_allocator + c.node_allocator = node_allocator + c.capacity = capacity +} + +// destroy deinitializes a Cache +destroy :: proc(c: ^$C/Cache($Key, $Value), call_on_remove: bool) { + clear(c, call_on_remove) + delete(c.entries) +} + +// clear the contents of a Cache +clear :: proc(c: ^$C/Cache($Key, $Value), call_on_remove: bool) { + for _, node in c.entries { + if call_on_remove { + _call_on_remove(c, node) + } + free(node, c.node_allocator) + } + runtime.clear(&c.entries) + c.head = nil + c.tail = nil + c.count = 0 +} + +// set the given key value pair. 
This operation updates the recent usage of the item. +set :: proc(c: ^$C/Cache($Key, $Value), key: Key, value: Value) -> runtime.Allocator_Error { + if e, ok := c.entries[key]; ok { + e.value = value + return nil + } + + e := new(Node(Key, Value), c.node_allocator) or_return + e.key = key + e.value = value + + _push_front_node(c, e) + if c.count > c.capacity { + _remove_node(c, c.tail) + } + + c.entries[key] = e + return nil +} + +// get a value from the cache from a given key. This operation updates the usage of the item. +get :: proc(c: ^$C/Cache($Key, $Value), key: Key) -> (value: Value, ok: bool) #optional_ok { + e: ^Node(Key, Value) + e, ok = c.entries[key] + if !ok { + return + } + _pop_node(c, e) + _push_front_node(c, e) + return e.value, true +} + +// get_ptr gets the pointer to a value in the cache from a given key. This operation updates the usage of the item. +get_ptr :: proc(c: ^$C/Cache($Key, $Value), key: Key) -> (value: ^Value, ok: bool) #optional_ok { + e: ^Node(Key, Value) + e, ok = c.entries[key] + if !ok { + return + } + _pop_node(c, e) + _push_front_node(c, e) + return &e.value, true +} + +// peek gets the value from the cache from a given key without updating the recent usage. +peek :: proc(c: ^$C/Cache($Key, $Value), key: Key) -> (value: Value, ok: bool) #optional_ok { + e: ^Node(Key, Value) + e, ok = c.entries[key] + if !ok { + return + } + return e.value, true +} + +// exists checks for the existence of a value from a given key without updating the recent usage. +exists :: proc(c: ^$C/Cache($Key, $Value), key: Key) -> bool { + return key in c.entries +} + +// remove removes an item from the cache. +remove :: proc(c: ^$C/Cache($Key, $Value), key: Key) -> bool { + e, ok := c.entries[key] + if !ok { + return false + } + _remove_node(c, e) + return true +} + + +@(private) +_remove_node :: proc(c: ^$C/Cache($Key, $Value), node: ^Node(Key, Value)) { + if c.head == node { + c.head = node.next + } + if c.tail == node { + c.tail = node.prev + } + if node.prev != nil { + node.prev.next = node.next + } + if node.next != nil { + node.next.prev = node.prev + } + node.prev = nil + node.next = nil + + c.count -= 1 + + delete_key(&c.entries, node.key) + + _call_on_remove(c, node) + + free(node, c.node_allocator) + +} + +@(private) +_call_on_remove :: proc(c: ^$C/Cache($Key, $Value), node: ^Node(Key, Value)) { + if c.on_remove != nil { + c.on_remove(node.key, node.value, c.on_remove_user_data) + } +} + +@(private) +_push_front_node :: proc(c: ^$C/Cache($Key, $Value), e: ^Node(Key, Value)) { + if c.head != nil { + e.next = c.head + e.next.prev = e + } + c.head = e + if c.tail == nil { + c.tail = e + } + e.prev = nil + + c.count += 1 +} + +@(private) +_pop_node :: proc(c: ^$C/Cache($Key, $Value), e: ^Node(Key, Value)) { + if e == nil { + return + } + if e.prev != nil { + e.prev.next = e.next + } + + if e.next != nil { + e.next.prev = e.prev + } + e.prev = nil + e.next = nil +} \ No newline at end of file diff --git a/core/container/queue/queue.odin b/core/container/queue/queue.odin index feca6934c..8ca3a85ac 100644 --- a/core/container/queue/queue.odin +++ b/core/container/queue/queue.odin @@ -2,6 +2,7 @@ package container_queue import "core:builtin" import "core:runtime" +_ :: runtime // Dynamically resizable double-ended queue/ring-buffer Queue :: struct($T: typeid) { diff --git a/core/container/topological_sort/topological_sort.odin b/core/container/topological_sort/topological_sort.odin new file mode 100644 index 000000000..4b69930d5 --- /dev/null +++ 
b/core/container/topological_sort/topological_sort.odin @@ -0,0 +1,98 @@ +// The following is a generic O(V+E) topological sorter implementation. +// This is the fastest known method for topological sorting and Odin's +// map type is being used to accelerate lookups. +package container_topological_sort + +import "core:intrinsics" +import "core:runtime" +_ :: intrinsics +_ :: runtime + + +Relations :: struct($K: typeid) where intrinsics.type_is_valid_map_key(K) { + dependents: map[K]bool, + dependencies: int, +} + +Sorter :: struct(K: typeid) where intrinsics.type_is_valid_map_key(K) { + relations: map[K]Relations(K), + dependents_allocator: runtime.Allocator, +} + +@(private="file") +make_relations :: proc(sorter: ^$S/Sorter($K)) -> (r: Relations(K)) { + r.dependents.allocator = sorter.dependents_allocator + return +} + + +init :: proc(sorter: ^$S/Sorter($K)) { + sorter.relations = make(map[K]Relations(K)) + sorter.dependents_allocator = context.allocator +} + +destroy :: proc(sorter: ^$S/Sorter($K)) { + for _, v in &sorter.relations { + delete(v.dependents) + } + delete(sorter.relations) +} + +add_key :: proc(sorter: ^$S/Sorter($K), key: K) -> bool { + if key in sorter.relations { + return false + } + sorter.relations[key] = make_relations(sorter) + return true +} + +add_dependency :: proc(sorter: ^$S/Sorter($K), key, dependency: K) -> bool { + if key == dependency { + return false + } + + find := &sorter.relations[dependency] + if find == nil { + find = map_insert(&sorter.relations, dependency, make_relations(sorter)) + } + + if find.dependents[key] { + return true + } + find.dependents[key] = true + + find = &sorter.relations[key] + if find == nil { + find = map_insert(&sorter.relations, key, make_relations(sorter)) + } + + find.dependencies += 1 + + return true +} + +sort :: proc(sorter: ^$S/Sorter($K)) -> (sorted, cycled: [dynamic]K) { + relations := &sorter.relations + + for k, v in relations { + if v.dependencies == 0 { + append(&sorted, k) + } + } + + for root in &sorted do for k, _ in relations[root].dependents { + relation := &relations[k] + relation.dependencies -= 1 + if relation.dependencies == 0 { + append(&sorted, k) + } + } + + for k, v in relations { + if v.dependencies != 0 { + append(&cycled, k) + } + } + + return +} \ No newline at end of file diff --git a/core/crypto/blake/blake.odin b/core/crypto/blake/blake.odin index 81924ab1e..5fc0a02b9 100644 --- a/core/crypto/blake/blake.odin +++ b/core/crypto/blake/blake.odin @@ -44,7 +44,7 @@ hash_bytes_224 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_224]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224(transmute([]byte)(data), hash); + hash_bytes_to_buffer_224(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_224 will hash the given input and write the @@ -123,7 +123,7 @@ hash_bytes_256 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_256]byte { // computed hash to the second parameter. 
// It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash); + hash_bytes_to_buffer_256(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_256 will hash the given input and write the @@ -202,7 +202,7 @@ hash_bytes_384 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_384]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_384(transmute([]byte)(data), hash); + hash_bytes_to_buffer_384(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_384 will hash the given input and write the @@ -281,7 +281,7 @@ hash_bytes_512 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_512]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512(transmute([]byte)(data), hash); + hash_bytes_to_buffer_512(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_512 will hash the given input and write the diff --git a/core/crypto/blake2b/blake2b.odin b/core/crypto/blake2b/blake2b.odin index 6d4689b88..e75d74197 100644 --- a/core/crypto/blake2b/blake2b.odin +++ b/core/crypto/blake2b/blake2b.odin @@ -46,7 +46,7 @@ hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash); + hash_bytes_to_buffer(transmute([]byte)(data), hash) } // hash_bytes_to_buffer will hash the given input and write the diff --git a/core/crypto/blake2s/blake2s.odin b/core/crypto/blake2s/blake2s.odin index ad2e800fd..831335081 100644 --- a/core/crypto/blake2s/blake2s.odin +++ b/core/crypto/blake2s/blake2s.odin @@ -47,7 +47,7 @@ hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash); + hash_bytes_to_buffer(transmute([]byte)(data), hash) } // hash_bytes_to_buffer will hash the given input and write the diff --git a/core/crypto/gost/gost.odin b/core/crypto/gost/gost.odin index eed684f72..1d0274fae 100644 --- a/core/crypto/gost/gost.odin +++ b/core/crypto/gost/gost.odin @@ -41,7 +41,7 @@ hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash); + hash_bytes_to_buffer(transmute([]byte)(data), hash) } // hash_bytes_to_buffer will hash the given input and write the diff --git a/core/crypto/groestl/groestl.odin b/core/crypto/groestl/groestl.odin index 5434e31e0..8e5a2440d 100644 --- a/core/crypto/groestl/groestl.odin +++ b/core/crypto/groestl/groestl.odin @@ -44,7 +44,7 @@ hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { // computed hash to the second parameter. 
// It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224(transmute([]byte)(data), hash); + hash_bytes_to_buffer_224(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_224 will hash the given input and write the @@ -123,7 +123,7 @@ hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash); + hash_bytes_to_buffer_256(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_256 will hash the given input and write the @@ -202,7 +202,7 @@ hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_384(transmute([]byte)(data), hash); + hash_bytes_to_buffer_384(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_384 will hash the given input and write the @@ -281,7 +281,7 @@ hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512(transmute([]byte)(data), hash); + hash_bytes_to_buffer_512(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_512 will hash the given input and write the diff --git a/core/crypto/haval/haval.odin b/core/crypto/haval/haval.odin index 442a348e9..811ecf95d 100644 --- a/core/crypto/haval/haval.odin +++ b/core/crypto/haval/haval.odin @@ -50,7 +50,7 @@ hash_bytes_128_3 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_128_3 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_128_3(transmute([]byte)(data), hash); + hash_bytes_to_buffer_128_3(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_128_3 will hash the given input and write the @@ -135,7 +135,7 @@ hash_bytes_128_4 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_128_4 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_128_4(transmute([]byte)(data), hash); + hash_bytes_to_buffer_128_4(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_128_4 will hash the given input and write the @@ -220,7 +220,7 @@ hash_bytes_128_5 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_128_5 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_128_5(transmute([]byte)(data), hash); + hash_bytes_to_buffer_128_5(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_128_5 will hash the given input and write the @@ -305,7 +305,7 @@ hash_bytes_160_3 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { // computed hash to the second parameter. 
// It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_160_3 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_160_3(transmute([]byte)(data), hash); + hash_bytes_to_buffer_160_3(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_160_3 will hash the given input and write the @@ -390,7 +390,7 @@ hash_bytes_160_4 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_160_4 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_160_4(transmute([]byte)(data), hash); + hash_bytes_to_buffer_160_4(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_160_4 will hash the given input and write the @@ -475,7 +475,7 @@ hash_bytes_160_5 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_160_5 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_160_5(transmute([]byte)(data), hash); + hash_bytes_to_buffer_160_5(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_160_5 will hash the given input and write the @@ -560,7 +560,7 @@ hash_bytes_192_3 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_192_3 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_192_3(transmute([]byte)(data), hash); + hash_bytes_to_buffer_192_3(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_192_3 will hash the given input and write the @@ -645,7 +645,7 @@ hash_bytes_192_4 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_192_4 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_192_4(transmute([]byte)(data), hash); + hash_bytes_to_buffer_192_4(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_192_4 will hash the given input and write the @@ -730,7 +730,7 @@ hash_bytes_192_5 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_192_5 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_192_5(transmute([]byte)(data), hash); + hash_bytes_to_buffer_192_5(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_192_5 will hash the given input and write the @@ -815,7 +815,7 @@ hash_bytes_224_3 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_224_3 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224_3(transmute([]byte)(data), hash); + hash_bytes_to_buffer_224_3(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_224_3 will hash the given input and write the @@ -900,7 +900,7 @@ hash_bytes_224_4 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { // computed hash to the second parameter. 
// It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_224_4 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224_4(transmute([]byte)(data), hash); + hash_bytes_to_buffer_224_4(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_224_4 will hash the given input and write the @@ -985,7 +985,7 @@ hash_bytes_224_5 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_224_5 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224_5(transmute([]byte)(data), hash); + hash_bytes_to_buffer_224_5(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_224_5 will hash the given input and write the @@ -1070,7 +1070,7 @@ hash_bytes_256_3 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_256_3 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256_3(transmute([]byte)(data), hash); + hash_bytes_to_buffer_256_3(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_256_3 will hash the given input and write the @@ -1155,7 +1155,7 @@ hash_bytes_256_4 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_256_4 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256_4(transmute([]byte)(data), hash); + hash_bytes_to_buffer_256_4(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_256_4 will hash the given input and write the @@ -1240,7 +1240,7 @@ hash_bytes_256_5 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_256_5 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256_5(transmute([]byte)(data), hash); + hash_bytes_to_buffer_256_5(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_256_5 will hash the given input and write the diff --git a/core/crypto/jh/jh.odin b/core/crypto/jh/jh.odin index 4ebc0e5cb..42c2d1d34 100644 --- a/core/crypto/jh/jh.odin +++ b/core/crypto/jh/jh.odin @@ -44,7 +44,7 @@ hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224(transmute([]byte)(data), hash); + hash_bytes_to_buffer_224(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_224 will hash the given input and write the @@ -123,7 +123,7 @@ hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash); + hash_bytes_to_buffer_256(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_256 will hash the given input and write the @@ -202,7 +202,7 @@ hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { // computed hash to the second parameter. 
// It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_384(transmute([]byte)(data), hash); + hash_bytes_to_buffer_384(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_384 will hash the given input and write the @@ -281,7 +281,7 @@ hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512(transmute([]byte)(data), hash); + hash_bytes_to_buffer_512(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_512 will hash the given input and write the diff --git a/core/crypto/keccak/keccak.odin b/core/crypto/keccak/keccak.odin index f5d4826b1..aeb5aac52 100644 --- a/core/crypto/keccak/keccak.odin +++ b/core/crypto/keccak/keccak.odin @@ -49,7 +49,7 @@ hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224(transmute([]byte)(data), hash); + hash_bytes_to_buffer_224(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_224 will hash the given input and write the @@ -131,7 +131,7 @@ hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash); + hash_bytes_to_buffer_256(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_256 will hash the given input and write the @@ -213,7 +213,7 @@ hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_384(transmute([]byte)(data), hash); + hash_bytes_to_buffer_384(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_384 will hash the given input and write the @@ -295,7 +295,7 @@ hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512(transmute([]byte)(data), hash); + hash_bytes_to_buffer_512(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_512 will hash the given input and write the diff --git a/core/crypto/md2/md2.odin b/core/crypto/md2/md2.odin index 102c1b8b4..711e6e9f6 100644 --- a/core/crypto/md2/md2.odin +++ b/core/crypto/md2/md2.odin @@ -40,7 +40,7 @@ hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { // computed hash to the second parameter. 
// It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash); + hash_bytes_to_buffer(transmute([]byte)(data), hash) } // hash_bytes_to_buffer will hash the given input and write the diff --git a/core/crypto/md4/md4.odin b/core/crypto/md4/md4.odin index d944daa1d..b2651225b 100644 --- a/core/crypto/md4/md4.odin +++ b/core/crypto/md4/md4.odin @@ -44,7 +44,7 @@ hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash); + hash_bytes_to_buffer(transmute([]byte)(data), hash) } // hash_bytes_to_buffer will hash the given input and write the diff --git a/core/crypto/md5/md5.odin b/core/crypto/md5/md5.odin index 9129e6384..30a556102 100644 --- a/core/crypto/md5/md5.odin +++ b/core/crypto/md5/md5.odin @@ -43,7 +43,7 @@ hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash); + hash_bytes_to_buffer(transmute([]byte)(data), hash) } // hash_bytes_to_buffer will hash the given input and write the diff --git a/core/crypto/ripemd/ripemd.odin b/core/crypto/ripemd/ripemd.odin index c475c4803..702d29037 100644 --- a/core/crypto/ripemd/ripemd.odin +++ b/core/crypto/ripemd/ripemd.odin @@ -45,7 +45,7 @@ hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_128 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_128(transmute([]byte)(data), hash); + hash_bytes_to_buffer_128(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_128 will hash the given input and write the @@ -121,7 +121,7 @@ hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_160 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_160(transmute([]byte)(data), hash); + hash_bytes_to_buffer_160(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_160 will hash the given input and write the @@ -197,7 +197,7 @@ hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash); + hash_bytes_to_buffer_256(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_256 will hash the given input and write the @@ -273,7 +273,7 @@ hash_bytes_320 :: proc(data: []byte) -> [DIGEST_SIZE_320]byte { // computed hash to the second parameter. 
// It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_320 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_320(transmute([]byte)(data), hash); + hash_bytes_to_buffer_320(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_320 will hash the given input and write the diff --git a/core/crypto/sha1/sha1.odin b/core/crypto/sha1/sha1.odin index e8df3c7f6..b0dbd7dc8 100644 --- a/core/crypto/sha1/sha1.odin +++ b/core/crypto/sha1/sha1.odin @@ -43,7 +43,7 @@ hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash); + hash_bytes_to_buffer(transmute([]byte)(data), hash) } // hash_bytes_to_buffer will hash the given input and write the diff --git a/core/crypto/sha2/sha2.odin b/core/crypto/sha2/sha2.odin index 2178b70b5..7c7b2da81 100644 --- a/core/crypto/sha2/sha2.odin +++ b/core/crypto/sha2/sha2.odin @@ -48,7 +48,7 @@ hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224(transmute([]byte)(data), hash); + hash_bytes_to_buffer_224(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_224 will hash the given input and write the @@ -127,7 +127,7 @@ hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash); + hash_bytes_to_buffer_256(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_256 will hash the given input and write the @@ -206,7 +206,7 @@ hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_384(transmute([]byte)(data), hash); + hash_bytes_to_buffer_384(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_384 will hash the given input and write the @@ -285,7 +285,7 @@ hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512(transmute([]byte)(data), hash); + hash_bytes_to_buffer_512(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_512 will hash the given input and write the diff --git a/core/crypto/sha3/sha3.odin b/core/crypto/sha3/sha3.odin index 2eceeaff6..1202f8b23 100644 --- a/core/crypto/sha3/sha3.odin +++ b/core/crypto/sha3/sha3.odin @@ -47,7 +47,7 @@ hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { // computed hash to the second parameter. 
// It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_224(transmute([]byte)(data), hash); + hash_bytes_to_buffer_224(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_224 will hash the given input and write the @@ -126,7 +126,7 @@ hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash); + hash_bytes_to_buffer_256(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_256 will hash the given input and write the @@ -205,7 +205,7 @@ hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_384(transmute([]byte)(data), hash); + hash_bytes_to_buffer_384(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_384 will hash the given input and write the @@ -284,7 +284,7 @@ hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512(transmute([]byte)(data), hash); + hash_bytes_to_buffer_512(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_512 will hash the given input and write the diff --git a/core/crypto/shake/shake.odin b/core/crypto/shake/shake.odin index 9fdc3ebf1..525dcfbd3 100644 --- a/core/crypto/shake/shake.odin +++ b/core/crypto/shake/shake.odin @@ -46,7 +46,7 @@ hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_128 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_128(transmute([]byte)(data), hash); + hash_bytes_to_buffer_128(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_128 will hash the given input and write the @@ -128,7 +128,7 @@ hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash); + hash_bytes_to_buffer_256(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_256 will hash the given input and write the diff --git a/core/crypto/sm3/sm3.odin b/core/crypto/sm3/sm3.odin index e72973e33..74c9f22e2 100644 --- a/core/crypto/sm3/sm3.odin +++ b/core/crypto/sm3/sm3.odin @@ -42,7 +42,7 @@ hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { // computed hash to the second parameter. 
// It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash); + hash_bytes_to_buffer(transmute([]byte)(data), hash) } // hash_bytes_to_buffer will hash the given input and write the diff --git a/core/crypto/streebog/streebog.odin b/core/crypto/streebog/streebog.odin index deb71120d..f85977cba 100644 --- a/core/crypto/streebog/streebog.odin +++ b/core/crypto/streebog/streebog.odin @@ -44,7 +44,7 @@ hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_256(transmute([]byte)(data), hash); + hash_bytes_to_buffer_256(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_256 will hash the given input and write the @@ -122,7 +122,7 @@ hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_512(transmute([]byte)(data), hash); + hash_bytes_to_buffer_512(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_512 will hash the given input and write the diff --git a/core/crypto/tiger/tiger.odin b/core/crypto/tiger/tiger.odin index 4ea80c66c..cf6159fad 100644 --- a/core/crypto/tiger/tiger.odin +++ b/core/crypto/tiger/tiger.odin @@ -45,7 +45,7 @@ hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_128 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_128(transmute([]byte)(data), hash); + hash_bytes_to_buffer_128(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_128 will hash the given input and write the @@ -124,7 +124,7 @@ hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_160 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_160(transmute([]byte)(data), hash); + hash_bytes_to_buffer_160(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_160 will hash the given input and write the @@ -203,7 +203,7 @@ hash_bytes_192 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_192 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_192(transmute([]byte)(data), hash); + hash_bytes_to_buffer_192(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_192 will hash the given input and write the diff --git a/core/crypto/tiger2/tiger2.odin b/core/crypto/tiger2/tiger2.odin index 84333f344..e8f2c4edb 100644 --- a/core/crypto/tiger2/tiger2.odin +++ b/core/crypto/tiger2/tiger2.odin @@ -45,7 +45,7 @@ hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { // computed hash to the second parameter. 
// It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_128 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_128(transmute([]byte)(data), hash); + hash_bytes_to_buffer_128(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_128 will hash the given input and write the @@ -124,7 +124,7 @@ hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_160 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_160(transmute([]byte)(data), hash); + hash_bytes_to_buffer_160(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_160 will hash the given input and write the @@ -203,7 +203,7 @@ hash_bytes_192 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer_192 :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer_192(transmute([]byte)(data), hash); + hash_bytes_to_buffer_192(transmute([]byte)(data), hash) } // hash_bytes_to_buffer_192 will hash the given input and write the diff --git a/core/crypto/whirlpool/whirlpool.odin b/core/crypto/whirlpool/whirlpool.odin index 255f57bc2..0cfef7c6b 100644 --- a/core/crypto/whirlpool/whirlpool.odin +++ b/core/crypto/whirlpool/whirlpool.odin @@ -42,7 +42,7 @@ hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { // computed hash to the second parameter. // It requires that the destination buffer is at least as big as the digest size hash_string_to_buffer :: proc(data: string, hash: []byte) { - hash_bytes_to_buffer(transmute([]byte)(data), hash); + hash_bytes_to_buffer(transmute([]byte)(data), hash) } // hash_bytes_to_buffer will hash the given input and write the diff --git a/core/fmt/fmt.odin b/core/fmt/fmt.odin index 2cc192c12..932fc0bb8 100644 --- a/core/fmt/fmt.odin +++ b/core/fmt/fmt.odin @@ -11,6 +11,7 @@ import "core:time" import "core:unicode/utf8" import "core:intrinsics" +// Internal data structure that stores the required information for formatted printing Info :: struct { minus: bool, plus: bool, @@ -46,9 +47,13 @@ Register_User_Formatter_Error :: enum { // it is prefixed with `_` rather than marked with a private attribute so that users can access it if necessary _user_formatters: ^map[typeid]User_Formatter +// set_user_formatters assigns m to a global value allowing the user to have custom print formatting for specific +// types set_user_formatters :: proc(m: ^map[typeid]User_Formatter) { _user_formatters = m } +// register_user_formatter assigns a formatter to a specific typeid. set_user_formatters must be called +// before any use of this procedure. 
register_user_formatter :: proc(id: typeid, formatter: User_Formatter) -> Register_User_Formatter_Error { if _user_formatters == nil { return .No_User_Formatter @@ -61,7 +66,7 @@ register_user_formatter :: proc(id: typeid, formatter: User_Formatter) -> Regist } -// aprint* procedures return a string that was allocated with the current context +// aprint procedure returns a string that was allocated with the current context // They must be freed accordingly aprint :: proc(args: ..any, sep := " ") -> string { str: strings.Builder @@ -69,12 +74,16 @@ aprint :: proc(args: ..any, sep := " ") -> string { sbprint(buf=&str, args=args, sep=sep) return strings.to_string(str) } +// aprintln procedure returns a string that was allocated with the current context +// They must be freed accordingly aprintln :: proc(args: ..any, sep := " ") -> string { str: strings.Builder strings.init_builder(&str) sbprintln(buf=&str, args=args, sep=sep) return strings.to_string(str) } +// aprintf procedure returns a string that was allocated with the current context +// They must be freed accordingly aprintf :: proc(fmt: string, args: ..any) -> string { str: strings.Builder strings.init_builder(&str) @@ -83,19 +92,21 @@ aprintf :: proc(fmt: string, args: ..any) -> string { } -// tprint* procedures return a string that was allocated with the current context's temporary allocator +// tprint procedure returns a string that was allocated with the current context's temporary allocator tprint :: proc(args: ..any, sep := " ") -> string { str: strings.Builder strings.init_builder(&str, context.temp_allocator) sbprint(buf=&str, args=args, sep=sep) return strings.to_string(str) } +// tprintln procedure returns a string that was allocated with the current context's temporary allocator tprintln :: proc(args: ..any, sep := " ") -> string { str: strings.Builder strings.init_builder(&str, context.temp_allocator) sbprintln(buf=&str, args=args, sep=sep) return strings.to_string(str) } +// tprintf procedure returns a string that was allocated with the current context's temporary allocator tprintf :: proc(fmt: string, args: ..any) -> string { str: strings.Builder strings.init_builder(&str, context.temp_allocator) @@ -104,21 +115,24 @@ tprintf :: proc(fmt: string, args: ..any) -> string { } -// bprint* procedures return a string using a buffer from an array +// bprint procedure returns a string using a buffer from an array bprint :: proc(buf: []byte, args: ..any, sep := " ") -> string { sb := strings.builder_from_slice(buf[0:len(buf)]) return sbprint(buf=&sb, args=args, sep=sep) } +// bprintln procedure returns a string using a buffer from an array bprintln :: proc(buf: []byte, args: ..any, sep := " ") -> string { sb := strings.builder_from_slice(buf[0:len(buf)]) return sbprintln(buf=&sb, args=args, sep=sep) } +// bprintf procedure returns a string using a buffer from an array bprintf :: proc(buf: []byte, fmt: string, args: ..any) -> string { sb := strings.builder_from_slice(buf[0:len(buf)]) return sbprintf(&sb, fmt, ..args) } +// formatted assert assertf :: proc(condition: bool, fmt: string, args: ..any, loc := #caller_location) -> bool { if !condition { p := context.assertion_failure_proc @@ -131,6 +145,7 @@ assertf :: proc(condition: bool, fmt: string, args: ..any, loc := #caller_locati return condition } +// formatted panic panicf :: proc(fmt: string, args: ..any, loc := #caller_location) -> ! 
{ - - +// sbprint formats using the default print settings and writes to buf sbprint :: proc(buf: ^strings.Builder, args: ..any, sep := " ") -> string { wprint(w=strings.to_writer(buf), args=args, sep=sep) return strings.to_string(buf^) } +// sbprintln formats using the default print settings and writes to buf sbprintln :: proc(buf: ^strings.Builder, args: ..any, sep := " ") -> string { wprintln(w=strings.to_writer(buf), args=args, sep=sep) return strings.to_string(buf^) } +// sbprintf formats according to the specified format string and writes to buf sbprintf :: proc(buf: ^strings.Builder, fmt: string, args: ..any) -> string { wprintf(w=strings.to_writer(buf), fmt=fmt, args=args) return strings.to_string(buf^) } +// wprint formats using the default print settings and writes to w wprint :: proc(w: io.Writer, args: ..any, sep := " ") -> int { fi: Info fi.writer = w @@ -194,6 +211,7 @@ wprint :: proc(w: io.Writer, args: ..any, sep := " ") -> int { return int(size1 - size0) } +// wprintln formats using the default print settings and writes to w wprintln :: proc(w: io.Writer, args: ..any, sep := " ") -> int { fi: Info fi.writer = w @@ -214,6 +232,7 @@ wprintln :: proc(w: io.Writer, args: ..any, sep := " ") -> int { return int(size1 - size0) } +// wprintf formats according to the specified format string and writes to w wprintf :: proc(w: io.Writer, fmt: string, args: ..any) -> int { fi: Info arg_index: int = 0 @@ -493,11 +512,13 @@ wprintf :: proc(w: io.Writer, fmt: string, args: ..any) -> int { return int(size1 - size0) } +// wprint_type is a utility procedure to write a ^runtime.Type_Info value to w wprint_type :: proc(w: io.Writer, info: ^runtime.Type_Info) -> (int, io.Error) { n, err := reflect.write_type(w, info) io.flush(auto_cast w) return n, err } +// wprint_typeid is a utility procedure to write a typeid value to w wprint_typeid :: proc(w: io.Writer, id: typeid) -> (int, io.Error) { n, err := reflect.write_type(w, type_info_of(id)) io.flush(auto_cast w) @@ -829,7 +850,7 @@ _pad :: proc(fi: ^Info, s: string) { fmt_float :: proc(fi: ^Info, v: f64, bit_size: int, verb: rune) { switch verb { - case 'f', 'F', 'v': + case 'f', 'F', 'g', 'G', 'v': prec: int = 3 if fi.prec_set { prec = fi.prec diff --git a/core/fmt/fmt_js.odin b/core/fmt/fmt_js.odin index bcd9688a1..7a9876127 100644 --- a/core/fmt/fmt_js.odin +++ b/core/fmt/fmt_js.odin @@ -34,11 +34,16 @@ stderr := io.Writer{ }, } -// print* procedures return the number of bytes written +// print formats using the default print settings and writes to stdout print :: proc(args: ..any, sep := " ") -> int { return wprint(w=stdout, args=args, sep=sep) } +// println formats using the default print settings and writes to stdout println :: proc(args: ..any, sep := " ") -> int { return wprintln(w=stdout, args=args, sep=sep) } +// printf formats according to the specified format string and writes to stdout printf :: proc(fmt: string, args: ..any) -> int { return wprintf(stdout, fmt, ..args) } +// eprint formats using the default print settings and writes to stderr eprint :: proc(args: ..any, sep := " ") -> int { return wprint(w=stderr, args=args, sep=sep) } +// eprintln formats using the default print settings and writes to stderr eprintln :: proc(args: ..any, sep := " ") -> int { return wprintln(w=stderr, args=args, sep=sep) } +// eprintf formats according to the specified format string and writes to stderr eprintf :: proc(fmt: string, args: ..any) -> int { return wprintf(stderr, fmt, ..args) } diff --git a/core/fmt/fmt_os.odin 
b/core/fmt/fmt_os.odin index 7434d939d..f5c8d75bd 100644 --- a/core/fmt/fmt_os.odin +++ b/core/fmt/fmt_os.odin @@ -5,15 +5,18 @@ import "core:runtime" import "core:os" import "core:io" +// fprint formats using the default print settings and writes to fd fprint :: proc(fd: os.Handle, args: ..any, sep := " ") -> int { w := io.to_writer(os.stream_from_handle(fd)) return wprint(w=w, args=args, sep=sep) } +// fprintln formats using the default print settings and writes to fd fprintln :: proc(fd: os.Handle, args: ..any, sep := " ") -> int { w := io.to_writer(os.stream_from_handle(fd)) return wprintln(w=w, args=args, sep=sep) } +// fprintf formats according to the specified format string and writes to fd fprintf :: proc(fd: os.Handle, fmt: string, args: ..any) -> int { w := io.to_writer(os.stream_from_handle(fd)) return wprintf(w, fmt, ..args) @@ -27,11 +30,16 @@ fprint_typeid :: proc(fd: os.Handle, id: typeid) -> (n: int, err: io.Error) { return wprint_typeid(w, id) } -// print* procedures return the number of bytes written +// print formats using the default print settings and writes to os.stdout print :: proc(args: ..any, sep := " ") -> int { return fprint(fd=os.stdout, args=args, sep=sep) } +// println formats using the default print settings and writes to os.stdout println :: proc(args: ..any, sep := " ") -> int { return fprintln(fd=os.stdout, args=args, sep=sep) } +// printf formats according to the specified format string and writes to os.stdout printf :: proc(fmt: string, args: ..any) -> int { return fprintf(os.stdout, fmt, ..args) } +// eprint formats using the default print settings and writes to os.stderr eprint :: proc(args: ..any, sep := " ") -> int { return fprint(fd=os.stderr, args=args, sep=sep) } +// eprintln formats using the default print settings and writes to os.stderr eprintln :: proc(args: ..any, sep := " ") -> int { return fprintln(fd=os.stderr, args=args, sep=sep) } +// eprintf formats according to the specified format string and writes to os.stderr eprintf :: proc(fmt: string, args: ..any) -> int { return fprintf(os.stderr, fmt, ..args) } diff --git a/core/io/io.odin b/core/io/io.odin index b4757f8e5..e9d839efb 100644 --- a/core/io/io.odin +++ b/core/io/io.odin @@ -1,9 +1,13 @@ +// package io provides basic interfaces for generic data stream primitives. +// The purpose of this package is to wrap existing data structures and their +// operations into an abstracted stream interface. package io import "core:intrinsics" import "core:runtime" import "core:unicode/utf8" +// Seek whence values Seek_From :: enum { Start = 0, // seek relative to the origin of the file Current = 1, // seek relative to the current offset @@ -139,6 +143,10 @@ destroy :: proc(s: Stream) -> Error { return .Empty } +// read reads up to len(p) bytes from s into p. It returns the number of bytes read and any error that occurred. +// +// When read encounters an .EOF or error after successfully reading n > 0 bytes, it returns the number of +// bytes read along with the error. read :: proc(s: Reader, p: []byte, n_read: ^int = nil) -> (n: int, err: Error) { if s.stream_vtable != nil && s.impl_read != nil { n, err = s->impl_read(p) @@ -150,6 +158,7 @@ read :: proc(s: Reader, p: []byte, n_read: ^int = nil) -> (n: int, err: Error) { return 0, .Empty } +// write writes up to len(p) bytes from p into s. It returns the number of bytes written and any error that occurred.
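The read contract documented above (the byte count is reported even when the same call also reports an error) is easiest to see in a small consuming loop. A hedged sketch, relying only on the io.read signature shown in this hunk; the package and procedure names are illustrative and not part of this patch.

package io_read_sketch

import "core:io"

// Sketch: drain a Reader, counting bytes until .EOF or another error is reported.
read_all_bytes :: proc(r: io.Reader) -> (total: int) {
	buf: [512]byte
	for {
		n, err := io.read(r, buf[:])
		total += n           // n is valid even when err is also set
		if err != .None {    // .EOF or a real error ends the loop
			break
		}
	}
	return
}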
write :: proc(s: Writer, p: []byte, n_written: ^int = nil) -> (n: int, err: Error) { if s.stream_vtable != nil && s.impl_write != nil { n, err = s->impl_write(p) @@ -161,6 +170,13 @@ write :: proc(s: Writer, p: []byte, n_written: ^int = nil) -> (n: int, err: Erro return 0, .Empty } +// seek sets the offset of the next read or write to offset. +// +// .Start means seek relative to the origin of the file. +// .Current means seek relative to the current offset. +// .End means seek relative to the end. +// +// seek returns the new offset relative to the start of the file/stream, and any error that occurred. seek :: proc(s: Seeker, offset: i64, whence: Seek_From) -> (n: i64, err: Error) { if s.stream_vtable != nil && s.impl_seek != nil { return s->impl_seek(offset, whence) @@ -168,6 +184,8 @@ seek :: proc(s: Seeker, offset: i64, whence: Seek_From) -> (n: i64, err: Error) return 0, .Empty } +// The behaviour of close after the first call is stream implementation defined. +// Different streams may document their own behaviour. close :: proc(s: Closer) -> Error { if s.stream_vtable != nil && s.impl_close != nil { return s->impl_close() @@ -184,6 +202,7 @@ flush :: proc(s: Flusher) -> Error { return .None } +// size returns the size of the stream. If the stream does not support querying its size, 0 will be returned. size :: proc(s: Stream) -> i64 { if s.stream_vtable == nil { return 0 @@ -214,7 +233,12 @@ size :: proc(s: Stream) -> i64 { - +// read_at reads len(p) bytes into p starting at the provided offset in the underlying Reader_At stream r. +// It returns the number of bytes read and any error that occurred. +// +// When read_at returns n < len(p), it returns an error explaining why. +// +// If n == len(p), err may be either .None or .EOF read_at :: proc(r: Reader_At, p: []byte, offset: i64, n_read: ^int = nil) -> (n: int, err: Error) { defer if n_read != nil { n_read^ += n @@ -245,6 +269,11 @@ read_at :: proc(r: Reader_At, p: []byte, offset: i64, n_read: ^int = nil) -> (n: } +// write_at writes len(p) bytes from p starting at the provided offset in the underlying Writer_At stream w. +// It returns the number of bytes written and any error that occurred. +// +// If write_at is writing to a Writer_At which has a seek offset, then write_at should not affect the underlying +// seek offset. write_at :: proc(w: Writer_At, p: []byte, offset: i64, n_written: ^int = nil) -> (n: int, err: Error) { defer if n_written != nil { n_written^ += n @@ -294,6 +323,7 @@ read_from :: proc(w: Reader_From, r: Reader) -> (n: i64, err: Error) { } +// read_byte reads and returns the next byte from r. read_byte :: proc(r: Byte_Reader, n_read: ^int = nil) -> (b: byte, err: Error) { defer if err == nil && n_read != nil { n_read^ += 1 @@ -347,6 +377,7 @@ _write_byte :: proc(w: Byte_Writer, c: byte, n_written: ^int = nil) -> (err: Err return err } +// read_rune reads a single UTF-8 encoded Unicode codepoint and returns the rune and its size in bytes. read_rune :: proc(br: Rune_Reader, n_read: ^int = nil) -> (ch: rune, size: int, err: Error) { defer if err == nil && n_read != nil { n_read^ += size @@ -405,10 +436,12 @@ unread_rune :: proc(s: Rune_Scanner) -> Error { } +// write_string writes the contents of the string str to the writer s. write_string :: proc(s: Writer, str: string, n_written: ^int = nil) -> (n: int, err: Error) { return write(s, transmute([]byte)str, n_written) } +// write_rune writes a UTF-8 encoded rune to the writer s.
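The seek whence values and write_string documented above combine naturally: write, rewind with .Start, then overwrite. A small sketch under the assumption that the same underlying stream is supplied as both the Writer and the Seeker; the package and procedure names are illustrative and not part of this patch.

package io_seek_sketch

import "core:io"

// Sketch: append a line, then jump back to the origin and stamp over the first bytes.
overwrite_head :: proc(w: io.Writer, sk: io.Seeker) -> io.Error {
	if _, err := io.write_string(w, "hello, log\n"); err != .None {
		return err
	}
	if _, err := io.seek(sk, 0, .Start); err != .None { // .Start: relative to the origin
		return err
	}
	_, err := io.write_string(w, "HELLO")
	return err
}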
write_rune :: proc(s: Writer, r: rune, n_written: ^int = nil) -> (size: int, err: Error) { defer if err == nil && n_written != nil { n_written^ += size @@ -430,12 +463,16 @@ write_rune :: proc(s: Writer, r: rune, n_written: ^int = nil) -> (size: int, err } - +// read_full reads exactly len(buf) bytes from r into buf. read_full :: proc(r: Reader, buf: []byte) -> (n: int, err: Error) { return read_at_least(r, buf, len(buf)) } +// read_at_least reads from r into buf until it has read at least min bytes. It returns the number +// of bytes copied and an error if fewer bytes were read. `.EOF` is only returned if no bytes were read. +// `.Unexpected_EOF` is returned when an `.EOF` is returned by the passed Reader after reading +// fewer than min bytes. If len(buf) is less than min, `.Short_Buffer` is returned. read_at_least :: proc(r: Reader, buf: []byte, min: int) -> (n: int, err: Error) { if len(buf) < min { return 0, .Short_Buffer diff --git a/core/math/big/common.odin b/core/math/big/common.odin index 2b34a9163..74a641d83 100644 --- a/core/math/big/common.odin +++ b/core/math/big/common.odin @@ -172,7 +172,7 @@ Error :: enum int { Unimplemented = 127, } -Error_String :: #partial [Error]string{ +Error_String :: #sparse[Error]string{ .Okay = "Okay", .Out_Of_Memory = "Out of memory", .Invalid_Pointer = "Invalid pointer", @@ -182,6 +182,7 @@ Error_String :: #partial [Error]string{ .Max_Iterations_Reached = "Max iterations reached", .Buffer_Overflow = "Buffer overflow", .Integer_Overflow = "Integer overflow", + .Integer_Underflow = "Integer underflow", .Division_by_Zero = "Division by zero", .Math_Domain_Error = "Math domain error", diff --git a/core/math/linalg/glsl/linalg_glsl.odin b/core/math/linalg/glsl/linalg_glsl.odin index 7bc68b964..74753f66f 100644 --- a/core/math/linalg/glsl/linalg_glsl.odin +++ b/core/math/linalg/glsl/linalg_glsl.odin @@ -473,6 +473,25 @@ floor_dvec3 :: proc "c" (x: dvec3) -> dvec3 { return {floor(x.x), floor(x.y), fl floor_dvec4 :: proc "c" (x: dvec4) -> dvec4 { return {floor(x.x), floor(x.y), floor(x.z), floor(x.w)} } + +round :: proc{ + round_f32, + round_f64, + round_vec2, + round_vec3, + round_vec4, + round_dvec2, + round_dvec3, + round_dvec4, +} +round_vec2 :: proc "c" (x: vec2) -> vec2 { return {round(x.x), round(x.y)} } +round_vec3 :: proc "c" (x: vec3) -> vec3 { return {round(x.x), round(x.y), round(x.z)} } +round_vec4 :: proc "c" (x: vec4) -> vec4 { return {round(x.x), round(x.y), round(x.z), round(x.w)} } +round_dvec2 :: proc "c" (x: dvec2) -> dvec2 { return {round(x.x), round(x.y)} } +round_dvec3 :: proc "c" (x: dvec3) -> dvec3 { return {round(x.x), round(x.y), round(x.z)} } +round_dvec4 :: proc "c" (x: dvec4) -> dvec4 { return {round(x.x), round(x.y), round(x.z), round(x.w)} } + + ceil :: proc{ ceil_f32, ceil_f64, diff --git a/core/math/linalg/glsl/linalg_glsl_math.odin b/core/math/linalg/glsl/linalg_glsl_math.odin index 68f43a2f7..968a3fa5e 100644 --- a/core/math/linalg/glsl/linalg_glsl_math.odin +++ b/core/math/linalg/glsl/linalg_glsl_math.odin @@ -23,6 +23,7 @@ log_f32 :: proc "c" (x: f32) -> f32 { return math.ln(x) } exp2_f32 :: proc "c" (x: f32) -> f32 { return math.pow(f32(2), x) } sign_f32 :: proc "c" (x: f32) -> f32 { return math.sign(x) } floor_f32 :: proc "c" (x: f32) -> f32 { return math.floor(x) } +round_f32 :: proc "c" (x: f32) -> f32 { return math.round(x) } ceil_f32 :: proc "c" (x: f32) -> f32 { return math.ceil(x) } mod_f32 :: proc "c" (x, y: f32) -> f32 { return math.mod(x, y) } fract_f32 :: proc "c" (x: f32) -> f32 { @@ -53,6 +54,7 @@
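The round overload set added above dispatches on both scalar and vector types. A brief usage sketch, assuming the package is imported under its usual core:math/linalg/glsl path; the surrounding names are illustrative and not part of this patch.

package glsl_round_sketch

import glsl "core:math/linalg/glsl"

// Sketch: scalar and component-wise rounding through the same overloaded name.
round_usage :: proc() {
	s := glsl.round(f32(2.5))                 // resolves to round_f32
	v := glsl.round(glsl.vec3{0.4, 1.5, 2.6}) // resolves to round_vec3, applied per component
	_ = s
	_ = v
}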
log_f64 :: proc "c" (x: f64) -> f64 { return math.ln(x) } exp2_f64 :: proc "c" (x: f64) -> f64 { return math.pow(f64(2), x) } sign_f64 :: proc "c" (x: f64) -> f64 { return math.sign(x) } floor_f64 :: proc "c" (x: f64) -> f64 { return math.floor(x) } +round_f64 :: proc "c" (x: f64) -> f64 { return math.round(x) } ceil_f64 :: proc "c" (x: f64) -> f64 { return math.ceil(x) } mod_f64 :: proc "c" (x, y: f64) -> f64 { return math.mod(x, y) } fract_f64 :: proc "c" (x: f64) -> f64 { diff --git a/core/math/linalg/hlsl/linalg_hlsl.odin b/core/math/linalg/hlsl/linalg_hlsl.odin index 4391975ba..3f73dcd1f 100644 --- a/core/math/linalg/hlsl/linalg_hlsl.odin +++ b/core/math/linalg/hlsl/linalg_hlsl.odin @@ -551,6 +551,23 @@ floor_double2 :: proc "c" (x: double2) -> double2 { return {floor(x.x), floor(x. floor_double3 :: proc "c" (x: double3) -> double3 { return {floor(x.x), floor(x.y), floor(x.z)} } floor_double4 :: proc "c" (x: double4) -> double4 { return {floor(x.x), floor(x.y), floor(x.z), floor(x.w)} } +round :: proc{ + round_float, + round_double, + round_float2, + round_float3, + round_float4, + round_double2, + round_double3, + round_double4, +} +round_float2 :: proc "c" (x: float2) -> float2 { return {round(x.x), round(x.y)} } +round_float3 :: proc "c" (x: float3) -> float3 { return {round(x.x), round(x.y), round(x.z)} } +round_float4 :: proc "c" (x: float4) -> float4 { return {round(x.x), round(x.y), round(x.z), round(x.w)} } +round_double2 :: proc "c" (x: double2) -> double2 { return {round(x.x), round(x.y)} } +round_double3 :: proc "c" (x: double3) -> double3 { return {round(x.x), round(x.y), round(x.z)} } +round_double4 :: proc "c" (x: double4) -> double4 { return {round(x.x), round(x.y), round(x.z), round(x.w)} } + ceil :: proc{ ceil_float, @@ -570,6 +587,69 @@ ceil_double3 :: proc "c" (x: double3) -> double3 { return {ceil(x.x), ceil(x.y), ceil_double4 :: proc "c" (x: double4) -> double4 { return {ceil(x.x), ceil(x.y), ceil(x.z), ceil(x.w)} } +isfinite_float :: proc "c" (x: float) -> bool { return !isinf_float(x) } +isfinite_float2 :: proc "c" (x: float2) -> bool2 { return {isfinite_float(x.x), isfinite_float(x.y)} } +isfinite_float3 :: proc "c" (x: float3) -> bool3 { return {isfinite_float(x.x), isfinite_float(x.y), isfinite_float(x.z)} } +isfinite_float4 :: proc "c" (x: float4) -> bool4 { return {isfinite_float(x.x), isfinite_float(x.y), isfinite_float(x.z), isfinite_float(x.w)} } +isfinite_double :: proc "c" (x: double) -> bool { return !isinf_double(x) } +isfinite_double2 :: proc "c" (x: double2) -> bool2 { return {isfinite_double(x.x), isfinite_double(x.y)} } +isfinite_double3 :: proc "c" (x: double3) -> bool3 { return {isfinite_double(x.x), isfinite_double(x.y), isfinite_double(x.z)} } +isfinite_double4 :: proc "c" (x: double4) -> bool4 { return {isfinite_double(x.x), isfinite_double(x.y), isfinite_double(x.z), isfinite_double(x.w)} } + +// isfinite is the opposite of isinf and returns true if the number is neither positive-infinite or negative-infinite +isfinite :: proc{ + isfinite_float, + isfinite_float2, + isfinite_float3, + isfinite_float4, + isfinite_double, + isfinite_double2, + isfinite_double3, + isfinite_double4, +} + + +isinf_float :: proc "c" (x: float) -> bool { return x * 0.5 == x } +isinf_float2 :: proc "c" (x: float2) -> bool2 { return {isinf_float(x.x), isinf_float(x.y)} } +isinf_float3 :: proc "c" (x: float3) -> bool3 { return {isinf_float(x.x), isinf_float(x.y), isinf_float(x.z)} } +isinf_float4 :: proc "c" (x: float4) -> bool4 { return {isinf_float(x.x), 
isinf_float(x.y), isinf_float(x.z), isinf_float(x.w)} } +isinf_double :: proc "c" (x: double) -> bool { return x * 0.5 == x } +isinf_double2 :: proc "c" (x: double2) -> bool2 { return {isinf_double(x.x), isinf_double(x.y)} } +isinf_double3 :: proc "c" (x: double3) -> bool3 { return {isinf_double(x.x), isinf_double(x.y), isinf_double(x.z)} } +isinf_double4 :: proc "c" (x: double4) -> bool4 { return {isinf_double(x.x), isinf_double(x.y), isinf_double(x.z), isinf_double(x.w)} } + +// isinf is the opposite of isfinite and returns true if the number is either positive-infinite or negative-infinite +isinf :: proc{ + isinf_float, + isinf_float2, + isinf_float3, + isinf_float4, + isinf_double, + isinf_double2, + isinf_double3, + isinf_double4, +} + + +isnan_float2 :: proc "c" (x: float2) -> bool2 { return {isnan_float(x.x), isnan_float(x.y)} } +isnan_float3 :: proc "c" (x: float3) -> bool3 { return {isnan_float(x.x), isnan_float(x.y), isnan_float(x.z)} } +isnan_float4 :: proc "c" (x: float4) -> bool4 { return {isnan_float(x.x), isnan_float(x.y), isnan_float(x.z), isnan_float(x.w)} } +isnan_double2 :: proc "c" (x: double2) -> bool2 { return {isnan_double(x.x), isnan_double(x.y)} } +isnan_double3 :: proc "c" (x: double3) -> bool3 { return {isnan_double(x.x), isnan_double(x.y), isnan_double(x.z)} } +isnan_double4 :: proc "c" (x: double4) -> bool4 { return {isnan_double(x.x), isnan_double(x.y), isnan_double(x.z), isnan_double(x.w)} } + +// isnan returns true if the input value is the special case of Not-A-Number +isnan :: proc{ + isnan_float, + isnan_float2, + isnan_float3, + isnan_float4, + isnan_double, + isnan_double2, + isnan_double3, + isnan_double4, +} + fmod :: proc{ fmod_float, fmod_double, diff --git a/core/math/linalg/hlsl/linalg_hlsl_math.odin b/core/math/linalg/hlsl/linalg_hlsl_math.odin index d884c3d31..91c542b59 100644 --- a/core/math/linalg/hlsl/linalg_hlsl_math.odin +++ b/core/math/linalg/hlsl/linalg_hlsl_math.odin @@ -26,7 +26,9 @@ log10_float :: proc "c" (x: float) -> float { return math.log(x, 10) } exp2_float :: proc "c" (x: float) -> float { return math.pow(float(2), x) } sign_float :: proc "c" (x: float) -> float { return math.sign(x) } floor_float :: proc "c" (x: float) -> float { return math.floor(x) } +round_float :: proc "c" (x: float) -> float { return math.round(x) } ceil_float :: proc "c" (x: float) -> float { return math.ceil(x) } +isnan_float :: proc "c" (x: float) -> bool { return math.classify(x) == .NaN} fmod_float :: proc "c" (x, y: float) -> float { return math.mod(x, y) } frac_float :: proc "c" (x: float) -> float { if x >= 0 { @@ -35,6 +37,7 @@ frac_float :: proc "c" (x: float) -> float { return math.trunc(-x) + x } + cos_double :: proc "c" (x: double) -> double { return math.cos(x) } sin_double :: proc "c" (x: double) -> double { return math.sin(x) } tan_double :: proc "c" (x: double) -> double { return math.tan(x) } @@ -59,7 +62,9 @@ log10_double :: proc "c" (x: double) -> double { return math.log(x, 10) exp2_double :: proc "c" (x: double) -> double { return math.pow(double(2), x) } sign_double :: proc "c" (x: double) -> double { return math.sign(x) } floor_double :: proc "c" (x: double) -> double { return math.floor(x) } +round_double :: proc "c" (x: double) -> double { return math.round(x) } ceil_double :: proc "c" (x: double) -> double { return math.ceil(x) } +isnan_double :: proc "c" (x: double) -> bool { return math.classify(x) == .NaN} fmod_double :: proc "c" (x, y: double) -> double { return math.mod(x, y) } frac_double :: proc "c" (x: double) -> double { 
if x >= 0 { diff --git a/core/math/noise/internal.odin b/core/math/noise/internal.odin new file mode 100644 index 000000000..5837f9235 --- /dev/null +++ b/core/math/noise/internal.odin @@ -0,0 +1,734 @@ +/* + OpenSimplex2 noise implementation. + + Ported from https://github.com/KdotJPG/OpenSimplex2. + Copyright 2022 Yuki2 (https://github.com/NoahR02) +*/ +//+private +package math_noise + +/* + Private implementation details follow. +*/ + +PRIME_X :: i64(0x5205402B9270C86F) +PRIME_Y :: i64(0x598CD327003817B5) +PRIME_Z :: i64(0x5BCC226E9FA0BACB) +PRIME_W :: i64(0x56CC5227E58F554B) + +HASH_MULTIPLIER :: i64(0x53A3F72DEEC546F5) +SEED_FLIP_3D :: i64(-0x52D547B2E96ED629) +SEED_OFFSET_4D :: i64(0xE83DC3E0DA7164D) + +ROOT_2_OVER_2 :: f64(0.7071067811865476) +SKEW_2D :: f64(0.366025403784439) +UNSKEW_2D :: f64(-0.21132486540518713) +ROOT_3_OVER_3 :: f64(0.577350269189626) + +FALLBACK_ROTATE_3D :: f64(2.0) / f64(3.0) +ROTATE_3D_ORTHOGONALIZER :: f64(UNSKEW_2D) + +SKEW_4D :: f32(0hbe0d8369) +UNSKEW_4D :: f32(0.309016994374947) +LATTICE_STEP_4D :: f32(0.2) + +N_GRADS_2D_EXPONENT :: 7 +N_GRADS_3D_EXPONENT :: 8 +N_GRADS_4D_EXPONENT :: 9 +N_GRADS_2D :: 1 << N_GRADS_2D_EXPONENT +N_GRADS_3D :: 1 << N_GRADS_3D_EXPONENT +N_GRADS_4D :: 1 << N_GRADS_4D_EXPONENT + +NORMALIZER_2D :: f64(0.01001634121365712) +NORMALIZER_3D :: f64(0.07969837668935331) +NORMALIZER_4D :: f64(0.0220065933241897) +RSQUARED_2D :: f32(0.5) +RSQUARED_3D :: f32(0.6) +RSQUARED_4D :: f32(0.6) + +GRADIENTS_2D := [N_GRADS_2D * 2]f32{ + 0h4218d2da, 0h42b87975, 0h42b87975, 0h4218d2da, 0h42b87975, 0hc218d2da, 0h4218d2da, 0hc2b87975, + 0hc218d2da, 0hc2b87975, 0hc2b87975, 0hc218d2da, 0hc2b87975, 0h4218d2da, 0hc218d2da, 0h42b87975, + 0h4150804d, 0h42c5f72a, 0h42731b78, 0h429e696c, 0h429e696c, 0h42731b78, 0h42c5f72a, 0h4150804d, + 0h42c5f72a, 0hc150804d, 0h429e696c, 0hc2731b78, 0h42731b78, 0hc29e696c, 0h4150804d, 0hc2c5f72a, + 0hc150804d, 0hc2c5f72a, 0hc2731b78, 0hc29e696c, 0hc29e696c, 0hc2731b78, 0hc2c5f72a, 0hc150804d, + 0hc2c5f72a, 0h4150804d, 0hc29e696c, 0h42731b78, 0hc2731b78, 0h429e696c, 0hc150804d, 0h42c5f72a, + 0h4218d2da, 0h42b87975, 0h42b87975, 0h4218d2da, 0h42b87975, 0hc218d2da, 0h4218d2da, 0hc2b87975, + 0hc218d2da, 0hc2b87975, 0hc2b87975, 0hc218d2da, 0hc2b87975, 0h4218d2da, 0hc218d2da, 0h42b87975, + 0h4150804d, 0h42c5f72a, 0h42731b78, 0h429e696c, 0h429e696c, 0h42731b78, 0h42c5f72a, 0h4150804d, + 0h42c5f72a, 0hc150804d, 0h429e696c, 0hc2731b78, 0h42731b78, 0hc29e696c, 0h4150804d, 0hc2c5f72a, + 0hc150804d, 0hc2c5f72a, 0hc2731b78, 0hc29e696c, 0hc29e696c, 0hc2731b78, 0hc2c5f72a, 0hc150804d, + 0hc2c5f72a, 0h4150804d, 0hc29e696c, 0h42731b78, 0hc2731b78, 0h429e696c, 0hc150804d, 0h42c5f72a, + 0h4218d2da, 0h42b87975, 0h42b87975, 0h4218d2da, 0h42b87975, 0hc218d2da, 0h4218d2da, 0hc2b87975, + 0hc218d2da, 0hc2b87975, 0hc2b87975, 0hc218d2da, 0hc2b87975, 0h4218d2da, 0hc218d2da, 0h42b87975, + 0h4150804d, 0h42c5f72a, 0h42731b78, 0h429e696c, 0h429e696c, 0h42731b78, 0h42c5f72a, 0h4150804d, + 0h42c5f72a, 0hc150804d, 0h429e696c, 0hc2731b78, 0h42731b78, 0hc29e696c, 0h4150804d, 0hc2c5f72a, + 0hc150804d, 0hc2c5f72a, 0hc2731b78, 0hc29e696c, 0hc29e696c, 0hc2731b78, 0hc2c5f72a, 0hc150804d, + 0hc2c5f72a, 0h4150804d, 0hc29e696c, 0h42731b78, 0hc2731b78, 0h429e696c, 0hc150804d, 0h42c5f72a, + 0h4218d2da, 0h42b87975, 0h42b87975, 0h4218d2da, 0h42b87975, 0hc218d2da, 0h4218d2da, 0hc2b87975, + 0hc218d2da, 0hc2b87975, 0hc2b87975, 0hc218d2da, 0hc2b87975, 0h4218d2da, 0hc218d2da, 0h42b87975, + 0h4150804d, 0h42c5f72a, 0h42731b78, 0h429e696c, 0h429e696c, 0h42731b78, 0h42c5f72a, 
0h4150804d, + 0h42c5f72a, 0hc150804d, 0h429e696c, 0hc2731b78, 0h42731b78, 0hc29e696c, 0h4150804d, 0hc2c5f72a, + 0hc150804d, 0hc2c5f72a, 0hc2731b78, 0hc29e696c, 0hc29e696c, 0hc2731b78, 0hc2c5f72a, 0hc150804d, + 0hc2c5f72a, 0h4150804d, 0hc29e696c, 0h42731b78, 0hc2731b78, 0h429e696c, 0hc150804d, 0h42c5f72a, + 0h4218d2da, 0h42b87975, 0h42b87975, 0h4218d2da, 0h42b87975, 0hc218d2da, 0h4218d2da, 0hc2b87975, + 0hc218d2da, 0hc2b87975, 0hc2b87975, 0hc218d2da, 0hc2b87975, 0h4218d2da, 0hc218d2da, 0h42b87975, + 0h4150804d, 0h42c5f72a, 0h42731b78, 0h429e696c, 0h429e696c, 0h42731b78, 0h42c5f72a, 0h4150804d, + 0h42c5f72a, 0hc150804d, 0h429e696c, 0hc2731b78, 0h42731b78, 0hc29e696c, 0h4150804d, 0hc2c5f72a, + 0hc150804d, 0hc2c5f72a, 0hc2731b78, 0hc29e696c, 0hc29e696c, 0hc2731b78, 0hc2c5f72a, 0hc150804d, + 0hc2c5f72a, 0h4150804d, 0hc29e696c, 0h42731b78, 0hc2731b78, 0h429e696c, 0hc150804d, 0h42c5f72a, + 0h4218d2da, 0h42b87975, 0h42b87975, 0h4218d2da, 0h42b87975, 0hc218d2da, 0h4218d2da, 0hc2b87975, + 0hc218d2da, 0hc2b87975, 0hc2b87975, 0hc218d2da, 0hc2b87975, 0h4218d2da, 0hc218d2da, 0h42b87975, +} + +GRADIENTS_3D := [N_GRADS_3D * 4]f32{ + 0h41df5103, 0h41df5103, 0hc148c1c5, 0h00000000, 0h41df5103, 0h41df5103, 0h4148c1c5, 0h00000000, + 0h421ae5b8, 0h416b5146, 0h00000000, 0h00000000, 0h416b5146, 0h421ae5b8, 0h00000000, 0h00000000, + 0hc1df5103, 0h41df5103, 0hc148c1c5, 0h00000000, 0hc1df5103, 0h41df5103, 0h4148c1c5, 0h00000000, + 0hc16b5146, 0h421ae5b8, 0h00000000, 0h00000000, 0hc21ae5b8, 0h416b5146, 0h00000000, 0h00000000, + 0hc148c1c5, 0hc1df5103, 0hc1df5103, 0h00000000, 0h4148c1c5, 0hc1df5103, 0hc1df5103, 0h00000000, + 0h00000000, 0hc21ae5b8, 0hc16b5146, 0h00000000, 0h00000000, 0hc16b5146, 0hc21ae5b8, 0h00000000, + 0hc148c1c5, 0hc1df5103, 0h41df5103, 0h00000000, 0h4148c1c5, 0hc1df5103, 0h41df5103, 0h00000000, + 0h00000000, 0hc16b5146, 0h421ae5b8, 0h00000000, 0h00000000, 0hc21ae5b8, 0h416b5146, 0h00000000, + 0hc1df5103, 0hc1df5103, 0hc148c1c5, 0h00000000, 0hc1df5103, 0hc1df5103, 0h4148c1c5, 0h00000000, + 0hc21ae5b8, 0hc16b5146, 0h00000000, 0h00000000, 0hc16b5146, 0hc21ae5b8, 0h00000000, 0h00000000, + 0hc1df5103, 0hc148c1c5, 0hc1df5103, 0h00000000, 0hc1df5103, 0h4148c1c5, 0hc1df5103, 0h00000000, + 0hc16b5146, 0h00000000, 0hc21ae5b8, 0h00000000, 0hc21ae5b8, 0h00000000, 0hc16b5146, 0h00000000, + 0hc1df5103, 0hc148c1c5, 0h41df5103, 0h00000000, 0hc1df5103, 0h4148c1c5, 0h41df5103, 0h00000000, + 0hc21ae5b8, 0h00000000, 0h416b5146, 0h00000000, 0hc16b5146, 0h00000000, 0h421ae5b8, 0h00000000, + 0hc148c1c5, 0h41df5103, 0hc1df5103, 0h00000000, 0h4148c1c5, 0h41df5103, 0hc1df5103, 0h00000000, + 0h00000000, 0h416b5146, 0hc21ae5b8, 0h00000000, 0h00000000, 0h421ae5b8, 0hc16b5146, 0h00000000, + 0hc148c1c5, 0h41df5103, 0h41df5103, 0h00000000, 0h4148c1c5, 0h41df5103, 0h41df5103, 0h00000000, + 0h00000000, 0h421ae5b8, 0h416b5146, 0h00000000, 0h00000000, 0h416b5146, 0h421ae5b8, 0h00000000, + 0h41df5103, 0hc1df5103, 0hc148c1c5, 0h00000000, 0h41df5103, 0hc1df5103, 0h4148c1c5, 0h00000000, + 0h416b5146, 0hc21ae5b8, 0h00000000, 0h00000000, 0h421ae5b8, 0hc16b5146, 0h00000000, 0h00000000, + 0h41df5103, 0hc148c1c5, 0hc1df5103, 0h00000000, 0h41df5103, 0h4148c1c5, 0hc1df5103, 0h00000000, + 0h421ae5b8, 0h00000000, 0hc16b5146, 0h00000000, 0h416b5146, 0h00000000, 0hc21ae5b8, 0h00000000, + 0h41df5103, 0hc148c1c5, 0h41df5103, 0h00000000, 0h41df5103, 0h4148c1c5, 0h41df5103, 0h00000000, + 0h416b5146, 0h00000000, 0h421ae5b8, 0h00000000, 0h421ae5b8, 0h00000000, 0h416b5146, 0h00000000, + 0h41df5103, 0h41df5103, 0hc148c1c5, 0h00000000, 0h41df5103, 
0h41df5103, 0h4148c1c5, 0h00000000, + 0h421ae5b8, 0h416b5146, 0h00000000, 0h00000000, 0h416b5146, 0h421ae5b8, 0h00000000, 0h00000000, + 0hc1df5103, 0h41df5103, 0hc148c1c5, 0h00000000, 0hc1df5103, 0h41df5103, 0h4148c1c5, 0h00000000, + 0hc16b5146, 0h421ae5b8, 0h00000000, 0h00000000, 0hc21ae5b8, 0h416b5146, 0h00000000, 0h00000000, + 0hc148c1c5, 0hc1df5103, 0hc1df5103, 0h00000000, 0h4148c1c5, 0hc1df5103, 0hc1df5103, 0h00000000, + 0h00000000, 0hc21ae5b8, 0hc16b5146, 0h00000000, 0h00000000, 0hc16b5146, 0hc21ae5b8, 0h00000000, + 0hc148c1c5, 0hc1df5103, 0h41df5103, 0h00000000, 0h4148c1c5, 0hc1df5103, 0h41df5103, 0h00000000, + 0h00000000, 0hc16b5146, 0h421ae5b8, 0h00000000, 0h00000000, 0hc21ae5b8, 0h416b5146, 0h00000000, + 0hc1df5103, 0hc1df5103, 0hc148c1c5, 0h00000000, 0hc1df5103, 0hc1df5103, 0h4148c1c5, 0h00000000, + 0hc21ae5b8, 0hc16b5146, 0h00000000, 0h00000000, 0hc16b5146, 0hc21ae5b8, 0h00000000, 0h00000000, + 0hc1df5103, 0hc148c1c5, 0hc1df5103, 0h00000000, 0hc1df5103, 0h4148c1c5, 0hc1df5103, 0h00000000, + 0hc16b5146, 0h00000000, 0hc21ae5b8, 0h00000000, 0hc21ae5b8, 0h00000000, 0hc16b5146, 0h00000000, + 0hc1df5103, 0hc148c1c5, 0h41df5103, 0h00000000, 0hc1df5103, 0h4148c1c5, 0h41df5103, 0h00000000, + 0hc21ae5b8, 0h00000000, 0h416b5146, 0h00000000, 0hc16b5146, 0h00000000, 0h421ae5b8, 0h00000000, + 0hc148c1c5, 0h41df5103, 0hc1df5103, 0h00000000, 0h4148c1c5, 0h41df5103, 0hc1df5103, 0h00000000, + 0h00000000, 0h416b5146, 0hc21ae5b8, 0h00000000, 0h00000000, 0h421ae5b8, 0hc16b5146, 0h00000000, + 0hc148c1c5, 0h41df5103, 0h41df5103, 0h00000000, 0h4148c1c5, 0h41df5103, 0h41df5103, 0h00000000, + 0h00000000, 0h421ae5b8, 0h416b5146, 0h00000000, 0h00000000, 0h416b5146, 0h421ae5b8, 0h00000000, + 0h41df5103, 0hc1df5103, 0hc148c1c5, 0h00000000, 0h41df5103, 0hc1df5103, 0h4148c1c5, 0h00000000, + 0h416b5146, 0hc21ae5b8, 0h00000000, 0h00000000, 0h421ae5b8, 0hc16b5146, 0h00000000, 0h00000000, + 0h41df5103, 0hc148c1c5, 0hc1df5103, 0h00000000, 0h41df5103, 0h4148c1c5, 0hc1df5103, 0h00000000, + 0h421ae5b8, 0h00000000, 0hc16b5146, 0h00000000, 0h416b5146, 0h00000000, 0hc21ae5b8, 0h00000000, + 0h41df5103, 0hc148c1c5, 0h41df5103, 0h00000000, 0h41df5103, 0h4148c1c5, 0h41df5103, 0h00000000, + 0h416b5146, 0h00000000, 0h421ae5b8, 0h00000000, 0h421ae5b8, 0h00000000, 0h416b5146, 0h00000000, + 0h41df5103, 0h41df5103, 0hc148c1c5, 0h00000000, 0h41df5103, 0h41df5103, 0h4148c1c5, 0h00000000, + 0h421ae5b8, 0h416b5146, 0h00000000, 0h00000000, 0h416b5146, 0h421ae5b8, 0h00000000, 0h00000000, + 0hc1df5103, 0h41df5103, 0hc148c1c5, 0h00000000, 0hc1df5103, 0h41df5103, 0h4148c1c5, 0h00000000, + 0hc16b5146, 0h421ae5b8, 0h00000000, 0h00000000, 0hc21ae5b8, 0h416b5146, 0h00000000, 0h00000000, + 0hc148c1c5, 0hc1df5103, 0hc1df5103, 0h00000000, 0h4148c1c5, 0hc1df5103, 0hc1df5103, 0h00000000, + 0h00000000, 0hc21ae5b8, 0hc16b5146, 0h00000000, 0h00000000, 0hc16b5146, 0hc21ae5b8, 0h00000000, + 0hc148c1c5, 0hc1df5103, 0h41df5103, 0h00000000, 0h4148c1c5, 0hc1df5103, 0h41df5103, 0h00000000, + 0h00000000, 0hc16b5146, 0h421ae5b8, 0h00000000, 0h00000000, 0hc21ae5b8, 0h416b5146, 0h00000000, + 0hc1df5103, 0hc1df5103, 0hc148c1c5, 0h00000000, 0hc1df5103, 0hc1df5103, 0h4148c1c5, 0h00000000, + 0hc21ae5b8, 0hc16b5146, 0h00000000, 0h00000000, 0hc16b5146, 0hc21ae5b8, 0h00000000, 0h00000000, + 0hc1df5103, 0hc148c1c5, 0hc1df5103, 0h00000000, 0hc1df5103, 0h4148c1c5, 0hc1df5103, 0h00000000, + 0hc16b5146, 0h00000000, 0hc21ae5b8, 0h00000000, 0hc21ae5b8, 0h00000000, 0hc16b5146, 0h00000000, + 0hc1df5103, 0hc148c1c5, 0h41df5103, 0h00000000, 0hc1df5103, 0h4148c1c5, 0h41df5103, 
0h00000000, + 0hc21ae5b8, 0h00000000, 0h416b5146, 0h00000000, 0hc16b5146, 0h00000000, 0h421ae5b8, 0h00000000, + 0hc148c1c5, 0h41df5103, 0hc1df5103, 0h00000000, 0h4148c1c5, 0h41df5103, 0hc1df5103, 0h00000000, + 0h00000000, 0h416b5146, 0hc21ae5b8, 0h00000000, 0h00000000, 0h421ae5b8, 0hc16b5146, 0h00000000, + 0hc148c1c5, 0h41df5103, 0h41df5103, 0h00000000, 0h4148c1c5, 0h41df5103, 0h41df5103, 0h00000000, + 0h00000000, 0h421ae5b8, 0h416b5146, 0h00000000, 0h00000000, 0h416b5146, 0h421ae5b8, 0h00000000, + 0h41df5103, 0hc1df5103, 0hc148c1c5, 0h00000000, 0h41df5103, 0hc1df5103, 0h4148c1c5, 0h00000000, + 0h416b5146, 0hc21ae5b8, 0h00000000, 0h00000000, 0h421ae5b8, 0hc16b5146, 0h00000000, 0h00000000, + 0h41df5103, 0hc148c1c5, 0hc1df5103, 0h00000000, 0h41df5103, 0h4148c1c5, 0hc1df5103, 0h00000000, + 0h421ae5b8, 0h00000000, 0hc16b5146, 0h00000000, 0h416b5146, 0h00000000, 0hc21ae5b8, 0h00000000, + 0h41df5103, 0hc148c1c5, 0h41df5103, 0h00000000, 0h41df5103, 0h4148c1c5, 0h41df5103, 0h00000000, + 0h416b5146, 0h00000000, 0h421ae5b8, 0h00000000, 0h421ae5b8, 0h00000000, 0h416b5146, 0h00000000, + 0h41df5103, 0h41df5103, 0hc148c1c5, 0h00000000, 0h41df5103, 0h41df5103, 0h4148c1c5, 0h00000000, + 0h421ae5b8, 0h416b5146, 0h00000000, 0h00000000, 0h416b5146, 0h421ae5b8, 0h00000000, 0h00000000, + 0hc1df5103, 0h41df5103, 0hc148c1c5, 0h00000000, 0hc1df5103, 0h41df5103, 0h4148c1c5, 0h00000000, + 0hc16b5146, 0h421ae5b8, 0h00000000, 0h00000000, 0hc21ae5b8, 0h416b5146, 0h00000000, 0h00000000, + 0hc148c1c5, 0hc1df5103, 0hc1df5103, 0h00000000, 0h4148c1c5, 0hc1df5103, 0hc1df5103, 0h00000000, + 0h00000000, 0hc21ae5b8, 0hc16b5146, 0h00000000, 0h00000000, 0hc16b5146, 0hc21ae5b8, 0h00000000, + 0hc148c1c5, 0hc1df5103, 0h41df5103, 0h00000000, 0h4148c1c5, 0hc1df5103, 0h41df5103, 0h00000000, + 0h00000000, 0hc16b5146, 0h421ae5b8, 0h00000000, 0h00000000, 0hc21ae5b8, 0h416b5146, 0h00000000, + 0hc1df5103, 0hc1df5103, 0hc148c1c5, 0h00000000, 0hc1df5103, 0hc1df5103, 0h4148c1c5, 0h00000000, + 0hc21ae5b8, 0hc16b5146, 0h00000000, 0h00000000, 0hc16b5146, 0hc21ae5b8, 0h00000000, 0h00000000, + 0hc1df5103, 0hc148c1c5, 0hc1df5103, 0h00000000, 0hc1df5103, 0h4148c1c5, 0hc1df5103, 0h00000000, + 0hc16b5146, 0h00000000, 0hc21ae5b8, 0h00000000, 0hc21ae5b8, 0h00000000, 0hc16b5146, 0h00000000, + 0hc1df5103, 0hc148c1c5, 0h41df5103, 0h00000000, 0hc1df5103, 0h4148c1c5, 0h41df5103, 0h00000000, + 0hc21ae5b8, 0h00000000, 0h416b5146, 0h00000000, 0hc16b5146, 0h00000000, 0h421ae5b8, 0h00000000, + 0hc148c1c5, 0h41df5103, 0hc1df5103, 0h00000000, 0h4148c1c5, 0h41df5103, 0hc1df5103, 0h00000000, + 0h00000000, 0h416b5146, 0hc21ae5b8, 0h00000000, 0h00000000, 0h421ae5b8, 0hc16b5146, 0h00000000, + 0hc148c1c5, 0h41df5103, 0h41df5103, 0h00000000, 0h4148c1c5, 0h41df5103, 0h41df5103, 0h00000000, + 0h00000000, 0h421ae5b8, 0h416b5146, 0h00000000, 0h00000000, 0h416b5146, 0h421ae5b8, 0h00000000, + 0h41df5103, 0hc1df5103, 0hc148c1c5, 0h00000000, 0h41df5103, 0hc1df5103, 0h4148c1c5, 0h00000000, + 0h416b5146, 0hc21ae5b8, 0h00000000, 0h00000000, 0h421ae5b8, 0hc16b5146, 0h00000000, 0h00000000, + 0h41df5103, 0hc148c1c5, 0hc1df5103, 0h00000000, 0h41df5103, 0h4148c1c5, 0hc1df5103, 0h00000000, + 0h421ae5b8, 0h00000000, 0hc16b5146, 0h00000000, 0h416b5146, 0h00000000, 0hc21ae5b8, 0h00000000, + 0h41df5103, 0hc148c1c5, 0h41df5103, 0h00000000, 0h41df5103, 0h4148c1c5, 0h41df5103, 0h00000000, + 0h416b5146, 0h00000000, 0h421ae5b8, 0h00000000, 0h421ae5b8, 0h00000000, 0h416b5146, 0h00000000, + 0h41df5103, 0h41df5103, 0hc148c1c5, 0h00000000, 0h41df5103, 0h41df5103, 0h4148c1c5, 0h00000000, + 0h421ae5b8, 
0h416b5146, 0h00000000, 0h00000000, 0h416b5146, 0h421ae5b8, 0h00000000, 0h00000000, + 0hc1df5103, 0h41df5103, 0hc148c1c5, 0h00000000, 0hc1df5103, 0h41df5103, 0h4148c1c5, 0h00000000, + 0hc16b5146, 0h421ae5b8, 0h00000000, 0h00000000, 0hc21ae5b8, 0h416b5146, 0h00000000, 0h00000000, + 0hc148c1c5, 0hc1df5103, 0hc1df5103, 0h00000000, 0h4148c1c5, 0hc1df5103, 0hc1df5103, 0h00000000, + 0h00000000, 0hc21ae5b8, 0hc16b5146, 0h00000000, 0h00000000, 0hc16b5146, 0hc21ae5b8, 0h00000000, + 0hc148c1c5, 0hc1df5103, 0h41df5103, 0h00000000, 0h4148c1c5, 0hc1df5103, 0h41df5103, 0h00000000, + 0h00000000, 0hc16b5146, 0h421ae5b8, 0h00000000, 0h00000000, 0hc21ae5b8, 0h416b5146, 0h00000000, + 0hc1df5103, 0hc1df5103, 0hc148c1c5, 0h00000000, 0hc1df5103, 0hc1df5103, 0h4148c1c5, 0h00000000, + 0hc21ae5b8, 0hc16b5146, 0h00000000, 0h00000000, 0hc16b5146, 0hc21ae5b8, 0h00000000, 0h00000000, + 0hc1df5103, 0hc148c1c5, 0hc1df5103, 0h00000000, 0hc1df5103, 0h4148c1c5, 0hc1df5103, 0h00000000, + 0hc16b5146, 0h00000000, 0hc21ae5b8, 0h00000000, 0hc21ae5b8, 0h00000000, 0hc16b5146, 0h00000000, + 0hc1df5103, 0hc148c1c5, 0h41df5103, 0h00000000, 0hc1df5103, 0h4148c1c5, 0h41df5103, 0h00000000, + 0hc21ae5b8, 0h00000000, 0h416b5146, 0h00000000, 0hc16b5146, 0h00000000, 0h421ae5b8, 0h00000000, + 0hc148c1c5, 0h41df5103, 0hc1df5103, 0h00000000, 0h4148c1c5, 0h41df5103, 0hc1df5103, 0h00000000, + 0h00000000, 0h416b5146, 0hc21ae5b8, 0h00000000, 0h00000000, 0h421ae5b8, 0hc16b5146, 0h00000000, + 0hc148c1c5, 0h41df5103, 0h41df5103, 0h00000000, 0h4148c1c5, 0h41df5103, 0h41df5103, 0h00000000, + 0h00000000, 0h421ae5b8, 0h416b5146, 0h00000000, 0h00000000, 0h416b5146, 0h421ae5b8, 0h00000000, + 0h41df5103, 0hc1df5103, 0hc148c1c5, 0h00000000, 0h41df5103, 0hc1df5103, 0h4148c1c5, 0h00000000, + 0h416b5146, 0hc21ae5b8, 0h00000000, 0h00000000, 0h421ae5b8, 0hc16b5146, 0h00000000, 0h00000000, + 0h41df5103, 0hc148c1c5, 0hc1df5103, 0h00000000, 0h41df5103, 0h4148c1c5, 0hc1df5103, 0h00000000, + 0h421ae5b8, 0h00000000, 0hc16b5146, 0h00000000, 0h416b5146, 0h00000000, 0hc21ae5b8, 0h00000000, + 0h41df5103, 0hc148c1c5, 0h41df5103, 0h00000000, 0h41df5103, 0h4148c1c5, 0h41df5103, 0h00000000, + 0h416b5146, 0h00000000, 0h421ae5b8, 0h00000000, 0h421ae5b8, 0h00000000, 0h416b5146, 0h00000000, + 0h41df5103, 0h41df5103, 0hc148c1c5, 0h00000000, 0h41df5103, 0h41df5103, 0h4148c1c5, 0h00000000, + 0h421ae5b8, 0h416b5146, 0h00000000, 0h00000000, 0h416b5146, 0h421ae5b8, 0h00000000, 0h00000000, + 0hc1df5103, 0h41df5103, 0hc148c1c5, 0h00000000, 0hc1df5103, 0h41df5103, 0h4148c1c5, 0h00000000, + 0hc16b5146, 0h421ae5b8, 0h00000000, 0h00000000, 0hc21ae5b8, 0h416b5146, 0h00000000, 0h00000000, + 0hc148c1c5, 0hc1df5103, 0hc1df5103, 0h00000000, 0h4148c1c5, 0hc1df5103, 0hc1df5103, 0h00000000, + 0h00000000, 0hc21ae5b8, 0hc16b5146, 0h00000000, 0h00000000, 0hc16b5146, 0hc21ae5b8, 0h00000000, + 0hc148c1c5, 0hc1df5103, 0h41df5103, 0h00000000, 0h4148c1c5, 0hc1df5103, 0h41df5103, 0h00000000, + 0h00000000, 0hc16b5146, 0h421ae5b8, 0h00000000, 0h00000000, 0hc21ae5b8, 0h416b5146, 0h00000000, +} + +GRADIENTS_4D := [N_GRADS_4D * 4]f32{ + 0hc1f50507, 0hc16b8e00, 0hc16b8e00, 0h41d2a716, 0hc208695c, 0hc19194b0, 0h40de6d7d, 0h41b6d966, + 0hc208695c, 0h40de6d7d, 0hc19194b0, 0h41b6d966, 0hc22076c5, 0h406d72bf, 0h406d72bf, 0h41a58418, + 0hc1a58418, 0hc06d72bf, 0hc06d72bf, 0h422076c5, 0hc1b6d966, 0hc0de6d7d, 0h419194b0, 0h4208695c, + 0hc1b6d966, 0h419194b0, 0hc0de6d7d, 0h4208695c, 0hc1d2a716, 0h416b8e00, 0h416b8e00, 0h41f50507, + 0hc1f50507, 0hc16b8e00, 0h41d2a716, 0hc16b8e00, 0hc208695c, 0hc19194b0, 0h41b6d966, 
0h40de6d7d, + 0hc208695c, 0h40de6d7d, 0h41b6d966, 0hc19194b0, 0hc22076c5, 0h406d72bf, 0h41a58418, 0h406d72bf, + 0hc1a58418, 0hc06d72bf, 0h422076c5, 0hc06d72bf, 0hc1b6d966, 0hc0de6d7d, 0h4208695c, 0h419194b0, + 0hc1b6d966, 0h419194b0, 0h4208695c, 0hc0de6d7d, 0hc1d2a716, 0h416b8e00, 0h41f50507, 0h416b8e00, + 0hc1f50507, 0h41d2a716, 0hc16b8e00, 0hc16b8e00, 0hc208695c, 0h41b6d966, 0hc19194b0, 0h40de6d7d, + 0hc208695c, 0h41b6d966, 0h40de6d7d, 0hc19194b0, 0hc22076c5, 0h41a58418, 0h406d72bf, 0h406d72bf, + 0hc1a58418, 0h422076c5, 0hc06d72bf, 0hc06d72bf, 0hc1b6d966, 0h4208695c, 0hc0de6d7d, 0h419194b0, + 0hc1b6d966, 0h4208695c, 0h419194b0, 0hc0de6d7d, 0hc1d2a716, 0h41f50507, 0h416b8e00, 0h416b8e00, + 0h41d2a716, 0hc1f50507, 0hc16b8e00, 0hc16b8e00, 0h41b6d966, 0hc208695c, 0hc19194b0, 0h40de6d7d, + 0h41b6d966, 0hc208695c, 0h40de6d7d, 0hc19194b0, 0h41a58418, 0hc22076c5, 0h406d72bf, 0h406d72bf, + 0h422076c5, 0hc1a58418, 0hc06d72bf, 0hc06d72bf, 0h4208695c, 0hc1b6d966, 0hc0de6d7d, 0h419194b0, + 0h4208695c, 0hc1b6d966, 0h419194b0, 0hc0de6d7d, 0h41f50507, 0hc1d2a716, 0h416b8e00, 0h416b8e00, + 0hc208ee18, 0hc18a0670, 0hc18a0670, 0hc18a0670, 0hc20e2b7a, 0hc19d18ee, 0hc19d18ee, 0h40b05c85, + 0hc20e2b7a, 0hc19d18ee, 0h40b05c85, 0hc19d18ee, 0hc20e2b7a, 0h40b05c85, 0hc19d18ee, 0hc19d18ee, + 0hc21c1252, 0hc1b8e69d, 0h40024b8d, 0h40024b8d, 0hc21c1252, 0h40024b8d, 0hc1b8e69d, 0h40024b8d, + 0hc21c1252, 0h40024b8d, 0h40024b8d, 0hc1b8e69d, 0hc235739c, 0hbfc4b564, 0hbfc4b564, 0hbfc4b564, + 0hc18a0670, 0hc208ee18, 0hc18a0670, 0hc18a0670, 0hc19d18ee, 0hc20e2b7a, 0hc19d18ee, 0h40b05c85, + 0hc19d18ee, 0hc20e2b7a, 0h40b05c85, 0hc19d18ee, 0h40b05c85, 0hc20e2b7a, 0hc19d18ee, 0hc19d18ee, + 0hc1b8e69d, 0hc21c1252, 0h40024b8d, 0h40024b8d, 0h40024b8d, 0hc21c1252, 0hc1b8e69d, 0h40024b8d, + 0h40024b8d, 0hc21c1252, 0h40024b8d, 0hc1b8e69d, 0hbfc4b564, 0hc235739c, 0hbfc4b564, 0hbfc4b564, + 0hc18a0670, 0hc18a0670, 0hc208ee18, 0hc18a0670, 0hc19d18ee, 0hc19d18ee, 0hc20e2b7a, 0h40b05c85, + 0hc19d18ee, 0h40b05c85, 0hc20e2b7a, 0hc19d18ee, 0h40b05c85, 0hc19d18ee, 0hc20e2b7a, 0hc19d18ee, + 0hc1b8e69d, 0h40024b8d, 0hc21c1252, 0h40024b8d, 0h40024b8d, 0hc1b8e69d, 0hc21c1252, 0h40024b8d, + 0h40024b8d, 0h40024b8d, 0hc21c1252, 0hc1b8e69d, 0hbfc4b564, 0hbfc4b564, 0hc235739c, 0hbfc4b564, + 0hc18a0670, 0hc18a0670, 0hc18a0670, 0hc208ee18, 0hc19d18ee, 0hc19d18ee, 0h40b05c85, 0hc20e2b7a, + 0hc19d18ee, 0h40b05c85, 0hc19d18ee, 0hc20e2b7a, 0h40b05c85, 0hc19d18ee, 0hc19d18ee, 0hc20e2b7a, + 0hc1b8e69d, 0h40024b8d, 0h40024b8d, 0hc21c1252, 0h40024b8d, 0hc1b8e69d, 0h40024b8d, 0hc21c1252, + 0h40024b8d, 0h40024b8d, 0hc1b8e69d, 0hc21c1252, 0hbfc4b564, 0hbfc4b564, 0hbfc4b564, 0hc235739c, + 0hc16b8e00, 0hc1f50507, 0hc16b8e00, 0h41d2a716, 0hc19194b0, 0hc208695c, 0h40de6d7d, 0h41b6d966, + 0h40de6d7d, 0hc208695c, 0hc19194b0, 0h41b6d966, 0h406d72bf, 0hc22076c5, 0h406d72bf, 0h41a58418, + 0hc06d72bf, 0hc1a58418, 0hc06d72bf, 0h422076c5, 0hc0de6d7d, 0hc1b6d966, 0h419194b0, 0h4208695c, + 0h419194b0, 0hc1b6d966, 0hc0de6d7d, 0h4208695c, 0h416b8e00, 0hc1d2a716, 0h416b8e00, 0h41f50507, + 0hc16b8e00, 0hc16b8e00, 0hc1f50507, 0h41d2a716, 0hc19194b0, 0h40de6d7d, 0hc208695c, 0h41b6d966, + 0h40de6d7d, 0hc19194b0, 0hc208695c, 0h41b6d966, 0h406d72bf, 0h406d72bf, 0hc22076c5, 0h41a58418, + 0hc06d72bf, 0hc06d72bf, 0hc1a58418, 0h422076c5, 0hc0de6d7d, 0h419194b0, 0hc1b6d966, 0h4208695c, + 0h419194b0, 0hc0de6d7d, 0hc1b6d966, 0h4208695c, 0h416b8e00, 0h416b8e00, 0hc1d2a716, 0h41f50507, + 0hc16b8e00, 0hc1f50507, 0h41d2a716, 0hc16b8e00, 0hc19194b0, 0hc208695c, 0h41b6d966, 0h40de6d7d, + 0h40de6d7d, 
0hc208695c, 0h41b6d966, 0hc19194b0, 0h406d72bf, 0hc22076c5, 0h41a58418, 0h406d72bf, + 0hc06d72bf, 0hc1a58418, 0h422076c5, 0hc06d72bf, 0hc0de6d7d, 0hc1b6d966, 0h4208695c, 0h419194b0, + 0h419194b0, 0hc1b6d966, 0h4208695c, 0hc0de6d7d, 0h416b8e00, 0hc1d2a716, 0h41f50507, 0h416b8e00, + 0hc16b8e00, 0hc16b8e00, 0h41d2a716, 0hc1f50507, 0hc19194b0, 0h40de6d7d, 0h41b6d966, 0hc208695c, + 0h40de6d7d, 0hc19194b0, 0h41b6d966, 0hc208695c, 0h406d72bf, 0h406d72bf, 0h41a58418, 0hc22076c5, + 0hc06d72bf, 0hc06d72bf, 0h422076c5, 0hc1a58418, 0hc0de6d7d, 0h419194b0, 0h4208695c, 0hc1b6d966, + 0h419194b0, 0hc0de6d7d, 0h4208695c, 0hc1b6d966, 0h416b8e00, 0h416b8e00, 0h41f50507, 0hc1d2a716, + 0hc16b8e00, 0h41d2a716, 0hc1f50507, 0hc16b8e00, 0hc19194b0, 0h41b6d966, 0hc208695c, 0h40de6d7d, + 0h40de6d7d, 0h41b6d966, 0hc208695c, 0hc19194b0, 0h406d72bf, 0h41a58418, 0hc22076c5, 0h406d72bf, + 0hc06d72bf, 0h422076c5, 0hc1a58418, 0hc06d72bf, 0hc0de6d7d, 0h4208695c, 0hc1b6d966, 0h419194b0, + 0h419194b0, 0h4208695c, 0hc1b6d966, 0hc0de6d7d, 0h416b8e00, 0h41f50507, 0hc1d2a716, 0h416b8e00, + 0hc16b8e00, 0h41d2a716, 0hc16b8e00, 0hc1f50507, 0hc19194b0, 0h41b6d966, 0h40de6d7d, 0hc208695c, + 0h40de6d7d, 0h41b6d966, 0hc19194b0, 0hc208695c, 0h406d72bf, 0h41a58418, 0h406d72bf, 0hc22076c5, + 0hc06d72bf, 0h422076c5, 0hc06d72bf, 0hc1a58418, 0hc0de6d7d, 0h4208695c, 0h419194b0, 0hc1b6d966, + 0h419194b0, 0h4208695c, 0hc0de6d7d, 0hc1b6d966, 0h416b8e00, 0h41f50507, 0h416b8e00, 0hc1d2a716, + 0h41d2a716, 0hc16b8e00, 0hc1f50507, 0hc16b8e00, 0h41b6d966, 0hc19194b0, 0hc208695c, 0h40de6d7d, + 0h41b6d966, 0h40de6d7d, 0hc208695c, 0hc19194b0, 0h41a58418, 0h406d72bf, 0hc22076c5, 0h406d72bf, + 0h422076c5, 0hc06d72bf, 0hc1a58418, 0hc06d72bf, 0h4208695c, 0hc0de6d7d, 0hc1b6d966, 0h419194b0, + 0h4208695c, 0h419194b0, 0hc1b6d966, 0hc0de6d7d, 0h41f50507, 0h416b8e00, 0hc1d2a716, 0h416b8e00, + 0h41d2a716, 0hc16b8e00, 0hc16b8e00, 0hc1f50507, 0h41b6d966, 0hc19194b0, 0h40de6d7d, 0hc208695c, + 0h41b6d966, 0h40de6d7d, 0hc19194b0, 0hc208695c, 0h41a58418, 0h406d72bf, 0h406d72bf, 0hc22076c5, + 0h422076c5, 0hc06d72bf, 0hc06d72bf, 0hc1a58418, 0h4208695c, 0hc0de6d7d, 0h419194b0, 0hc1b6d966, + 0h4208695c, 0h419194b0, 0hc0de6d7d, 0hc1b6d966, 0h41f50507, 0h416b8e00, 0h416b8e00, 0hc1d2a716, + 0h3fc4b564, 0h3fc4b564, 0h3fc4b564, 0h4235739c, 0hc0024b8d, 0hc0024b8d, 0h41b8e69d, 0h421c1252, + 0hc0024b8d, 0h41b8e69d, 0hc0024b8d, 0h421c1252, 0hc0b05c85, 0h419d18ee, 0h419d18ee, 0h420e2b7a, + 0h41b8e69d, 0hc0024b8d, 0hc0024b8d, 0h421c1252, 0h419d18ee, 0hc0b05c85, 0h419d18ee, 0h420e2b7a, + 0h419d18ee, 0h419d18ee, 0hc0b05c85, 0h420e2b7a, 0h418a0670, 0h418a0670, 0h418a0670, 0h4208ee18, + 0h3fc4b564, 0h3fc4b564, 0h4235739c, 0h3fc4b564, 0hc0024b8d, 0h40024b8d, 0h421c1252, 0h41b8e69d, + 0hc0024b8d, 0h41b8e69d, 0h421c1252, 0hc0024b8d, 0hc0b05c85, 0h419d18ee, 0h420e2b7a, 0h419d18ee, + 0h41b8e69d, 0hc0024b8d, 0h421c1252, 0hc0024b8d, 0h419d18ee, 0hc0b05c85, 0h420e2b7a, 0h419d18ee, + 0h419d18ee, 0h419d18ee, 0h420e2b7a, 0hc0b05c85, 0h418a0670, 0h418a0670, 0h4208ee18, 0h418a0670, + 0h3fc4b564, 0h4235739c, 0h3fc4b564, 0h3fc4b564, 0hc0024b8d, 0h421c1252, 0hc0024b8d, 0h41b8e69d, + 0hc0024b8d, 0h421c1252, 0h41b8e69d, 0hc0024b8d, 0hc0b05c85, 0h420e2b7a, 0h419d18ee, 0h419d18ee, + 0h41b8e69d, 0h421c1252, 0hc0024b8d, 0hc0024b8d, 0h419d18ee, 0h420e2b7a, 0hc0b05c85, 0h419d18ee, + 0h419d18ee, 0h420e2b7a, 0h419d18ee, 0hc0b05c85, 0h418a0670, 0h4208ee18, 0h418a0670, 0h418a0670, + 0h4235739c, 0h3fc4b564, 0h3fc4b564, 0h3fc4b564, 0h421c1252, 0hc0024b8d, 0hc0024b8d, 0h41b8e69d, + 0h421c1252, 0hc0024b8d, 0h41b8e69d, 
0hc0024b8d, 0h420e2b7a, 0hc0b05c85, 0h419d18ee, 0h419d18ee, + 0h421c1252, 0h41b8e69d, 0hc0024b8d, 0hc0024b8d, 0h420e2b7a, 0h419d18ee, 0hc0b05c85, 0h419d18ee, + 0h420e2b7a, 0h419d18ee, 0h419d18ee, 0hc0b05c85, 0h4208ee18, 0h418a0670, 0h418a0670, 0h418a0670, + 0hc1f50507, 0hc16b8e00, 0hc16b8e00, 0h41d2a716, 0hc208695c, 0hc19194b0, 0h40de6d7d, 0h41b6d966, + 0hc208695c, 0h40de6d7d, 0hc19194b0, 0h41b6d966, 0hc22076c5, 0h406d72bf, 0h406d72bf, 0h41a58418, + 0hc1a58418, 0hc06d72bf, 0hc06d72bf, 0h422076c5, 0hc1b6d966, 0hc0de6d7d, 0h419194b0, 0h4208695c, + 0hc1b6d966, 0h419194b0, 0hc0de6d7d, 0h4208695c, 0hc1d2a716, 0h416b8e00, 0h416b8e00, 0h41f50507, + 0hc1f50507, 0hc16b8e00, 0h41d2a716, 0hc16b8e00, 0hc208695c, 0hc19194b0, 0h41b6d966, 0h40de6d7d, + 0hc208695c, 0h40de6d7d, 0h41b6d966, 0hc19194b0, 0hc22076c5, 0h406d72bf, 0h41a58418, 0h406d72bf, + 0hc1a58418, 0hc06d72bf, 0h422076c5, 0hc06d72bf, 0hc1b6d966, 0hc0de6d7d, 0h4208695c, 0h419194b0, + 0hc1b6d966, 0h419194b0, 0h4208695c, 0hc0de6d7d, 0hc1d2a716, 0h416b8e00, 0h41f50507, 0h416b8e00, + 0hc1f50507, 0h41d2a716, 0hc16b8e00, 0hc16b8e00, 0hc208695c, 0h41b6d966, 0hc19194b0, 0h40de6d7d, + 0hc208695c, 0h41b6d966, 0h40de6d7d, 0hc19194b0, 0hc22076c5, 0h41a58418, 0h406d72bf, 0h406d72bf, + 0hc1a58418, 0h422076c5, 0hc06d72bf, 0hc06d72bf, 0hc1b6d966, 0h4208695c, 0hc0de6d7d, 0h419194b0, + 0hc1b6d966, 0h4208695c, 0h419194b0, 0hc0de6d7d, 0hc1d2a716, 0h41f50507, 0h416b8e00, 0h416b8e00, + 0h41d2a716, 0hc1f50507, 0hc16b8e00, 0hc16b8e00, 0h41b6d966, 0hc208695c, 0hc19194b0, 0h40de6d7d, + 0h41b6d966, 0hc208695c, 0h40de6d7d, 0hc19194b0, 0h41a58418, 0hc22076c5, 0h406d72bf, 0h406d72bf, + 0h422076c5, 0hc1a58418, 0hc06d72bf, 0hc06d72bf, 0h4208695c, 0hc1b6d966, 0hc0de6d7d, 0h419194b0, + 0h4208695c, 0hc1b6d966, 0h419194b0, 0hc0de6d7d, 0h41f50507, 0hc1d2a716, 0h416b8e00, 0h416b8e00, + 0hc208ee18, 0hc18a0670, 0hc18a0670, 0hc18a0670, 0hc20e2b7a, 0hc19d18ee, 0hc19d18ee, 0h40b05c85, + 0hc20e2b7a, 0hc19d18ee, 0h40b05c85, 0hc19d18ee, 0hc20e2b7a, 0h40b05c85, 0hc19d18ee, 0hc19d18ee, + 0hc21c1252, 0hc1b8e69d, 0h40024b8d, 0h40024b8d, 0hc21c1252, 0h40024b8d, 0hc1b8e69d, 0h40024b8d, + 0hc21c1252, 0h40024b8d, 0h40024b8d, 0hc1b8e69d, 0hc235739c, 0hbfc4b564, 0hbfc4b564, 0hbfc4b564, + 0hc18a0670, 0hc208ee18, 0hc18a0670, 0hc18a0670, 0hc19d18ee, 0hc20e2b7a, 0hc19d18ee, 0h40b05c85, + 0hc19d18ee, 0hc20e2b7a, 0h40b05c85, 0hc19d18ee, 0h40b05c85, 0hc20e2b7a, 0hc19d18ee, 0hc19d18ee, + 0hc1b8e69d, 0hc21c1252, 0h40024b8d, 0h40024b8d, 0h40024b8d, 0hc21c1252, 0hc1b8e69d, 0h40024b8d, + 0h40024b8d, 0hc21c1252, 0h40024b8d, 0hc1b8e69d, 0hbfc4b564, 0hc235739c, 0hbfc4b564, 0hbfc4b564, + 0hc18a0670, 0hc18a0670, 0hc208ee18, 0hc18a0670, 0hc19d18ee, 0hc19d18ee, 0hc20e2b7a, 0h40b05c85, + 0hc19d18ee, 0h40b05c85, 0hc20e2b7a, 0hc19d18ee, 0h40b05c85, 0hc19d18ee, 0hc20e2b7a, 0hc19d18ee, + 0hc1b8e69d, 0h40024b8d, 0hc21c1252, 0h40024b8d, 0h40024b8d, 0hc1b8e69d, 0hc21c1252, 0h40024b8d, + 0h40024b8d, 0h40024b8d, 0hc21c1252, 0hc1b8e69d, 0hbfc4b564, 0hbfc4b564, 0hc235739c, 0hbfc4b564, + 0hc18a0670, 0hc18a0670, 0hc18a0670, 0hc208ee18, 0hc19d18ee, 0hc19d18ee, 0h40b05c85, 0hc20e2b7a, + 0hc19d18ee, 0h40b05c85, 0hc19d18ee, 0hc20e2b7a, 0h40b05c85, 0hc19d18ee, 0hc19d18ee, 0hc20e2b7a, + 0hc1b8e69d, 0h40024b8d, 0h40024b8d, 0hc21c1252, 0h40024b8d, 0hc1b8e69d, 0h40024b8d, 0hc21c1252, + 0h40024b8d, 0h40024b8d, 0hc1b8e69d, 0hc21c1252, 0hbfc4b564, 0hbfc4b564, 0hbfc4b564, 0hc235739c, + 0hc16b8e00, 0hc1f50507, 0hc16b8e00, 0h41d2a716, 0hc19194b0, 0hc208695c, 0h40de6d7d, 0h41b6d966, + 0h40de6d7d, 0hc208695c, 0hc19194b0, 0h41b6d966, 0h406d72bf, 
0hc22076c5, 0h406d72bf, 0h41a58418, + 0hc06d72bf, 0hc1a58418, 0hc06d72bf, 0h422076c5, 0hc0de6d7d, 0hc1b6d966, 0h419194b0, 0h4208695c, + 0h419194b0, 0hc1b6d966, 0hc0de6d7d, 0h4208695c, 0h416b8e00, 0hc1d2a716, 0h416b8e00, 0h41f50507, + 0hc16b8e00, 0hc16b8e00, 0hc1f50507, 0h41d2a716, 0hc19194b0, 0h40de6d7d, 0hc208695c, 0h41b6d966, + 0h40de6d7d, 0hc19194b0, 0hc208695c, 0h41b6d966, 0h406d72bf, 0h406d72bf, 0hc22076c5, 0h41a58418, + 0hc06d72bf, 0hc06d72bf, 0hc1a58418, 0h422076c5, 0hc0de6d7d, 0h419194b0, 0hc1b6d966, 0h4208695c, + 0h419194b0, 0hc0de6d7d, 0hc1b6d966, 0h4208695c, 0h416b8e00, 0h416b8e00, 0hc1d2a716, 0h41f50507, + 0hc16b8e00, 0hc1f50507, 0h41d2a716, 0hc16b8e00, 0hc19194b0, 0hc208695c, 0h41b6d966, 0h40de6d7d, + 0h40de6d7d, 0hc208695c, 0h41b6d966, 0hc19194b0, 0h406d72bf, 0hc22076c5, 0h41a58418, 0h406d72bf, + 0hc06d72bf, 0hc1a58418, 0h422076c5, 0hc06d72bf, 0hc0de6d7d, 0hc1b6d966, 0h4208695c, 0h419194b0, + 0h419194b0, 0hc1b6d966, 0h4208695c, 0hc0de6d7d, 0h416b8e00, 0hc1d2a716, 0h41f50507, 0h416b8e00, + 0hc16b8e00, 0hc16b8e00, 0h41d2a716, 0hc1f50507, 0hc19194b0, 0h40de6d7d, 0h41b6d966, 0hc208695c, + 0h40de6d7d, 0hc19194b0, 0h41b6d966, 0hc208695c, 0h406d72bf, 0h406d72bf, 0h41a58418, 0hc22076c5, + 0hc06d72bf, 0hc06d72bf, 0h422076c5, 0hc1a58418, 0hc0de6d7d, 0h419194b0, 0h4208695c, 0hc1b6d966, + 0h419194b0, 0hc0de6d7d, 0h4208695c, 0hc1b6d966, 0h416b8e00, 0h416b8e00, 0h41f50507, 0hc1d2a716, + 0hc16b8e00, 0h41d2a716, 0hc1f50507, 0hc16b8e00, 0hc19194b0, 0h41b6d966, 0hc208695c, 0h40de6d7d, + 0h40de6d7d, 0h41b6d966, 0hc208695c, 0hc19194b0, 0h406d72bf, 0h41a58418, 0hc22076c5, 0h406d72bf, + 0hc06d72bf, 0h422076c5, 0hc1a58418, 0hc06d72bf, 0hc0de6d7d, 0h4208695c, 0hc1b6d966, 0h419194b0, + 0h419194b0, 0h4208695c, 0hc1b6d966, 0hc0de6d7d, 0h416b8e00, 0h41f50507, 0hc1d2a716, 0h416b8e00, + 0hc16b8e00, 0h41d2a716, 0hc16b8e00, 0hc1f50507, 0hc19194b0, 0h41b6d966, 0h40de6d7d, 0hc208695c, + 0h40de6d7d, 0h41b6d966, 0hc19194b0, 0hc208695c, 0h406d72bf, 0h41a58418, 0h406d72bf, 0hc22076c5, + 0hc06d72bf, 0h422076c5, 0hc06d72bf, 0hc1a58418, 0hc0de6d7d, 0h4208695c, 0h419194b0, 0hc1b6d966, + 0h419194b0, 0h4208695c, 0hc0de6d7d, 0hc1b6d966, 0h416b8e00, 0h41f50507, 0h416b8e00, 0hc1d2a716, + 0h41d2a716, 0hc16b8e00, 0hc1f50507, 0hc16b8e00, 0h41b6d966, 0hc19194b0, 0hc208695c, 0h40de6d7d, + 0h41b6d966, 0h40de6d7d, 0hc208695c, 0hc19194b0, 0h41a58418, 0h406d72bf, 0hc22076c5, 0h406d72bf, + 0h422076c5, 0hc06d72bf, 0hc1a58418, 0hc06d72bf, 0h4208695c, 0hc0de6d7d, 0hc1b6d966, 0h419194b0, + 0h4208695c, 0h419194b0, 0hc1b6d966, 0hc0de6d7d, 0h41f50507, 0h416b8e00, 0hc1d2a716, 0h416b8e00, + 0h41d2a716, 0hc16b8e00, 0hc16b8e00, 0hc1f50507, 0h41b6d966, 0hc19194b0, 0h40de6d7d, 0hc208695c, + 0h41b6d966, 0h40de6d7d, 0hc19194b0, 0hc208695c, 0h41a58418, 0h406d72bf, 0h406d72bf, 0hc22076c5, + 0h422076c5, 0hc06d72bf, 0hc06d72bf, 0hc1a58418, 0h4208695c, 0hc0de6d7d, 0h419194b0, 0hc1b6d966, + 0h4208695c, 0h419194b0, 0hc0de6d7d, 0hc1b6d966, 0h41f50507, 0h416b8e00, 0h416b8e00, 0hc1d2a716, + 0h3fc4b564, 0h3fc4b564, 0h3fc4b564, 0h4235739c, 0hc0024b8d, 0hc0024b8d, 0h41b8e69d, 0h421c1252, + 0hc0024b8d, 0h41b8e69d, 0hc0024b8d, 0h421c1252, 0hc0b05c85, 0h419d18ee, 0h419d18ee, 0h420e2b7a, + 0h41b8e69d, 0hc0024b8d, 0hc0024b8d, 0h421c1252, 0h419d18ee, 0hc0b05c85, 0h419d18ee, 0h420e2b7a, + 0h419d18ee, 0h419d18ee, 0hc0b05c85, 0h420e2b7a, 0h418a0670, 0h418a0670, 0h418a0670, 0h4208ee18, + 0h3fc4b564, 0h3fc4b564, 0h4235739c, 0h3fc4b564, 0hc0024b8d, 0h40024b8d, 0h421c1252, 0h41b8e69d, + 0hc0024b8d, 0h41b8e69d, 0h421c1252, 0hc0024b8d, 0hc0b05c85, 0h419d18ee, 0h420e2b7a, 
0h419d18ee, + 0h41b8e69d, 0hc0024b8d, 0h421c1252, 0hc0024b8d, 0h419d18ee, 0hc0b05c85, 0h420e2b7a, 0h419d18ee, + 0h419d18ee, 0h419d18ee, 0h420e2b7a, 0hc0b05c85, 0h418a0670, 0h418a0670, 0h4208ee18, 0h418a0670, + 0h3fc4b564, 0h4235739c, 0h3fc4b564, 0h3fc4b564, 0hc0024b8d, 0h421c1252, 0hc0024b8d, 0h41b8e69d, + 0hc0024b8d, 0h421c1252, 0h41b8e69d, 0hc0024b8d, 0hc0b05c85, 0h420e2b7a, 0h419d18ee, 0h419d18ee, + 0h41b8e69d, 0h421c1252, 0hc0024b8d, 0hc0024b8d, 0h419d18ee, 0h420e2b7a, 0hc0b05c85, 0h419d18ee, + 0h419d18ee, 0h420e2b7a, 0h419d18ee, 0hc0b05c85, 0h418a0670, 0h4208ee18, 0h418a0670, 0h418a0670, + 0h4235739c, 0h3fc4b564, 0h3fc4b564, 0h3fc4b564, 0h421c1252, 0hc0024b8d, 0hc0024b8d, 0h41b8e69d, + 0h421c1252, 0hc0024b8d, 0h41b8e69d, 0hc0024b8d, 0h420e2b7a, 0hc0b05c85, 0h419d18ee, 0h419d18ee, + 0h421c1252, 0h41b8e69d, 0hc0024b8d, 0hc0024b8d, 0h420e2b7a, 0h419d18ee, 0hc0b05c85, 0h419d18ee, + 0h420e2b7a, 0h419d18ee, 0h419d18ee, 0hc0b05c85, 0h4208ee18, 0h418a0670, 0h418a0670, 0h418a0670, + 0hc1f50507, 0hc16b8e00, 0hc16b8e00, 0h41d2a716, 0hc208695c, 0hc19194b0, 0h40de6d7d, 0h41b6d966, + 0hc208695c, 0h40de6d7d, 0hc19194b0, 0h41b6d966, 0hc22076c5, 0h406d72bf, 0h406d72bf, 0h41a58418, + 0hc1a58418, 0hc06d72bf, 0hc06d72bf, 0h422076c5, 0hc1b6d966, 0hc0de6d7d, 0h419194b0, 0h4208695c, + 0hc1b6d966, 0h419194b0, 0hc0de6d7d, 0h4208695c, 0hc1d2a716, 0h416b8e00, 0h416b8e00, 0h41f50507, + 0hc1f50507, 0hc16b8e00, 0h41d2a716, 0hc16b8e00, 0hc208695c, 0hc19194b0, 0h41b6d966, 0h40de6d7d, + 0hc208695c, 0h40de6d7d, 0h41b6d966, 0hc19194b0, 0hc22076c5, 0h406d72bf, 0h41a58418, 0h406d72bf, + 0hc1a58418, 0hc06d72bf, 0h422076c5, 0hc06d72bf, 0hc1b6d966, 0hc0de6d7d, 0h4208695c, 0h419194b0, + 0hc1b6d966, 0h419194b0, 0h4208695c, 0hc0de6d7d, 0hc1d2a716, 0h416b8e00, 0h41f50507, 0h416b8e00, + 0hc1f50507, 0h41d2a716, 0hc16b8e00, 0hc16b8e00, 0hc208695c, 0h41b6d966, 0hc19194b0, 0h40de6d7d, + 0hc208695c, 0h41b6d966, 0h40de6d7d, 0hc19194b0, 0hc22076c5, 0h41a58418, 0h406d72bf, 0h406d72bf, + 0hc1a58418, 0h422076c5, 0hc06d72bf, 0hc06d72bf, 0hc1b6d966, 0h4208695c, 0hc0de6d7d, 0h419194b0, + 0hc1b6d966, 0h4208695c, 0h419194b0, 0hc0de6d7d, 0hc1d2a716, 0h41f50507, 0h416b8e00, 0h416b8e00, + 0h41d2a716, 0hc1f50507, 0hc16b8e00, 0hc16b8e00, 0h41b6d966, 0hc208695c, 0hc19194b0, 0h40de6d7d, + 0h41b6d966, 0hc208695c, 0h40de6d7d, 0hc19194b0, 0h41a58418, 0hc22076c5, 0h406d72bf, 0h406d72bf, + 0h422076c5, 0hc1a58418, 0hc06d72bf, 0hc06d72bf, 0h4208695c, 0hc1b6d966, 0hc0de6d7d, 0h419194b0, + 0h4208695c, 0hc1b6d966, 0h419194b0, 0hc0de6d7d, 0h41f50507, 0hc1d2a716, 0h416b8e00, 0h416b8e00, + 0hc208ee18, 0hc18a0670, 0hc18a0670, 0hc18a0670, 0hc20e2b7a, 0hc19d18ee, 0hc19d18ee, 0h40b05c85, + 0hc20e2b7a, 0hc19d18ee, 0h40b05c85, 0hc19d18ee, 0hc20e2b7a, 0h40b05c85, 0hc19d18ee, 0hc19d18ee, + 0hc21c1252, 0hc1b8e69d, 0h40024b8d, 0h40024b8d, 0hc21c1252, 0h40024b8d, 0hc1b8e69d, 0h40024b8d, + 0hc21c1252, 0h40024b8d, 0h40024b8d, 0hc1b8e69d, 0hc235739c, 0hbfc4b564, 0hbfc4b564, 0hbfc4b564, + 0hc18a0670, 0hc208ee18, 0hc18a0670, 0hc18a0670, 0hc19d18ee, 0hc20e2b7a, 0hc19d18ee, 0h40b05c85, + 0hc19d18ee, 0hc20e2b7a, 0h40b05c85, 0hc19d18ee, 0h40b05c85, 0hc20e2b7a, 0hc19d18ee, 0hc19d18ee, + 0hc1b8e69d, 0hc21c1252, 0h40024b8d, 0h40024b8d, 0h40024b8d, 0hc21c1252, 0hc1b8e69d, 0h40024b8d, + 0h40024b8d, 0hc21c1252, 0h40024b8d, 0hc1b8e69d, 0hbfc4b564, 0hc235739c, 0hbfc4b564, 0hbfc4b564, + 0hc18a0670, 0hc18a0670, 0hc208ee18, 0hc18a0670, 0hc19d18ee, 0hc19d18ee, 0hc20e2b7a, 0h40b05c85, + 0hc19d18ee, 0h40b05c85, 0hc20e2b7a, 0hc19d18ee, 0h40b05c85, 0hc19d18ee, 0hc20e2b7a, 0hc19d18ee, + 0hc1b8e69d, 
0h40024b8d, 0hc21c1252, 0h40024b8d, 0h40024b8d, 0hc1b8e69d, 0hc21c1252, 0h40024b8d, + 0h40024b8d, 0h40024b8d, 0hc21c1252, 0hc1b8e69d, 0hbfc4b564, 0hbfc4b564, 0hc235739c, 0hbfc4b564, + 0hc18a0670, 0hc18a0670, 0hc18a0670, 0hc208ee18, 0hc19d18ee, 0hc19d18ee, 0h40b05c85, 0hc20e2b7a, + 0hc19d18ee, 0h40b05c85, 0hc19d18ee, 0hc20e2b7a, 0h40b05c85, 0hc19d18ee, 0hc19d18ee, 0hc20e2b7a, + 0hc1b8e69d, 0h40024b8d, 0h40024b8d, 0hc21c1252, 0h40024b8d, 0hc1b8e69d, 0h40024b8d, 0hc21c1252, + 0h40024b8d, 0h40024b8d, 0hc1b8e69d, 0hc21c1252, 0hbfc4b564, 0hbfc4b564, 0hbfc4b564, 0hc235739c, + 0hc16b8e00, 0hc1f50507, 0hc16b8e00, 0h41d2a716, 0hc19194b0, 0hc208695c, 0h40de6d7d, 0h41b6d966, + 0h40de6d7d, 0hc208695c, 0hc19194b0, 0h41b6d966, 0h406d72bf, 0hc22076c5, 0h406d72bf, 0h41a58418, + 0hc06d72bf, 0hc1a58418, 0hc06d72bf, 0h422076c5, 0hc0de6d7d, 0hc1b6d966, 0h419194b0, 0h4208695c, + 0h419194b0, 0hc1b6d966, 0hc0de6d7d, 0h4208695c, 0h416b8e00, 0hc1d2a716, 0h416b8e00, 0h41f50507, + 0hc16b8e00, 0hc16b8e00, 0hc1f50507, 0h41d2a716, 0hc19194b0, 0h40de6d7d, 0hc208695c, 0h41b6d966, + 0h40de6d7d, 0hc19194b0, 0hc208695c, 0h41b6d966, 0h406d72bf, 0h406d72bf, 0hc22076c5, 0h41a58418, + 0hc06d72bf, 0hc06d72bf, 0hc1a58418, 0h422076c5, 0hc0de6d7d, 0h419194b0, 0hc1b6d966, 0h4208695c, + 0h419194b0, 0hc0de6d7d, 0hc1b6d966, 0h4208695c, 0h416b8e00, 0h416b8e00, 0hc1d2a716, 0h41f50507, + 0hc16b8e00, 0hc1f50507, 0h41d2a716, 0hc16b8e00, 0hc19194b0, 0hc208695c, 0h41b6d966, 0h40de6d7d, + 0h40de6d7d, 0hc208695c, 0h41b6d966, 0hc19194b0, 0h406d72bf, 0hc22076c5, 0h41a58418, 0h406d72bf, + 0hc06d72bf, 0hc1a58418, 0h422076c5, 0hc06d72bf, 0hc0de6d7d, 0hc1b6d966, 0h4208695c, 0h419194b0, + 0h419194b0, 0hc1b6d966, 0h4208695c, 0hc0de6d7d, 0h416b8e00, 0hc1d2a716, 0h41f50507, 0h416b8e00, + 0hc16b8e00, 0hc16b8e00, 0h41d2a716, 0hc1f50507, 0hc19194b0, 0h40de6d7d, 0h41b6d966, 0hc208695c, + 0h40de6d7d, 0hc19194b0, 0h41b6d966, 0hc208695c, 0h406d72bf, 0h406d72bf, 0h41a58418, 0hc22076c5, + 0hc06d72bf, 0hc06d72bf, 0h422076c5, 0hc1a58418, 0hc0de6d7d, 0h419194b0, 0h4208695c, 0hc1b6d966, + 0h419194b0, 0hc0de6d7d, 0h4208695c, 0hc1b6d966, 0h416b8e00, 0h416b8e00, 0h41f50507, 0hc1d2a716, + 0hc16b8e00, 0h41d2a716, 0hc1f50507, 0hc16b8e00, 0hc19194b0, 0h41b6d966, 0hc208695c, 0h40de6d7d, + 0h40de6d7d, 0h41b6d966, 0hc208695c, 0hc19194b0, 0h406d72bf, 0h41a58418, 0hc22076c5, 0h406d72bf, + 0hc06d72bf, 0h422076c5, 0hc1a58418, 0hc06d72bf, 0hc0de6d7d, 0h4208695c, 0hc1b6d966, 0h419194b0, + 0h419194b0, 0h4208695c, 0hc1b6d966, 0hc0de6d7d, 0h416b8e00, 0h41f50507, 0hc1d2a716, 0h416b8e00, + 0hc16b8e00, 0h41d2a716, 0hc16b8e00, 0hc1f50507, 0hc19194b0, 0h41b6d966, 0h40de6d7d, 0hc208695c, + 0h40de6d7d, 0h41b6d966, 0hc19194b0, 0hc208695c, 0h406d72bf, 0h41a58418, 0h406d72bf, 0hc22076c5, + 0hc06d72bf, 0h422076c5, 0hc06d72bf, 0hc1a58418, 0hc0de6d7d, 0h4208695c, 0h419194b0, 0hc1b6d966, + 0h419194b0, 0h4208695c, 0hc0de6d7d, 0hc1b6d966, 0h416b8e00, 0h41f50507, 0h416b8e00, 0hc1d2a716, + 0h41d2a716, 0hc16b8e00, 0hc1f50507, 0hc16b8e00, 0h41b6d966, 0hc19194b0, 0hc208695c, 0h40de6d7d, + 0h41b6d966, 0h40de6d7d, 0hc208695c, 0hc19194b0, 0h41a58418, 0h406d72bf, 0hc22076c5, 0h406d72bf, + 0h422076c5, 0hc06d72bf, 0hc1a58418, 0hc06d72bf, 0h4208695c, 0hc0de6d7d, 0hc1b6d966, 0h419194b0, + 0h4208695c, 0h419194b0, 0hc1b6d966, 0hc0de6d7d, 0h41f50507, 0h416b8e00, 0hc1d2a716, 0h416b8e00, + 0h41d2a716, 0hc16b8e00, 0hc16b8e00, 0hc1f50507, 0h41b6d966, 0hc19194b0, 0h40de6d7d, 0hc208695c, + 0h41b6d966, 0h40de6d7d, 0hc19194b0, 0hc208695c, 0h41a58418, 0h406d72bf, 0h406d72bf, 0hc22076c5, + 0h422076c5, 0hc06d72bf, 0hc06d72bf, 
0hc1a58418, 0h4208695c, 0hc0de6d7d, 0h419194b0, 0hc1b6d966, + 0h4208695c, 0h419194b0, 0hc0de6d7d, 0hc1b6d966, 0h41f50507, 0h416b8e00, 0h416b8e00, 0hc1d2a716, + 0h3fc4b564, 0h3fc4b564, 0h3fc4b564, 0h4235739c, 0hc0024b8d, 0hc0024b8d, 0h41b8e69d, 0h421c1252, + 0hc0024b8d, 0h41b8e69d, 0hc0024b8d, 0h421c1252, 0hc0b05c85, 0h419d18ee, 0h419d18ee, 0h420e2b7a, + 0h41b8e69d, 0hc0024b8d, 0hc0024b8d, 0h421c1252, 0h419d18ee, 0hc0b05c85, 0h419d18ee, 0h420e2b7a, + 0h419d18ee, 0h419d18ee, 0hc0b05c85, 0h420e2b7a, 0h418a0670, 0h418a0670, 0h418a0670, 0h4208ee18, + 0h3fc4b564, 0h3fc4b564, 0h4235739c, 0h3fc4b564, 0hc0024b8d, 0h40024b8d, 0h421c1252, 0h41b8e69d, + 0hc0024b8d, 0h41b8e69d, 0h421c1252, 0hc0024b8d, 0hc0b05c85, 0h419d18ee, 0h420e2b7a, 0h419d18ee, + 0h41b8e69d, 0hc0024b8d, 0h421c1252, 0hc0024b8d, 0h419d18ee, 0hc0b05c85, 0h420e2b7a, 0h419d18ee, + 0h419d18ee, 0h419d18ee, 0h420e2b7a, 0hc0b05c85, 0h418a0670, 0h418a0670, 0h4208ee18, 0h418a0670, + 0h3fc4b564, 0h4235739c, 0h3fc4b564, 0h3fc4b564, 0hc0024b8d, 0h421c1252, 0hc0024b8d, 0h41b8e69d, + 0hc0024b8d, 0h421c1252, 0h41b8e69d, 0hc0024b8d, 0hc0b05c85, 0h420e2b7a, 0h419d18ee, 0h419d18ee, + 0h41b8e69d, 0h421c1252, 0hc0024b8d, 0hc0024b8d, 0h419d18ee, 0h420e2b7a, 0hc0b05c85, 0h419d18ee, + 0h419d18ee, 0h420e2b7a, 0h419d18ee, 0hc0b05c85, 0h418a0670, 0h4208ee18, 0h418a0670, 0h418a0670, + 0h4235739c, 0h3fc4b564, 0h3fc4b564, 0h3fc4b564, 0h421c1252, 0hc0024b8d, 0hc0024b8d, 0h41b8e69d, + 0h421c1252, 0hc0024b8d, 0h41b8e69d, 0hc0024b8d, 0h420e2b7a, 0hc0b05c85, 0h419d18ee, 0h419d18ee, + 0h421c1252, 0h41b8e69d, 0hc0024b8d, 0hc0024b8d, 0h420e2b7a, 0h419d18ee, 0hc0b05c85, 0h419d18ee, + 0h420e2b7a, 0h419d18ee, 0h419d18ee, 0hc0b05c85, 0h4208ee18, 0h418a0670, 0h418a0670, 0h418a0670, + 0hc1f50507, 0hc16b8e00, 0hc16b8e00, 0h41d2a716, 0hc208695c, 0hc19194b0, 0h40de6d7d, 0h41b6d966, + 0hc208695c, 0h40de6d7d, 0hc19194b0, 0h41b6d966, 0hc22076c5, 0h406d72bf, 0h406d72bf, 0h41a58418, + 0hc1a58418, 0hc06d72bf, 0hc06d72bf, 0h422076c5, 0hc1b6d966, 0hc0de6d7d, 0h419194b0, 0h4208695c, + 0hc1b6d966, 0h419194b0, 0hc0de6d7d, 0h4208695c, 0hc1d2a716, 0h416b8e00, 0h416b8e00, 0h41f50507, + 0hc1f50507, 0hc16b8e00, 0h41d2a716, 0hc16b8e00, 0hc208695c, 0hc19194b0, 0h41b6d966, 0h40de6d7d, + 0hc208695c, 0h40de6d7d, 0h41b6d966, 0hc19194b0, 0hc22076c5, 0h406d72bf, 0h41a58418, 0h406d72bf, + 0hc1a58418, 0hc06d72bf, 0h422076c5, 0hc06d72bf, 0hc1b6d966, 0hc0de6d7d, 0h4208695c, 0h419194b0, + 0hc1b6d966, 0h419194b0, 0h4208695c, 0hc0de6d7d, 0hc1d2a716, 0h416b8e00, 0h41f50507, 0h416b8e00, + 0hc1f50507, 0h41d2a716, 0hc16b8e00, 0hc16b8e00, 0hc208695c, 0h41b6d966, 0hc19194b0, 0h40de6d7d, + 0hc208695c, 0h41b6d966, 0h40de6d7d, 0hc19194b0, 0hc22076c5, 0h41a58418, 0h406d72bf, 0h406d72bf, + 0hc1a58418, 0h422076c5, 0hc06d72bf, 0hc06d72bf, 0hc1b6d966, 0h4208695c, 0hc0de6d7d, 0h419194b0, + 0hc1b6d966, 0h4208695c, 0h419194b0, 0hc0de6d7d, 0hc1d2a716, 0h41f50507, 0h416b8e00, 0h416b8e00, + 0h41d2a716, 0hc1f50507, 0hc16b8e00, 0hc16b8e00, 0h41b6d966, 0hc208695c, 0hc19194b0, 0h40de6d7d, + 0h41b6d966, 0hc208695c, 0h40de6d7d, 0hc19194b0, 0h41a58418, 0hc22076c5, 0h406d72bf, 0h406d72bf, + 0h422076c5, 0hc1a58418, 0hc06d72bf, 0hc06d72bf, 0h4208695c, 0hc1b6d966, 0hc0de6d7d, 0h419194b0, + 0h4208695c, 0hc1b6d966, 0h419194b0, 0hc0de6d7d, 0h41f50507, 0hc1d2a716, 0h416b8e00, 0h416b8e00, +} + +/* + 2D Simplex noise base. +*/ +_internal_noise_2d_unskewed_base :: proc(seed: i64, coord: Vec2) -> (value: f32) { + // Get base points and offsets. 
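+	// base is the integer lattice cell containing the input point (floor of each coordinate);
+	// i holds the fractional offsets within that cell, narrowed to f32 for the unskew and falloff math below.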
+ base := [2]i64{fast_floor(coord.x), fast_floor(coord.y)} + i := [2]f32{f32(coord.x - f64(base.x)), f32(coord.y - f64(base.y))} + + // Prime pre-multiplication for hash. + bp := base * [2]i64{PRIME_X, PRIME_Y} + + // Unskew. + t := f32(i.x + i.y) * f32(UNSKEW_2D) + d0 := i + [2]f32{t, t} + + // First vertex. + a0 := RSQUARED_2D - d0.x * d0.x - d0.y * d0.y + if a0 > 0 { + value = (a0 * a0) * (a0 * a0) * grad(seed, [2]i64{bp.x, bp.y}, d0) + } + + // Second vertex. + a1 := f32(2 * (1 + 2 * UNSKEW_2D) * (1 / UNSKEW_2D + 2)) * t + f32(-2 * (1 + 2 * UNSKEW_2D) * (1 + 2 * UNSKEW_2D)) + a0 + if a1 > 0 { + d1 := d0 - [2]f32{f32(1 + 2 * UNSKEW_2D), f32(1 + 2 * UNSKEW_2D)} + value += (a1 * a1) * (a1 * a1) * grad(seed, [2]i64{bp.x + PRIME_X, bp.y + PRIME_Y}, d1) + } + + // Third vertex. + if d0.y > d0.x { + d2 := d0 - [2]f32{f32(UNSKEW_2D), f32(UNSKEW_2D + 1)} + a2 := RSQUARED_2D - d2.x * d2.x - d2.y * d2.y + if(a2 > 0) { + value += (a2 * a2) * (a2 * a2) * grad(seed, [2]i64{bp.x, bp.y + PRIME_Y}, d2) + } + } else { + d2 := d0 - [2]f32{f32(UNSKEW_2D + 1), f32(UNSKEW_2D)} + a2 := RSQUARED_2D - d2.x * d2.x - d2.y * d2.y + if(a2 > 0) { + value += (a2 * a2) * (a2 * a2) * grad(seed, [2]i64{bp.x + PRIME_X, bp.y}, d2) + } + } + + return +} + + +/* + Generate overlapping cubic lattices for 3D OpenSimplex2 noise. +*/ +_internal_noise_3d_unrotated_base :: proc(seed: i64, coord: Vec3) -> (value: f32) { + seed := seed + // Get base points and offsets. + // xr, yr, zr := coord.x, coord.y, coord.z + + rb := [3]i64{fast_round(coord.x), fast_round(coord.y), fast_round(coord.z)} + ri := [3]f32{f32(coord.x - f64(rb.x)), f32(coord.y - f64(rb.y)), f32(coord.z - f64(rb.z))} + + // -1 if positive, 1 if negative. + i_sign := [3]i64{i64(-1.0 - ri.x) | 1, i64(-1.0 - ri.y) | 1, i64(-1.0 - ri.z) | 1} + f_sign := [3]f32{f32(i_sign.x), f32(i_sign.y), f32(i_sign.z)} + + // Compute absolute values, using the above as a shortcut. This was faster in my tests for some reason. + a0 := f_sign * -ri + + // Prime pre-multiplication for hash. + rbp := rb * [3]i64{PRIME_X, PRIME_Y, PRIME_Z} + + // Loop: Pick an edge on each lattice copy. + a := (RSQUARED_3D - ri.x * ri.x) - (ri.y * ri.y + ri.z * ri.z) + + l := 0 + for { + defer l += 1 + + // Closest point on cube. + if a > 0 { + a2 := a * a; a4 := a2 * a2 + value += a4 * grad(seed, rbp, ri) + } + + // Second-closest point. + if a0.x >= a0.y && a0.x >= a0.z { + b := a + a0.x + a0.x + if b > 1 { + b -= 1 + b2 := b * b; b4 := b2 * b2 + value += b4 * grad(seed, [3]i64{rbp.x - i_sign.x * PRIME_X, rbp.y, rbp.z}, [3]f32{ri.x + f_sign.x, ri.y, ri.z}) + } + } else if a0.y > a0.x && a0.y >= a0.z { + b := a + a0.y + a0.y + if b > 1 { + b -= 1 + b2 := b * b; b4 := b2 * b2 + value += b4 * grad(seed, [3]i64{rbp.x, rbp.y - i_sign.y * PRIME_Y, rbp.z}, [3]f32{ri.x, ri.y + f_sign.y, ri.z}) + } + } else { + b := a + a0.z + a0.z + if b > 1 { + b -= 1 + b2 := b * b; b4 := b2 * b2 + value += b4 * grad(seed, [3]i64{rbp.x, rbp.y, rbp.z - i_sign.z * PRIME_Z}, [3]f32{ri.x, ri.y, ri.z + f_sign.z}) + } + } + + // Break from loop if we're done, skipping updates below. + if l == 1 { + break + } + + // Update absolute value. + a0 = 0.5 - a0 + + // Update relative coordinate. + ri = a0 * f_sign + + // Update falloff. + a += (0.75 - a0.x) - (a0.y + a0.z) + + // Update prime for hash. + rbp += [3]i64{i_sign.x >> 1, i_sign.y >> 1, i_sign.z >> 1} & {PRIME_X, PRIME_Y, PRIME_Z} + + // Update the reverse sign indicators. + i_sign = -i_sign + f_sign = -f_sign + + // And finally update the seed for the other lattice copy. 
+		seed ~= SEED_FLIP_3D
+	}
+
+	return value
+}
+
+/*
+	4D OpenSimplex2 noise base.
+*/
+_internal_noise_4d_unskewed_base :: proc(seed: i64, coord: Vec4) -> (value: f32) {
+	seed := seed
+
+	// Get base points and offsets
+	base := [4]i64{fast_floor(coord.x), fast_floor(coord.y), fast_floor(coord.z), fast_floor(coord.w)}
+	si := [4]f32{f32(coord.x - f64(base.x)), f32(coord.y - f64(base.y)), f32(coord.z - f64(base.z)), f32(coord.w - f64(base.w))}
+
+	// Determine which lattice we can be confident has a contributing point in its corresponding cell's base simplex.
+	// We only look at the spaces between the diagonal planes. This proved effective in all of my tests.
+	si_sum := (si.x + si.y) + (si.z + si.w)
+	starting_lattice := i64(si_sum * 1.25)
+
+	// Offset for seed based on first lattice copy.
+	seed += starting_lattice * SEED_OFFSET_4D
+
+	// Offset for lattice point relative positions (skewed)
+	starting_lattice_offset := f32(starting_lattice) * -LATTICE_STEP_4D
+	si += starting_lattice_offset
+
+	// Prep for vertex contributions.
+	ssi := (si_sum + starting_lattice_offset * 4) * UNSKEW_4D
+
+	// Prime pre-multiplication for hash.
+	svp := base * [4]i64{PRIME_X, PRIME_Y, PRIME_Z, PRIME_W}
+
+	// Five points to add, total, from five copies of the A4 lattice.
+	for i : i64 = 0; ; i += 1 {
+
+		// Next point is the closest vertex on the 4-simplex whose base vertex is the aforementioned vertex.
+		score := 1.0 + ssi * (-1.0 / UNSKEW_4D) // Seems slightly faster than 1.0-xsi-ysi-zsi-wsi
+		if si.x >= si.y && si.x >= si.z && si.x >= si.w && si.x >= score {
+			svp.x += PRIME_X
+			si.x -= 1
+			ssi -= UNSKEW_4D
+		}
+		else if si.y > si.x && si.y >= si.z && si.y >= si.w && si.y >= score {
+			svp.y += PRIME_Y
+			si.y -= 1
+			ssi -= UNSKEW_4D
+		}
+		else if si.z > si.x && si.z > si.y && si.z >= si.w && si.z >= score {
+			svp.z += PRIME_Z
+			si.z -= 1
+			ssi -= UNSKEW_4D
+		}
+		else if si.w > si.x && si.w > si.y && si.w > si.z && si.w >= score {
+			svp.w += PRIME_W
+			si.w -= 1
+			ssi -= UNSKEW_4D
+		}
+
+		// gradient contribution with falloff.
+		d := si + ssi
+		a := (d.x * d.x + d.y * d.y) + (d.z * d.z + d.w * d.w)
+
+		if a < RSQUARED_4D {
+			a -= RSQUARED_4D
+			a *= a; a4 := a * a
+			value += a4 * grad(seed, svp, d)
+		}
+
+		// Break from loop if we're done, skipping updates below.
+		if i == 4 {
+			break
+		}
+
+		// Update for next lattice copy shifted down by <-0.2, -0.2, -0.2, -0.2>.
+		si += LATTICE_STEP_4D
+		ssi += LATTICE_STEP_4D * 4 * UNSKEW_4D
+		seed -= SEED_OFFSET_4D
+
+		// Because we don't always start on the same lattice copy, there's a special reset case.
+		if i == starting_lattice {
+			svp -= {PRIME_X, PRIME_Y, PRIME_Z, PRIME_W}
+			seed += SEED_OFFSET_4D * 5
+		}
+	}
+	return
+}
+
+/*
+	Utility functions
+*/
+@(optimization_mode="speed")
+grad_2d :: proc(seed: i64, svp: [2]i64, delta: [2]f32) -> (value: f32) {
+	hash := seed ~ svp.x ~ svp.y
+	hash *= HASH_MULTIPLIER
+	hash ~= hash >> (64 - N_GRADS_2D_EXPONENT + 1)
+
+	gi := hash & ((N_GRADS_2D - 1) << 1)
+	return GRADIENTS_2D[gi] * delta.x + GRADIENTS_2D[gi | 1] * delta.y
+}
+
+@(optimization_mode="speed")
+grad_3d :: proc(seed: i64, rvp: [3]i64, delta: [3]f32) -> (value: f32) {
+	hash := (seed ~ rvp.x) ~ (rvp.y ~ rvp.z)
+	hash *= HASH_MULTIPLIER
+	hash ~= hash >> (64 - N_GRADS_3D_EXPONENT + 2)
+
+	gi := hash & ((N_GRADS_3D - 1) << 2)
+	return GRADIENTS_3D[gi] * delta.x + GRADIENTS_3D[gi | 1] * delta.y + GRADIENTS_3D[gi | 2] * delta.z
+}
+
+@(optimization_mode="speed")
+grad_4d :: proc(seed: i64, svp: [4]i64, delta: [4]f32) -> (value: f32) {
+	hash := seed ~ (svp.x ~ svp.y) ~ (svp.z ~ svp.w)
+	hash *= HASH_MULTIPLIER
+	hash ~= hash >> (64 - N_GRADS_4D_EXPONENT + 2)
+
+	gi := hash & ((N_GRADS_4D - 1) << 2)
+	return (GRADIENTS_4D[gi] * delta.x + GRADIENTS_4D[gi | 1] * delta.y) + (GRADIENTS_4D[gi | 2] * delta.z + GRADIENTS_4D[gi | 3] * delta.w)
+}
+
+grad :: proc {grad_2d, grad_3d, grad_4d}
+
+@(optimization_mode="speed")
+fast_floor :: proc(x: f64) -> (floored: i64) {
+	xi := i64(x)
+	return x < f64(xi) ? xi - 1 : xi
+}
+
+@(optimization_mode="speed")
+fast_round :: proc(x: f64) -> (rounded: i64) {
+	return x < 0 ? i64(x - 0.5) : i64(x + 0.5)
+}
\ No newline at end of file
diff --git a/core/math/noise/opensimplex2.odin b/core/math/noise/opensimplex2.odin
new file mode 100644
index 000000000..d90dafdf5
--- /dev/null
+++ b/core/math/noise/opensimplex2.odin
@@ -0,0 +1,171 @@
+/*
+	OpenSimplex2 noise implementation.
+
+	Ported from https://github.com/KdotJPG/OpenSimplex2.
+	Copyright 2022 Yuki2 (https://github.com/NoahR02)
+*/
+package math_noise
+
+/*
+	Input coordinate vectors
+*/
+Vec2 :: [2]f64
+Vec3 :: [3]f64
+Vec4 :: [4]f64
+
+/*
+	Noise Evaluators
+*/
+
+/*
+	2D Simplex noise, standard lattice orientation.
+*/
+noise_2d :: proc(seed: i64, coord: Vec2) -> (value: f32) {
+	// Get points for A2* lattice
+	skew := SKEW_2D * (coord.x + coord.y)
+	skewed := coord + skew
+
+	return _internal_noise_2d_unskewed_base(seed, skewed)
+}
+
+/*
+	2D Simplex noise, with Y pointing down the main diagonal.
+	Might be better for a 2D sandbox style game, where Y is vertical.
+	Probably slightly less optimal for heightmaps or continent maps,
+	unless your map is centered around an equator. It's a subtle
+	difference, but the option is here to make it an easy choice.
+*/
+noise_2d_improve_x :: proc(seed: i64, coord: Vec2) -> (value: f32) {
+	// Skew transform and rotation baked into one.
+	xx := coord.x * ROOT_2_OVER_2
+	yy := coord.y * (ROOT_2_OVER_2 * (1 + 2 * SKEW_2D))
+	return _internal_noise_2d_unskewed_base(seed, Vec2{yy + xx, yy - xx})
+}
+
+
+/*
+	3D OpenSimplex2 noise, with better visual isotropy in (X, Y).
+	Recommended for 3D terrain and time-varied animations.
+	The Z coordinate should always be the "different" coordinate in whatever your use case is.
+	If Y is vertical in world coordinates, call `noise_3d_improve_xy(x, z, Y)` or use `noise_3d_improve_xz`.
+	If Z is vertical in world coordinates, call `noise_3d_improve_xy(x, y, Z)`.
+	For a time varied animation, call `noise_3d_improve_xy(x, y, T)`.
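+
+	For example (illustrative), sampling animated 2D terrain where the third
+	component carries time:
+
+		height := noise_3d_improve_xy(seed, {x, y, t})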
+*/ +noise_3d_improve_xy :: proc(seed: i64, coord: Vec3) -> (value: f32) { + /* + Re-orient the cubic lattices without skewing, so Z points up the main lattice diagonal, + and the planes formed by XY are moved far out of alignment with the cube faces. + Orthonormal rotation. Not a skew transform. + */ + xy := coord.x + coord.y + s2 := xy * ROTATE_3D_ORTHOGONALIZER + zz := coord.z * ROOT_3_OVER_3 + + r := Vec3{coord.x + s2 + zz, coord.y + s2 + zz, xy * -ROOT_3_OVER_3 + zz} + + // Evaluate both lattices to form a BCC lattice. + return _internal_noise_3d_unrotated_base(seed, r) +} + +/* + 3D OpenSimplex2 noise, with better visual isotropy in (X, Z). + Recommended for 3D terrain and time-varied animations. + The Y coordinate should always be the "different" coordinate in whatever your use case is. + If Y is vertical in world coordinates, call `noise_3d_improve_xz(x, Y, z)`. + If Z is vertical in world coordinates, call `noise_3d_improve_xz(x, Z, y)` or use `noise_3d_improve_xy`. + For a time varied animation, call `noise_3d_improve_xz(x, T, y)` or use `noise_3d_improve_xy`. +*/ +noise_3d_improve_xz :: proc(seed: i64, coord: Vec3) -> (value: f32) { + /* + Re-orient the cubic lattices without skewing, so Y points up the main lattice diagonal, + and the planes formed by XZ are moved far out of alignment with the cube faces. + Orthonormal rotation. Not a skew transform. + */ + xz := coord.x + coord.z + s2 := xz * ROTATE_3D_ORTHOGONALIZER + yy := coord.y * ROOT_3_OVER_3 + + r := Vec3{coord.x + s2 + yy, xz * -ROOT_3_OVER_3 + yy, coord.z + s2 + yy} + + // Evaluate both lattices to form a BCC lattice. + return _internal_noise_3d_unrotated_base(seed, r) +} + +/* + 3D OpenSimplex2 noise, fallback rotation option + Use `noise_3d_improve_xy` or `noise_3d_improve_xz` instead, wherever appropriate. + They have less diagonal bias. This function's best use is as a fallback. +*/ +noise_3d_fallback :: proc(seed: i64, coord: Vec3) -> (value: f32) { + /* + Re-orient the cubic lattices via rotation, to produce a familiar look. + Orthonormal rotation. Not a skew transform. + */ + bias := FALLBACK_ROTATE_3D * (coord.x + coord.y + coord.z) + biased := bias - coord + // Evaluate both lattices to form a BCC lattice. + return _internal_noise_3d_unrotated_base(seed, biased) +} + + +/* + 4D OpenSimplex2 noise, with XYZ oriented like `noise_3d_improve_xy` + and W for an extra degree of freedom. W repeats eventually. + Recommended for time-varied animations which texture a 3D object (W=time) + in a space where Z is vertical. +*/ +noise_4d_improve_xyz_improve_xy :: proc(seed: i64, coord: Vec4) -> (value: f32) { + xy := coord.x + coord.y + s2 := xy * -0.21132486540518699998 + zz := coord.z * 0.28867513459481294226 + ww := coord.w * 0.2236067977499788 + + xr, yr : f64 = coord.x + (zz + ww + s2), coord.y + (zz + ww + s2) + zr : f64 = xy * -0.57735026918962599998 + (zz + ww) + wr : f64 = coord.z * -0.866025403784439 + ww + + return _internal_noise_4d_unskewed_base(seed, Vec4{xr, yr, zr, wr}) +} + +/* + 4D OpenSimplex2 noise, with XYZ oriented like `noise_3d_improve_xz` + and W for an extra degree of freedom. W repeats eventually. + Recommended for time-varied animations which texture a 3D object (W=time) + in a space where Y is vertical. 
+*/
+noise_4d_improve_xyz_improve_xz :: proc(seed: i64, coord: Vec4) -> (value: f32) {
+	xz := coord.x + coord.z
+	s2 := xz * -0.21132486540518699998
+	yy := coord.y * 0.28867513459481294226
+	ww := coord.w * 0.2236067977499788
+
+	xr, zr : f64 = coord.x + (yy + ww + s2), coord.z + (yy + ww + s2)
+	yr := xz * -0.57735026918962599998 + (yy + ww)
+	wr := coord.y * -0.866025403784439 + ww
+
+	return _internal_noise_4d_unskewed_base(seed, Vec4{xr, yr, zr, wr})
+}
+
+/*
+	4D OpenSimplex2 noise, with XYZ oriented like `noise_3d_fallback`
+	and W for an extra degree of freedom. W repeats eventually.
+	Recommended for time-varied animations which texture a 3D object (W=time)
+	where there isn't a clear distinction between horizontal and vertical.
+*/
+noise_4d_improve_xyz :: proc(seed: i64, coord: Vec4) -> (value: f32) {
+	xyz := coord.x + coord.y + coord.z
+	ww := coord.w * 0.2236067977499788
+	s2 := xyz * -0.16666666666666666 + ww
+
+	skewed := Vec4{coord.x + s2, coord.y + s2, coord.z + s2, -0.5 * xyz + ww}
+	return _internal_noise_4d_unskewed_base(seed, skewed)
+}
+
+/*
+	4D OpenSimplex2 noise, fallback lattice orientation.
+*/
+noise_4d_fallback :: proc(seed: i64, coord: Vec4) -> (value: f32) {
+	// Get points for A4 lattice
+	skew := f64(SKEW_4D) * (coord.x + coord.y + coord.z + coord.w)
+	return _internal_noise_4d_unskewed_base(seed, coord + skew)
+}
\ No newline at end of file
diff --git a/core/math/rand/exp.odin b/core/math/rand/exp.odin
new file mode 100644
index 000000000..c0f92e99c
--- /dev/null
+++ b/core/math/rand/exp.odin
@@ -0,0 +1,214 @@
+package rand
+
+import "core:math"
+
+// exp_float64 returns an exponentially distributed f64 in the range (0, max(f64)],
+// drawn from an exponential distribution whose rate parameter (lambda) is 1 and
+// whose mean is 1 (1/lambda).
+//
+// To produce a distribution with a different rate parameter, divide the result by
+// the desired rate parameter.
+//
+// "The Ziggurat Method for Generating Random Variables"
+// Authors: George Marsaglia, Wai Wan Tsang
+// Submitted: 2000-04-15. Published: 2000-10-02.
+// https://www.jstatsoft.org/index.php/jss/article/view/v005i08/ziggurat.pdf [pdf] +// https://www.jstatsoft.org/article/view/v005i08 [web page] +// +exp_float64 :: proc(r: ^Rand = nil) -> f64 { + re :: 7.69711747013104972 + + @(static) + ke := [256]u32{ + 0xe290a139, 0x0, 0x9beadebc, 0xc377ac71, 0xd4ddb990, + 0xde893fb8, 0xe4a8e87c, 0xe8dff16a, 0xebf2deab, 0xee49a6e8, + 0xf0204efd, 0xf19bdb8e, 0xf2d458bb, 0xf3da104b, 0xf4b86d78, + 0xf577ad8a, 0xf61de83d, 0xf6afb784, 0xf730a573, 0xf7a37651, + 0xf80a5bb6, 0xf867189d, 0xf8bb1b4f, 0xf9079062, 0xf94d70ca, + 0xf98d8c7d, 0xf9c8928a, 0xf9ff175b, 0xfa319996, 0xfa6085f8, + 0xfa8c3a62, 0xfab5084e, 0xfadb36c8, 0xfaff0410, 0xfb20a6ea, + 0xfb404fb4, 0xfb5e2951, 0xfb7a59e9, 0xfb95038c, 0xfbae44ba, + 0xfbc638d8, 0xfbdcf892, 0xfbf29a30, 0xfc0731df, 0xfc1ad1ed, + 0xfc2d8b02, 0xfc3f6c4d, 0xfc5083ac, 0xfc60ddd1, 0xfc708662, + 0xfc7f8810, 0xfc8decb4, 0xfc9bbd62, 0xfca9027c, 0xfcb5c3c3, + 0xfcc20864, 0xfccdd70a, 0xfcd935e3, 0xfce42ab0, 0xfceebace, + 0xfcf8eb3b, 0xfd02c0a0, 0xfd0c3f59, 0xfd156b7b, 0xfd1e48d6, + 0xfd26daff, 0xfd2f2552, 0xfd372af7, 0xfd3eeee5, 0xfd4673e7, + 0xfd4dbc9e, 0xfd54cb85, 0xfd5ba2f2, 0xfd62451b, 0xfd68b415, + 0xfd6ef1da, 0xfd750047, 0xfd7ae120, 0xfd809612, 0xfd8620b4, + 0xfd8b8285, 0xfd90bcf5, 0xfd95d15e, 0xfd9ac10b, 0xfd9f8d36, + 0xfda43708, 0xfda8bf9e, 0xfdad2806, 0xfdb17141, 0xfdb59c46, + 0xfdb9a9fd, 0xfdbd9b46, 0xfdc170f6, 0xfdc52bd8, 0xfdc8ccac, + 0xfdcc542d, 0xfdcfc30b, 0xfdd319ef, 0xfdd6597a, 0xfdd98245, + 0xfddc94e5, 0xfddf91e6, 0xfde279ce, 0xfde54d1f, 0xfde80c52, + 0xfdeab7de, 0xfded5034, 0xfdefd5be, 0xfdf248e3, 0xfdf4aa06, + 0xfdf6f984, 0xfdf937b6, 0xfdfb64f4, 0xfdfd818d, 0xfdff8dd0, + 0xfe018a08, 0xfe03767a, 0xfe05536c, 0xfe07211c, 0xfe08dfc9, + 0xfe0a8fab, 0xfe0c30fb, 0xfe0dc3ec, 0xfe0f48b1, 0xfe10bf76, + 0xfe122869, 0xfe1383b4, 0xfe14d17c, 0xfe1611e7, 0xfe174516, + 0xfe186b2a, 0xfe19843e, 0xfe1a9070, 0xfe1b8fd6, 0xfe1c8289, + 0xfe1d689b, 0xfe1e4220, 0xfe1f0f26, 0xfe1fcfbc, 0xfe2083ed, + 0xfe212bc3, 0xfe21c745, 0xfe225678, 0xfe22d95f, 0xfe234ffb, + 0xfe23ba4a, 0xfe241849, 0xfe2469f2, 0xfe24af3c, 0xfe24e81e, + 0xfe25148b, 0xfe253474, 0xfe2547c7, 0xfe254e70, 0xfe25485a, + 0xfe25356a, 0xfe251586, 0xfe24e88f, 0xfe24ae64, 0xfe2466e1, + 0xfe2411df, 0xfe23af34, 0xfe233eb4, 0xfe22c02c, 0xfe22336b, + 0xfe219838, 0xfe20ee58, 0xfe20358c, 0xfe1f6d92, 0xfe1e9621, + 0xfe1daef0, 0xfe1cb7ac, 0xfe1bb002, 0xfe1a9798, 0xfe196e0d, + 0xfe1832fd, 0xfe16e5fe, 0xfe15869d, 0xfe141464, 0xfe128ed3, + 0xfe10f565, 0xfe0f478c, 0xfe0d84b1, 0xfe0bac36, 0xfe09bd73, + 0xfe07b7b5, 0xfe059a40, 0xfe03644c, 0xfe011504, 0xfdfeab88, + 0xfdfc26e9, 0xfdf98629, 0xfdf6c83b, 0xfdf3ec01, 0xfdf0f04a, + 0xfdedd3d1, 0xfdea953d, 0xfde7331e, 0xfde3abe9, 0xfddffdfb, + 0xfddc2791, 0xfdd826cd, 0xfdd3f9a8, 0xfdcf9dfc, 0xfdcb1176, + 0xfdc65198, 0xfdc15bb3, 0xfdbc2ce2, 0xfdb6c206, 0xfdb117be, + 0xfdab2a63, 0xfda4f5fd, 0xfd9e7640, 0xfd97a67a, 0xfd908192, + 0xfd8901f2, 0xfd812182, 0xfd78d98e, 0xfd7022bb, 0xfd66f4ed, + 0xfd5d4732, 0xfd530f9c, 0xfd48432b, 0xfd3cd59a, 0xfd30b936, + 0xfd23dea4, 0xfd16349e, 0xfd07a7a3, 0xfcf8219b, 0xfce7895b, + 0xfcd5c220, 0xfcc2aadb, 0xfcae1d5e, 0xfc97ed4e, 0xfc7fe6d4, + 0xfc65ccf3, 0xfc495762, 0xfc2a2fc8, 0xfc07ee19, 0xfbe213c1, + 0xfbb8051a, 0xfb890078, 0xfb5411a5, 0xfb180005, 0xfad33482, + 0xfa839276, 0xfa263b32, 0xf9b72d1c, 0xf930a1a2, 0xf889f023, + 0xf7b577d2, 0xf69c650c, 0xf51530f0, 0xf2cb0e3c, 0xeeefb15d, + 0xe6da6ecf, + } + @(static) + we := [256]f32{ + 2.0249555e-09, 1.486674e-11, 2.4409617e-11, 3.1968806e-11, + 3.844677e-11, 4.4228204e-11, 
4.9516443e-11, 5.443359e-11, + 5.905944e-11, 6.344942e-11, 6.7643814e-11, 7.1672945e-11, + 7.556032e-11, 7.932458e-11, 8.298079e-11, 8.654132e-11, + 9.0016515e-11, 9.3415074e-11, 9.674443e-11, 1.0001099e-10, + 1.03220314e-10, 1.06377254e-10, 1.09486115e-10, 1.1255068e-10, + 1.1557435e-10, 1.1856015e-10, 1.2151083e-10, 1.2442886e-10, + 1.2731648e-10, 1.3017575e-10, 1.3300853e-10, 1.3581657e-10, + 1.3860142e-10, 1.4136457e-10, 1.4410738e-10, 1.4683108e-10, + 1.4953687e-10, 1.5222583e-10, 1.54899e-10, 1.5755733e-10, + 1.6020171e-10, 1.6283301e-10, 1.6545203e-10, 1.6805951e-10, + 1.7065617e-10, 1.732427e-10, 1.7581973e-10, 1.7838787e-10, + 1.8094774e-10, 1.8349985e-10, 1.8604476e-10, 1.8858298e-10, + 1.9111498e-10, 1.9364126e-10, 1.9616223e-10, 1.9867835e-10, + 2.0119004e-10, 2.0369768e-10, 2.0620168e-10, 2.087024e-10, + 2.1120022e-10, 2.136955e-10, 2.1618855e-10, 2.1867974e-10, + 2.2116936e-10, 2.2365775e-10, 2.261452e-10, 2.2863202e-10, + 2.311185e-10, 2.3360494e-10, 2.360916e-10, 2.3857874e-10, + 2.4106667e-10, 2.4355562e-10, 2.4604588e-10, 2.485377e-10, + 2.5103128e-10, 2.5352695e-10, 2.560249e-10, 2.585254e-10, + 2.6102867e-10, 2.6353494e-10, 2.6604446e-10, 2.6855745e-10, + 2.7107416e-10, 2.7359479e-10, 2.761196e-10, 2.7864877e-10, + 2.8118255e-10, 2.8372119e-10, 2.8626485e-10, 2.888138e-10, + 2.9136826e-10, 2.939284e-10, 2.9649452e-10, 2.9906677e-10, + 3.016454e-10, 3.0423064e-10, 3.0682268e-10, 3.0942177e-10, + 3.1202813e-10, 3.1464195e-10, 3.1726352e-10, 3.19893e-10, + 3.2253064e-10, 3.251767e-10, 3.2783135e-10, 3.3049485e-10, + 3.3316744e-10, 3.3584938e-10, 3.3854083e-10, 3.4124212e-10, + 3.4395342e-10, 3.46675e-10, 3.4940711e-10, 3.5215003e-10, + 3.5490397e-10, 3.5766917e-10, 3.6044595e-10, 3.6323455e-10, + 3.660352e-10, 3.6884823e-10, 3.7167386e-10, 3.745124e-10, + 3.773641e-10, 3.802293e-10, 3.8310827e-10, 3.860013e-10, + 3.8890866e-10, 3.918307e-10, 3.9476775e-10, 3.9772008e-10, + 4.0068804e-10, 4.0367196e-10, 4.0667217e-10, 4.09689e-10, + 4.1272286e-10, 4.1577405e-10, 4.1884296e-10, 4.2192994e-10, + 4.250354e-10, 4.281597e-10, 4.313033e-10, 4.3446652e-10, + 4.3764986e-10, 4.408537e-10, 4.4407847e-10, 4.4732465e-10, + 4.5059267e-10, 4.5388301e-10, 4.571962e-10, 4.6053267e-10, + 4.6389292e-10, 4.6727755e-10, 4.70687e-10, 4.741219e-10, + 4.7758275e-10, 4.810702e-10, 4.845848e-10, 4.8812715e-10, + 4.9169796e-10, 4.9529775e-10, 4.989273e-10, 5.0258725e-10, + 5.0627835e-10, 5.100013e-10, 5.1375687e-10, 5.1754584e-10, + 5.21369e-10, 5.2522725e-10, 5.2912136e-10, 5.330522e-10, + 5.370208e-10, 5.4102806e-10, 5.45075e-10, 5.491625e-10, + 5.532918e-10, 5.5746385e-10, 5.616799e-10, 5.6594107e-10, + 5.7024857e-10, 5.746037e-10, 5.7900773e-10, 5.834621e-10, + 5.8796823e-10, 5.925276e-10, 5.971417e-10, 6.018122e-10, + 6.065408e-10, 6.113292e-10, 6.1617933e-10, 6.2109295e-10, + 6.260722e-10, 6.3111916e-10, 6.3623595e-10, 6.4142497e-10, + 6.4668854e-10, 6.5202926e-10, 6.5744976e-10, 6.6295286e-10, + 6.6854156e-10, 6.742188e-10, 6.79988e-10, 6.858526e-10, + 6.9181616e-10, 6.978826e-10, 7.04056e-10, 7.103407e-10, + 7.167412e-10, 7.2326256e-10, 7.2990985e-10, 7.366886e-10, + 7.4360473e-10, 7.5066453e-10, 7.5787476e-10, 7.6524265e-10, + 7.7277595e-10, 7.80483e-10, 7.883728e-10, 7.9645507e-10, + 8.047402e-10, 8.1323964e-10, 8.219657e-10, 8.309319e-10, + 8.401528e-10, 8.496445e-10, 8.594247e-10, 8.6951274e-10, + 8.799301e-10, 8.9070046e-10, 9.018503e-10, 9.134092e-10, + 9.254101e-10, 9.378904e-10, 9.508923e-10, 9.644638e-10, + 9.786603e-10, 9.935448e-10, 1.0091913e-09, 1.025686e-09, + 1.0431306e-09, 
1.0616465e-09, 1.08138e-09, 1.1025096e-09, + 1.1252564e-09, 1.1498986e-09, 1.1767932e-09, 1.206409e-09, + 1.2393786e-09, 1.276585e-09, 1.3193139e-09, 1.3695435e-09, + 1.4305498e-09, 1.508365e-09, 1.6160854e-09, 1.7921248e-09, + } + @(static) + fe := [256]f32{ + 1, 0.9381437, 0.90046996, 0.87170434, 0.8477855, 0.8269933, + 0.8084217, 0.7915276, 0.77595687, 0.7614634, 0.7478686, + 0.7350381, 0.72286767, 0.71127474, 0.70019263, 0.6895665, + 0.67935055, 0.6695063, 0.66000086, 0.65080583, 0.6418967, + 0.63325197, 0.6248527, 0.6166822, 0.60872537, 0.60096896, + 0.5934009, 0.58601034, 0.5787874, 0.57172304, 0.5648092, + 0.5580383, 0.5514034, 0.5448982, 0.5385169, 0.53225386, + 0.5261042, 0.52006316, 0.5141264, 0.50828975, 0.5025495, + 0.496902, 0.49134386, 0.485872, 0.48048335, 0.4751752, + 0.46994483, 0.46478975, 0.45970762, 0.45469615, 0.44975325, + 0.44487688, 0.44006512, 0.43531612, 0.43062815, 0.42599955, + 0.42142874, 0.4169142, 0.41245446, 0.40804818, 0.403694, + 0.3993907, 0.39513698, 0.39093173, 0.38677382, 0.38266218, + 0.37859577, 0.37457356, 0.37059465, 0.3666581, 0.362763, + 0.35890847, 0.35509375, 0.351318, 0.3475805, 0.34388044, + 0.34021714, 0.3365899, 0.33299807, 0.32944095, 0.32591796, + 0.3224285, 0.3189719, 0.31554767, 0.31215525, 0.30879408, + 0.3054636, 0.3021634, 0.29889292, 0.2956517, 0.29243928, + 0.28925523, 0.28609908, 0.28297043, 0.27986884, 0.27679393, + 0.2737453, 0.2707226, 0.2677254, 0.26475343, 0.26180625, + 0.25888354, 0.25598502, 0.2531103, 0.25025907, 0.24743107, + 0.24462597, 0.24184346, 0.23908329, 0.23634516, 0.23362878, + 0.23093392, 0.2282603, 0.22560766, 0.22297576, 0.22036438, + 0.21777324, 0.21520215, 0.21265087, 0.21011916, 0.20760682, + 0.20511365, 0.20263945, 0.20018397, 0.19774707, 0.19532852, + 0.19292815, 0.19054577, 0.1881812, 0.18583426, 0.18350479, + 0.1811926, 0.17889754, 0.17661946, 0.17435817, 0.17211354, + 0.1698854, 0.16767362, 0.16547804, 0.16329853, 0.16113494, + 0.15898713, 0.15685499, 0.15473837, 0.15263714, 0.15055119, + 0.14848037, 0.14642459, 0.14438373, 0.14235765, 0.14034624, + 0.13834943, 0.13636707, 0.13439907, 0.13244532, 0.13050574, + 0.1285802, 0.12666863, 0.12477092, 0.12288698, 0.12101672, + 0.119160056, 0.1173169, 0.115487166, 0.11367077, 0.11186763, + 0.11007768, 0.10830083, 0.10653701, 0.10478614, 0.10304816, + 0.101323, 0.09961058, 0.09791085, 0.09622374, 0.09454919, + 0.09288713, 0.091237515, 0.08960028, 0.087975375, 0.08636274, + 0.08476233, 0.083174095, 0.081597984, 0.08003395, 0.07848195, + 0.076941945, 0.07541389, 0.07389775, 0.072393484, 0.07090106, + 0.069420435, 0.06795159, 0.066494495, 0.06504912, 0.063615434, + 0.062193416, 0.060783047, 0.059384305, 0.057997175, + 0.05662164, 0.05525769, 0.053905312, 0.052564494, 0.051235236, + 0.049917534, 0.048611384, 0.047316793, 0.046033762, 0.0447623, + 0.043502413, 0.042254124, 0.041017443, 0.039792392, + 0.038578995, 0.037377283, 0.036187284, 0.035009038, + 0.033842582, 0.032687962, 0.031545233, 0.030414443, 0.02929566, + 0.02818895, 0.027094385, 0.026012046, 0.024942026, 0.023884421, + 0.022839336, 0.021806888, 0.020787204, 0.019780423, 0.0187867, + 0.0178062, 0.016839107, 0.015885621, 0.014945968, 0.014020392, + 0.013109165, 0.012212592, 0.011331013, 0.01046481, 0.009614414, + 0.008780315, 0.007963077, 0.0071633533, 0.006381906, + 0.0056196423, 0.0048776558, 0.004157295, 0.0034602648, + 0.0027887989, 0.0021459677, 0.0015362998, 0.0009672693, + 0.00045413437, + } + + for { + j := uint32(r) + i := j & 0xFF + x := f64(j) * f64(we[i]) + if j < ke[i] { + return x + } + if 
i == 0 { + return re - math.ln(float64(r)) + } + if fe[i]+f32(float64(r))*(fe[i-1]-fe[i]) < f32(math.exp(-x)) { + return x + } + } +} \ No newline at end of file diff --git a/core/math/rand/normal.odin b/core/math/rand/normal.odin index 4a77543ba..a9edd0f19 100644 --- a/core/math/rand/normal.odin +++ b/core/math/rand/normal.odin @@ -2,6 +2,12 @@ package rand import "core:math" + +// norm_float64 returns a normally distributed f64 in the range -max(f64) through +max(f64) inclusive, +// with a standard normal distribution with a mean of 0 and standard deviation of 1. +// +// sample = norm_float64() * std_dev + mean +// // // Normal distribution // @@ -11,12 +17,6 @@ import "core:math" // https://www.jstatsoft.org/index.php/jss/article/view/v005i08/ziggurat.pdf [pdf] // https://www.jstatsoft.org/article/view/v005i08 [web page] // - -// norm_float64 returns a normally distributed f64 in the range -max(f64) through +max(f64) inclusive, -// with a standard normal distribution with a mean of 0 and standard deviation of 1. -// -// sample = norm_float64() * std_dev + mean -// norm_float64 :: proc(r: ^Rand = nil) -> f64 { rn :: 3.442619855899 @@ -49,7 +49,6 @@ norm_float64 :: proc(r: ^Rand = nil) -> f64 { 0x7da61a1e, 0x7d72a0fb, 0x7d30e097, 0x7cd9b4ab, 0x7c600f1a, 0x7ba90bdc, 0x7a722176, 0x77d664e5, } - @(static) wn := [128]f32{ 1.7290405e-09, 1.2680929e-10, 1.6897518e-10, 1.9862688e-10, @@ -85,7 +84,6 @@ norm_float64 :: proc(r: ^Rand = nil) -> f64 { 1.2601323e-09, 1.2857697e-09, 1.3146202e-09, 1.347784e-09, 1.3870636e-09, 1.4357403e-09, 1.5008659e-09, 1.6030948e-09, } - @(static) fn := [128]f32{ 1.00000000, 0.9635997, 0.9362827, 0.9130436, 0.89228165, diff --git a/core/math/rand/rand.odin b/core/math/rand/rand.odin index 9bd30c216..19e475835 100644 --- a/core/math/rand/rand.odin +++ b/core/math/rand/rand.odin @@ -1,5 +1,7 @@ package rand +import "core:intrinsics" + Rand :: struct { state: u64, inc: u64, @@ -7,9 +9,7 @@ Rand :: struct { @(private) -_GLOBAL_SEED_DATA := 1234567890 -@(private) -global_rand := create(u64(uintptr(&_GLOBAL_SEED_DATA))) +global_rand := create(u64(intrinsics.read_cycle_counter())) set_global_seed :: proc(seed: u64) { init(&global_rand, seed) @@ -70,7 +70,7 @@ int31_max :: proc(n: i32, r: ^Rand = nil) -> i32 { if n&(n-1) == 0 { return int31(r) & (n-1) } - max := i32((1<<31) - 1 - (1<<31)&u32(n)) + max := i32((1<<31) - 1 - (1<<31)%u32(n)) v := int31(r) for v > max { v = int31(r) @@ -85,7 +85,7 @@ int63_max :: proc(n: i64, r: ^Rand = nil) -> i64 { if n&(n-1) == 0 { return int63(r) & (n-1) } - max := i64((1<<63) - 1 - (1<<63)&u64(n)) + max := i64((1<<63) - 1 - (1<<63)%u64(n)) v := int63(r) for v > max { v = int63(r) @@ -100,7 +100,7 @@ int127_max :: proc(n: i128, r: ^Rand = nil) -> i128 { if n&(n-1) == 0 { return int127(r) & (n-1) } - max := i128((1<<63) - 1 - (1<<63)&u128(n)) + max := i128((1<<127) - 1 - (1<<127)%u128(n)) v := int127(r) for v > max { v = int127(r) @@ -142,8 +142,8 @@ read :: proc(p: []byte, r: ^Rand = nil) -> (n: int) { } // perm returns a slice of n ints in a pseudo-random permutation of integers in the range [0, n) -perm :: proc(n: int, r: ^Rand = nil) -> []int { - m := make([]int, n) +perm :: proc(n: int, r: ^Rand = nil, allocator := context.allocator) -> []int { + m := make([]int, n, allocator) for i := 0; i < n; i += 1 { j := int_max(i+1, r) m[i] = m[j] diff --git a/core/mem/doc.odin b/core/mem/doc.odin new file mode 100644 index 000000000..2a5ee06d3 --- /dev/null +++ b/core/mem/doc.odin @@ -0,0 +1,34 @@ +/* +package mem implements various types of 
allocators. + + +An example of how to use the `Tracking_Allocator` to track subsequent allocations +in your program and report leaks and bad frees: + +```odin +package foo + +import "core:mem" +import "core:fmt" + +_main :: proc() { + do stuff +} + +main :: proc() { + track: mem.Tracking_Allocator + mem.tracking_allocator_init(&track, context.allocator) + context.allocator = mem.tracking_allocator(&track) + + _main() + + for _, v in track.allocation_map { + fmt.printf("%v leaked %v bytes", v.location, v.size) + } + for bf in track.bad_free_array { + fmt.printf("%v allocation %p was freed badly", bf.location, bf.memory) + } +} +``` +*/ +package mem \ No newline at end of file diff --git a/core/odin/ast/ast.odin b/core/odin/ast/ast.odin index 9db57541b..8eb0def44 100644 --- a/core/odin/ast/ast.odin +++ b/core/odin/ast/ast.odin @@ -151,6 +151,7 @@ Comp_Lit :: struct { open: tokenizer.Pos, elems: []^Expr, close: tokenizer.Pos, + tag: ^Expr, } diff --git a/core/odin/doc-format/doc_format.odin b/core/odin/doc-format/doc_format.odin index 59eafdc09..62682004d 100644 --- a/core/odin/doc-format/doc_format.odin +++ b/core/odin/doc-format/doc_format.odin @@ -11,7 +11,7 @@ String :: distinct Array(byte) Version_Type_Major :: 0 Version_Type_Minor :: 2 -Version_Type_Patch :: 3 +Version_Type_Patch :: 4 Version_Type :: struct { major, minor, patch: u8, @@ -77,9 +77,15 @@ Pkg :: struct { flags: Pkg_Flags, docs: String, files: Array(File_Index), - entities: Array(Entity_Index), + entries: Array(Scope_Entry), } +Scope_Entry :: struct { + name: String, + entity: Entity_Index, +} + + Entity_Kind :: enum u32le { Invalid = 0, Constant = 1, @@ -89,6 +95,7 @@ Entity_Kind :: enum u32le { Proc_Group = 5, Import_Name = 6, Library_Name = 7, + Builtin = 8, } Entity_Flag :: enum u32le { @@ -105,6 +112,9 @@ Entity_Flag :: enum u32le { Type_Alias = 20, + Builtin_Pkg_Builtin = 30, + Builtin_Pkg_Intrinsics = 31, + Var_Thread_Local = 40, Var_Static = 41, diff --git a/core/odin/parser/parser.odin b/core/odin/parser/parser.odin index e8c2c848d..cc802e7d2 100644 --- a/core/odin/parser/parser.odin +++ b/core/odin/parser/parser.odin @@ -2273,6 +2273,24 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr { return original_type case "partial": + tag := ast.new(ast.Basic_Directive, tok.pos, end_pos(name)) + tag.tok = tok + tag.name = name.text + original_expr := parse_expr(p, lhs) + expr := ast.unparen_expr(original_expr) + switch t in &expr.derived { + case ast.Comp_Lit: + t.tag = tag + case ast.Array_Type: + t.tag = tag + error(p, tok.pos, "#%s has been replaced with #sparse for non-contiguous enumerated array types", name.text) + case: + error(p, tok.pos, "expected a compound literal after #%s", name.text) + + } + return original_expr + + case "sparse": tag := ast.new(ast.Basic_Directive, tok.pos, end_pos(name)) tag.tok = tok tag.name = name.text @@ -2319,7 +2337,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr { return rt case "force_inline", "force_no_inline": - return parse_inlining_operand(p, lhs, tok) + return parse_inlining_operand(p, lhs, name) case: expr := parse_expr(p, lhs) te := ast.new(ast.Tag_Expr, tok.pos, expr.pos) diff --git a/core/os/os.odin b/core/os/os.odin index 19a8099ef..e880ec21e 100644 --- a/core/os/os.odin +++ b/core/os/os.odin @@ -206,11 +206,19 @@ heap_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode, } } - aligned_resize :: proc(p: rawptr, old_size: int, new_size: int, new_alignment: int) -> ([]byte, mem.Allocator_Error) { + aligned_resize :: proc(p: 
rawptr, old_size: int, new_size: int, new_alignment: int) -> (new_memory: []byte, err: mem.Allocator_Error) { if p == nil { return nil, nil } - return aligned_alloc(new_size, new_alignment, p) + + new_memory = aligned_alloc(new_size, new_alignment, p) or_return + + // NOTE: heap_resize does not zero the new memory, so we do it + if new_size > old_size { + new_region := mem.raw_data(new_memory[old_size:]) + mem.zero(new_region, new_size - old_size) + } + return } switch mode { diff --git a/core/os/os_darwin.odin b/core/os/os_darwin.odin index 4828c5167..71c9fac38 100644 --- a/core/os/os_darwin.odin +++ b/core/os/os_darwin.odin @@ -290,13 +290,21 @@ foreign libc { @(link_name="fstat64") _unix_fstat :: proc(fd: Handle, stat: ^OS_Stat) -> c.int --- @(link_name="readlink") _unix_readlink :: proc(path: cstring, buf: ^byte, bufsiz: c.size_t) -> c.ssize_t --- @(link_name="access") _unix_access :: proc(path: cstring, mask: int) -> int --- - @(link_name="fdopendir$INODE64") _unix_fdopendir :: proc(fd: Handle) -> Dir --- + + @(link_name="fdopendir$INODE64") _unix_fdopendir_amd64 :: proc(fd: Handle) -> Dir --- + @(link_name="readdir_r$INODE64") _unix_readdir_r_amd64 :: proc(dirp: Dir, entry: ^Dirent, result: ^^Dirent) -> c.int --- + @(link_name="fdopendir") _unix_fdopendir_arm64 :: proc(fd: Handle) -> Dir --- + @(link_name="readdir_r") _unix_readdir_r_arm64 :: proc(dirp: Dir, entry: ^Dirent, result: ^^Dirent) -> c.int --- + @(link_name="closedir") _unix_closedir :: proc(dirp: Dir) -> c.int --- @(link_name="rewinddir") _unix_rewinddir :: proc(dirp: Dir) --- - @(link_name="readdir_r$INODE64") _unix_readdir_r :: proc(dirp: Dir, entry: ^Dirent, result: ^^Dirent) -> c.int --- + @(link_name="fcntl") _unix_fcntl :: proc(fd: Handle, cmd: c.int, buf: ^byte) -> c.int --- - @(link_name="fchmod") _unix_fchmod :: proc(fildes: Handle, mode: u16) -> c.int ---; + @(link_name="rename") _unix_rename :: proc(old: cstring, new: cstring) -> c.int --- + @(link_name="remove") _unix_remove :: proc(path: cstring) -> c.int --- + + @(link_name="fchmod") _unix_fchmod :: proc(fildes: Handle, mode: u16) -> c.int --- @(link_name="malloc") _unix_malloc :: proc(size: int) -> rawptr --- @(link_name="calloc") _unix_calloc :: proc(num, size: int) -> rawptr --- @@ -307,11 +315,19 @@ foreign libc { @(link_name="chdir") _unix_chdir :: proc(buf: cstring) -> c.int --- @(link_name="realpath") _unix_realpath :: proc(path: cstring, resolved_path: rawptr) -> rawptr --- - @(link_name="strerror") _darwin_string_error :: proc(num : c.int) -> cstring ---; + @(link_name="strerror") _darwin_string_error :: proc(num : c.int) -> cstring --- @(link_name="exit") _unix_exit :: proc(status: c.int) -> ! 
--- } +when ODIN_ARCH != "arm64" { + _unix_fdopendir :: proc {_unix_fdopendir_amd64} + _unix_readdir_r :: proc {_unix_readdir_r_amd64} +} else { + _unix_fdopendir :: proc {_unix_fdopendir_arm64} + _unix_readdir_r :: proc {_unix_readdir_r_arm64} +} + foreign dl { @(link_name="dlopen") _unix_dlopen :: proc(filename: cstring, flags: int) -> rawptr --- @(link_name="dlsym") _unix_dlsym :: proc(handle: rawptr, symbol: cstring) -> rawptr --- @@ -324,7 +340,7 @@ get_last_error :: proc() -> int { } get_last_error_string :: proc() -> string { - return cast(string)_darwin_string_error(cast(c.int)get_last_error()); + return cast(string)_darwin_string_error(cast(c.int)get_last_error()) } open :: proc(path: string, flags: int = O_RDWR, mode: int = 0) -> (Handle, Errno) { @@ -412,6 +428,65 @@ is_path_separator :: proc(r: rune) -> bool { return r == '/' } +is_file_handle :: proc(fd: Handle) -> bool { + s, err := _fstat(fd) + if err != ERROR_NONE { + return false + } + return S_ISREG(cast(u32)s.mode) +} + +is_file_path :: proc(path: string, follow_links: bool = true) -> bool { + s: OS_Stat + err: Errno + if follow_links { + s, err = _stat(path) + } else { + s, err = _lstat(path) + } + if err != ERROR_NONE { + return false + } + return S_ISREG(cast(u32)s.mode) +} + + +is_dir_handle :: proc(fd: Handle) -> bool { + s, err := _fstat(fd) + if err != ERROR_NONE { + return false + } + return S_ISDIR(cast(u32)s.mode) +} + +is_dir_path :: proc(path: string, follow_links: bool = true) -> bool { + s: OS_Stat + err: Errno + if follow_links { + s, err = _stat(path) + } else { + s, err = _lstat(path) + } + if err != ERROR_NONE { + return false + } + return S_ISDIR(cast(u32)s.mode) +} + +is_file :: proc {is_file_path, is_file_handle} +is_dir :: proc {is_dir_path, is_dir_handle} + + +rename :: proc(old: string, new: string) -> bool { + old_cstr := strings.clone_to_cstring(old, context.temp_allocator) + new_cstr := strings.clone_to_cstring(new, context.temp_allocator) + return _unix_rename(old_cstr, new_cstr) != -1 +} + +remove :: proc(path: string) -> bool { + path_cstr := strings.clone_to_cstring(path, context.temp_allocator) + return _unix_remove(path_cstr) != -1 +} @private _stat :: proc(path: string) -> (OS_Stat, Errno) { @@ -553,6 +628,8 @@ heap_alloc :: proc(size: int) -> rawptr { return _unix_calloc(1, size) } heap_resize :: proc(ptr: rawptr, new_size: int) -> rawptr { + // NOTE: _unix_realloc doesn't guarantee new memory will be zeroed on + // POSIX platforms. Ensure your caller takes this into account. return _unix_realloc(ptr, new_size) } heap_free :: proc(ptr: rawptr) { diff --git a/core/os/os_freebsd.odin b/core/os/os_freebsd.odin index e9314b468..82317532d 100644 --- a/core/os/os_freebsd.odin +++ b/core/os/os_freebsd.odin @@ -378,6 +378,8 @@ heap_alloc :: proc(size: int) -> rawptr { } heap_resize :: proc(ptr: rawptr, new_size: int) -> rawptr { + // NOTE: _unix_realloc doesn't guarantee new memory will be zeroed on + // POSIX platforms. Ensure your caller takes this into account. return _unix_realloc(ptr, c.size_t(new_size)); } diff --git a/core/os/os_linux.odin b/core/os/os_linux.odin index 3eb76249c..ae4a03944 100644 --- a/core/os/os_linux.odin +++ b/core/os/os_linux.odin @@ -727,6 +727,8 @@ heap_alloc :: proc(size: int) -> rawptr { } heap_resize :: proc(ptr: rawptr, new_size: int) -> rawptr { + // NOTE: _unix_realloc doesn't guarantee new memory will be zeroed on + // POSIX platforms. Ensure your caller takes this into account. 
return _unix_realloc(ptr, c.size_t(new_size)) } diff --git a/core/path/filepath/match.odin b/core/path/filepath/match.odin index c045f3ece..6399f86a2 100644 --- a/core/path/filepath/match.odin +++ b/core/path/filepath/match.odin @@ -227,11 +227,10 @@ glob :: proc(pattern: string, allocator := context.allocator) -> (matches: []str return m[:], .None } - temp_buf: [8]byte - dir, file := split(pattern) volume_len := 0 when ODIN_OS == .Windows { + temp_buf: [8]byte volume_len, dir = clean_glob_path_windows(dir, temp_buf[:]) } else { dir = clean_glob_path(dir) diff --git a/core/path/filepath/walk.odin b/core/path/filepath/walk.odin index 29d4fd5b1..dad63cc09 100644 --- a/core/path/filepath/walk.odin +++ b/core/path/filepath/walk.odin @@ -71,7 +71,7 @@ _walk :: proc(info: os.File_Info, walk_proc: Walk_Proc) -> (err: os.Errno, skip_ @(private) read_dir :: proc(dir_name: string, allocator := context.temp_allocator) -> ([]os.File_Info, os.Errno) { - f, err := os.open(dir_name) + f, err := os.open(dir_name, os.O_RDONLY) if err != 0 { return nil, err } diff --git a/core/reflect/reflect.odin b/core/reflect/reflect.odin index 7f64d0974..05b3a5da0 100644 --- a/core/reflect/reflect.odin +++ b/core/reflect/reflect.odin @@ -234,7 +234,7 @@ is_nil :: proc(v: any) -> bool { return true } data := as_bytes(v) - if data != nil { + if data == nil { return true } for v in data { diff --git a/core/reflect/types.odin b/core/reflect/types.odin index 74778013a..a9a4a8d48 100644 --- a/core/reflect/types.odin +++ b/core/reflect/types.odin @@ -472,6 +472,9 @@ write_type_writer :: proc(w: io.Writer, ti: ^Type_Info, n_written: ^int = nil) - write_type(w, info.elem, &n) or_return case Type_Info_Enumerated_Array: + if info.is_sparse { + io.write_string(w, "#sparse", &n) or_return + } io.write_string(w, "[", &n) or_return write_type(w, info.index, &n) or_return io.write_string(w, "]", &n) or_return diff --git a/core/runtime/core.odin b/core/runtime/core.odin index 6c2ab1405..fec51f236 100644 --- a/core/runtime/core.odin +++ b/core/runtime/core.odin @@ -95,6 +95,7 @@ Type_Info_Enumerated_Array :: struct { count: int, min_value: Type_Info_Enum_Value, max_value: Type_Info_Enum_Value, + is_sparse: bool, } Type_Info_Dynamic_Array :: struct {elem: ^Type_Info, elem_size: int} Type_Info_Slice :: struct {elem: ^Type_Info, elem_size: int} diff --git a/core/runtime/core_builtin.odin b/core/runtime/core_builtin.odin index 44da894c1..3bafc0b1d 100644 --- a/core/runtime/core_builtin.odin +++ b/core/runtime/core_builtin.odin @@ -614,6 +614,10 @@ raw_data :: proc{raw_array_data, raw_slice_data, raw_dynamic_array_data, raw_str @(disabled=ODIN_DISABLE_ASSERT) assert :: proc(condition: bool, message := "", loc := #caller_location) { if !condition { + // NOTE(bill): This is wrapped in a procedure call + // to improve performance to make the CPU not + // execute speculatively, making it about an order of + // magnitude faster proc(message: string, loc: Source_Code_Location) { p := context.assertion_failure_proc if p == nil { diff --git a/core/runtime/internal.odin b/core/runtime/internal.odin index 0d0e196c4..30798f623 100644 --- a/core/runtime/internal.odin +++ b/core/runtime/internal.odin @@ -37,10 +37,8 @@ bswap_64 :: proc "contextless" (x: u64) -> u64 { bswap_128 :: proc "contextless" (x: u128) -> u128 { z := transmute([4]u32)x - z[0] = bswap_32(z[3]) - z[1] = bswap_32(z[2]) - z[2] = bswap_32(z[1]) - z[3] = bswap_32(z[0]) + z[0], z[3] = bswap_32(z[3]), bswap_32(z[0]) + z[1], z[2] = bswap_32(z[2]), bswap_32(z[1]) return transmute(u128)z } 
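The `#sparse` support threaded through the hunks above (the parser's `#sparse` directive, the `is_sparse` field on `Type_Info_Enumerated_Array`, and the reflect type writer) applies to enumerated arrays whose index enum is non-contiguous. A minimal sketch of the resulting syntax, assuming it works as these hunks imply; the `Day` and `Hours` names are invented for illustration:

```odin
package sparse_example

import "core:fmt"

// `Day` is deliberately non-contiguous, so the enumerated array type
// below must be marked #sparse (per the parser error message above).
Day :: enum {
	Mon = 1,
	Wed = 3,
	Fri = 5,
}

Hours :: #sparse[Day]int

main :: proc() {
	h: Hours
	h[.Mon] = 8
	h[.Fri] = 4
	fmt.println(h[.Mon] + h[.Fri]) // 12
}
```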
diff --git a/core/runtime/print.odin b/core/runtime/print.odin index 8a14eba08..06740bc75 100644 --- a/core/runtime/print.odin +++ b/core/runtime/print.odin @@ -143,11 +143,21 @@ print_int :: proc "contextless" (x: int) { print_i64(i64(x)) } print_caller_location :: proc "contextless" (using loc: Source_Code_Location) { print_string(file_path) - print_byte('(') - print_u64(u64(line)) - print_byte(':') - print_u64(u64(column)) - print_byte(')') + when ODIN_ERROR_POS_STYLE == .Default { + print_byte('(') + print_u64(u64(line)) + print_byte(':') + print_u64(u64(column)) + print_byte(')') + } else when ODIN_ERROR_POS_STYLE == .Unix { + print_byte(':') + print_u64(u64(line)) + print_byte(':') + print_u64(u64(column)) + print_byte(':') + } else { + #panic("unhandled ODIN_ERROR_POS_STYLE") + } } print_typeid :: proc "contextless" (id: typeid) { if id == nil { @@ -250,6 +260,9 @@ print_type :: proc "contextless" (ti: ^Type_Info) { print_type(info.elem) case Type_Info_Enumerated_Array: + if info.is_sparse { + print_string("#sparse") + } print_byte('[') print_type(info.index) print_byte(']') diff --git a/core/slice/slice.odin b/core/slice/slice.odin index 426829a22..5fecc76b1 100644 --- a/core/slice/slice.odin +++ b/core/slice/slice.odin @@ -305,21 +305,21 @@ filter :: proc(s: $S/[]$U, f: proc(U) -> bool, allocator := context.allocator) - } scanner :: proc (s: $S/[]$U, initializer: $V, f: proc(V, U)->V, allocator := context.allocator) -> []V { - if len(s) == 0 { return {} } + if len(s) == 0 { return {} } - res := make([]V, len(s), allocator) - p := as_ptr(s) - q := as_ptr(res) - r := initializer + res := make([]V, len(s), allocator) + p := as_ptr(s) + q := as_ptr(res) + r := initializer - for l := len(s); l > 0; l -= 1 { - r = f(r, p[0]) - q[0] = r - p = p[1:] - q = q[1:] - } + for l := len(s); l > 0; l -= 1 { + r = f(r, p[0]) + q[0] = r + p = p[1:] + q = q[1:] + } - return res + return res } diff --git a/core/slice/sort.odin b/core/slice/sort.odin index d9755ad0e..8a2dec039 100644 --- a/core/slice/sort.odin +++ b/core/slice/sort.odin @@ -1,10 +1,5 @@ package slice -import "core:intrinsics" -_ :: intrinsics - -ORD :: intrinsics.type_is_ordered - Ordering :: enum { Less = -1, Equal = 0, @@ -38,7 +33,7 @@ cmp_proc :: proc($E: typeid) -> (proc(E, E) -> Ordering) where ORD(E) { sort :: proc(data: $T/[]$E) where ORD(E) { when size_of(E) != 0 { if n := len(data); n > 1 { - _quick_sort(data, 0, n, _max_depth(n)) + _quick_sort_general(data, 0, n, _max_depth(n), struct{}{}, .Ordered) } } } @@ -48,7 +43,7 @@ sort :: proc(data: $T/[]$E) where ORD(E) { sort_by :: proc(data: $T/[]$E, less: proc(i, j: E) -> bool) { when size_of(E) != 0 { if n := len(data); n > 1 { - _quick_sort_less(data, 0, n, _max_depth(n), less) + _quick_sort_general(data, 0, n, _max_depth(n), less, .Less) } } } @@ -56,7 +51,33 @@ sort_by :: proc(data: $T/[]$E, less: proc(i, j: E) -> bool) { sort_by_cmp :: proc(data: $T/[]$E, cmp: proc(i, j: E) -> Ordering) { when size_of(E) != 0 { if n := len(data); n > 1 { - _quick_sort_cmp(data, 0, n, _max_depth(n), cmp) + _quick_sort_general(data, 0, n, _max_depth(n), cmp, .Cmp) + } + } +} + +// stable_sort sorts a slice +stable_sort :: proc(data: $T/[]$E) where ORD(E) { + when size_of(E) != 0 { + if n := len(data); n > 1 { + _stable_sort_general(data, struct{}{}, .Ordered) + } + } +} + +// stable_sort_by sorts a slice with a given procedure to test whether two values are ordered "i < j" +stable_sort_by :: proc(data: $T/[]$E, less: proc(i, j: E) -> bool) { + when size_of(E) != 0 { + if n := len(data); n > 
1 { + _stable_sort_general(data, less, .Less) + } + } +} + +stable_sort_by_cmp :: proc(data: $T/[]$E, cmp: proc(i, j: E) -> Ordering) { + when size_of(E) != 0 { + if n := len(data); n > 1 { + _stable_sort_general(data, cmp, .Cmp) } } } @@ -79,6 +100,7 @@ is_sorted_by :: proc(array: $T/[]$E, less: proc(i, j: E) -> bool) -> bool { return true } +is_sorted_by_cmp :: is_sorted_cmp is_sorted_cmp :: proc(array: $T/[]$E, cmp: proc(i, j: E) -> Ordering) -> bool { for i := len(array)-1; i > 0; i -= 1 { if cmp(array[i], array[i-1]) == .Equal { @@ -140,489 +162,10 @@ is_sorted_by_key :: proc(array: $T/[]$E, key: proc(E) -> $K) -> bool where ORD(K return true } - - @(private) -_max_depth :: proc(n: int) -> int { // 2*ceil(log2(n+1)) - depth: int +_max_depth :: proc(n: int) -> (depth: int) { // 2*ceil(log2(n+1)) for i := n; i > 0; i >>= 1 { depth += 1 } return depth * 2 } - -@(private) -_quick_sort :: proc(data: $T/[]$E, a, b, max_depth: int) where ORD(E) #no_bounds_check { - median3 :: proc(data: T, m1, m0, m2: int) #no_bounds_check { - if data[m1] < data[m0] { - swap(data, m1, m0) - } - if data[m2] < data[m1] { - swap(data, m2, m1) - if data[m1] < data[m0] { - swap(data, m1, m0) - } - } - } - - do_pivot :: proc(data: T, lo, hi: int) -> (midlo, midhi: int) #no_bounds_check { - m := int(uint(lo+hi)>>1) - if hi-lo > 40 { - s := (hi-lo)/8 - median3(data, lo, lo+s, lo+s*2) - median3(data, m, m-s, m+s) - median3(data, hi-1, hi-1-s, hi-1-s*2) - } - median3(data, lo, m, hi-1) - - - pivot := lo - a, c := lo+1, hi-1 - - for ; a < c && data[a] < data[pivot]; a += 1 { - } - b := a - - for { - for ; b < c && !(data[pivot] < data[b]); b += 1 { // data[b] <= pivot - } - for ; b < c && data[pivot] < data[c-1]; c -=1 { // data[c-1] > pivot - } - if b >= c { - break - } - - swap(data, b, c-1) - b += 1 - c -= 1 - } - - protect := hi-c < 5 - if !protect && hi-c < (hi-lo)/4 { - dups := 0 - if !(data[pivot] < data[hi-1]) { - swap(data, c, hi-1) - c += 1 - dups += 1 - } - if !(data[b-1] < data[pivot]) { - b -= 1 - dups += 1 - } - - if !(data[m] < data[pivot]) { - swap(data, m, b-1) - b -= 1 - dups += 1 - } - protect = dups > 1 - } - if protect { - for { - for ; a < b && !(data[b-1] < data[pivot]); b -= 1 { - } - for ; a < b && data[a] < data[pivot]; a += 1 { - } - if a >= b { - break - } - swap(data, a, b-1) - a += 1 - b -= 1 - } - } - swap(data, pivot, b-1) - return b-1, c - } - - - a, b, max_depth := a, b, max_depth - - if b-a > 12 { // only use shell sort for lengths <= 12 - if max_depth == 0 { - _heap_sort(data, a, b) - return - } - max_depth -= 1 - mlo, mhi := do_pivot(data, a, b) - if mlo-a < b-mhi { - _quick_sort(data, a, mlo, max_depth) - a = mhi - } else { - _quick_sort(data, mhi, b, max_depth) - b = mlo - } - } - if b-a > 1 { - // Shell short with gap 6 - for i in a+6.. 
a && data[j] < data[j-1]; j -= 1 { - swap(data, j, j-1) - } - } -} - -@(private) -_heap_sort :: proc(data: $T/[]$E, a, b: int) where ORD(E) #no_bounds_check { - sift_down :: proc(data: T, lo, hi, first: int) #no_bounds_check { - root := lo - for { - child := 2*root + 1 - if child >= hi { - break - } - if child+1 < hi && data[first+child] < data[first+child+1] { - child += 1 - } - if !(data[first+root] < data[first+child]) { - return - } - swap(data, first+root, first+child) - root = child - } - } - - - first, lo, hi := a, 0, b-a - - for i := (hi-1)/2; i >= 0; i -= 1 { - sift_down(data, i, hi, first) - } - - for i := hi-1; i >= 0; i -= 1 { - swap(data, first, first+i) - sift_down(data, lo, i, first) - } -} - - - - - - -@(private) -_quick_sort_less :: proc(data: $T/[]$E, a, b, max_depth: int, less: proc(i, j: E) -> bool) #no_bounds_check { - median3 :: proc(data: T, m1, m0, m2: int, less: proc(i, j: E) -> bool) #no_bounds_check { - if less(data[m1], data[m0]) { - swap(data, m1, m0) - } - if less(data[m2], data[m1]) { - swap(data, m2, m1) - if less(data[m1], data[m0]) { - swap(data, m1, m0) - } - } - } - - do_pivot :: proc(data: T, lo, hi: int, less: proc(i, j: E) -> bool) -> (midlo, midhi: int) #no_bounds_check { - m := int(uint(lo+hi)>>1) - if hi-lo > 40 { - s := (hi-lo)/8 - median3(data, lo, lo+s, lo+s*2, less) - median3(data, m, m-s, m+s, less) - median3(data, hi-1, hi-1-s, hi-1-s*2, less) - } - median3(data, lo, m, hi-1, less) - - pivot := lo - a, c := lo+1, hi-1 - - for ; a < c && less(data[a], data[pivot]); a += 1 { - } - b := a - - for { - for ; b < c && !less(data[pivot], data[b]); b += 1 { // data[b] <= pivot - } - for ; b < c && less(data[pivot], data[c-1]); c -=1 { // data[c-1] > pivot - } - if b >= c { - break - } - - swap(data, b, c-1) - b += 1 - c -= 1 - } - - protect := hi-c < 5 - if !protect && hi-c < (hi-lo)/4 { - dups := 0 - if !less(data[pivot], data[hi-1]) { - swap(data, c, hi-1) - c += 1 - dups += 1 - } - if !less(data[b-1], data[pivot]) { - b -= 1 - dups += 1 - } - - if !less(data[m], data[pivot]) { - swap(data, m, b-1) - b -= 1 - dups += 1 - } - protect = dups > 1 - } - if protect { - for { - for ; a < b && !less(data[b-1], data[pivot]); b -= 1 { - } - for ; a < b && less(data[a], data[pivot]); a += 1 { - } - if a >= b { - break - } - swap(data, a, b-1) - a += 1 - b -= 1 - } - } - swap(data, pivot, b-1) - return b-1, c - } - - - a, b, max_depth := a, b, max_depth - - if b-a > 12 { // only use shell sort for lengths <= 12 - if max_depth == 0 { - _heap_sort_less(data, a, b, less) - return - } - max_depth -= 1 - mlo, mhi := do_pivot(data, a, b, less) - if mlo-a < b-mhi { - _quick_sort_less(data, a, mlo, max_depth, less) - a = mhi - } else { - _quick_sort_less(data, mhi, b, max_depth, less) - b = mlo - } - } - if b-a > 1 { - // Shell short with gap 6 - for i in a+6.. bool) #no_bounds_check { - for i in a+1.. 
a && less(data[j], data[j-1]); j -= 1 { - swap(data, j, j-1) - } - } -} - -@(private) -_heap_sort_less :: proc(data: $T/[]$E, a, b: int, less: proc(i, j: E) -> bool) #no_bounds_check { - sift_down :: proc(data: T, lo, hi, first: int, less: proc(i, j: E) -> bool) #no_bounds_check { - root := lo - for { - child := 2*root + 1 - if child >= hi { - break - } - if child+1 < hi && less(data[first+child], data[first+child+1]) { - child += 1 - } - if !less(data[first+root], data[first+child]) { - return - } - swap(data, first+root, first+child) - root = child - } - } - - - first, lo, hi := a, 0, b-a - - for i := (hi-1)/2; i >= 0; i -= 1 { - sift_down(data, i, hi, first, less) - } - - for i := hi-1; i >= 0; i -= 1 { - swap(data, first, first+i) - sift_down(data, lo, i, first, less) - } -} - - - - - - -@(private) -_quick_sort_cmp :: proc(data: $T/[]$E, a, b, max_depth: int, cmp: proc(i, j: E) -> Ordering) #no_bounds_check { - median3 :: proc(data: T, m1, m0, m2: int, cmp: proc(i, j: E) -> Ordering) #no_bounds_check { - if cmp(data[m1], data[m0]) == .Less { - swap(data, m1, m0) - } - if cmp(data[m2], data[m1]) == .Less { - swap(data, m2, m1) - if cmp(data[m1], data[m0]) == .Less { - swap(data, m1, m0) - } - } - } - - do_pivot :: proc(data: T, lo, hi: int, cmp: proc(i, j: E) -> Ordering) -> (midlo, midhi: int) #no_bounds_check { - m := int(uint(lo+hi)>>1) - if hi-lo > 40 { - s := (hi-lo)/8 - median3(data, lo, lo+s, lo+s*2, cmp) - median3(data, m, m-s, m+s, cmp) - median3(data, hi-1, hi-1-s, hi-1-s*2, cmp) - } - median3(data, lo, m, hi-1, cmp) - - pivot := lo - a, c := lo+1, hi-1 - - for ; a < c && cmp(data[a], data[pivot]) == .Less; a += 1 { - } - b := a - - for { - for ; b < c && cmp(data[pivot], data[b]) >= .Equal; b += 1 { // data[b] <= pivot - } - for ; b < c && cmp(data[pivot], data[c-1]) == .Less; c -=1 { // data[c-1] > pivot - } - if b >= c { - break - } - - swap(data, b, c-1) - b += 1 - c -= 1 - } - - protect := hi-c < 5 - if !protect && hi-c < (hi-lo)/4 { - dups := 0 - if cmp(data[pivot], data[hi-1]) != .Less { - swap(data, c, hi-1) - c += 1 - dups += 1 - } - if cmp(data[b-1], data[pivot]) != .Less { - b -= 1 - dups += 1 - } - - if cmp(data[m], data[pivot]) != .Less { - swap(data, m, b-1) - b -= 1 - dups += 1 - } - protect = dups > 1 - } - if protect { - for { - for ; a < b && cmp(data[b-1], data[pivot]) >= .Equal; b -= 1 { - } - for ; a < b && cmp(data[a], data[pivot]) == .Less; a += 1 { - } - if a >= b { - break - } - swap(data, a, b-1) - a += 1 - b -= 1 - } - } - swap(data, pivot, b-1) - return b-1, c - } - - - a, b, max_depth := a, b, max_depth - - if b-a > 12 { // only use shell sort for lengths <= 12 - if max_depth == 0 { - _heap_sort_cmp(data, a, b, cmp) - return - } - max_depth -= 1 - mlo, mhi := do_pivot(data, a, b, cmp) - if mlo-a < b-mhi { - _quick_sort_cmp(data, a, mlo, max_depth, cmp) - a = mhi - } else { - _quick_sort_cmp(data, mhi, b, max_depth, cmp) - b = mlo - } - } - if b-a > 1 { - // Shell short with gap 6 - for i in a+6.. Ordering) #no_bounds_check { - for i in a+1.. 
a && cmp(data[j], data[j-1]) == .Less; j -= 1 { - swap(data, j, j-1) - } - } -} - -@(private) -_heap_sort_cmp :: proc(data: $T/[]$E, a, b: int, cmp: proc(i, j: E) -> Ordering) #no_bounds_check { - sift_down :: proc(data: T, lo, hi, first: int, cmp: proc(i, j: E) -> Ordering) #no_bounds_check { - root := lo - for { - child := 2*root + 1 - if child >= hi { - break - } - if child+1 < hi && cmp(data[first+child], data[first+child+1]) == .Less { - child += 1 - } - if cmp(data[first+root], data[first+child]) >= .Equal { - return - } - swap(data, first+root, first+child) - root = child - } - } - - - first, lo, hi := a, 0, b-a - - for i := (hi-1)/2; i >= 0; i -= 1 { - sift_down(data, i, hi, first, cmp) - } - - for i := hi-1; i >= 0; i -= 1 { - swap(data, first, first+i) - sift_down(data, lo, i, first, cmp) - } -} - - - diff --git a/core/slice/sort_private.odin b/core/slice/sort_private.odin new file mode 100644 index 000000000..7abd2f1ce --- /dev/null +++ b/core/slice/sort_private.odin @@ -0,0 +1,200 @@ +//+private +package slice + +import "core:intrinsics" +_ :: intrinsics + +ORD :: intrinsics.type_is_ordered + +Sort_Kind :: enum { + Ordered, + Less, + Cmp, +} + +_quick_sort_general :: proc(data: $T/[]$E, a, b, max_depth: int, call: $P, $KIND: Sort_Kind) where (ORD(E) && KIND == .Ordered) || (KIND != .Ordered) #no_bounds_check { + less :: #force_inline proc(a, b: E, call: P) -> bool { + when KIND == .Ordered { + return a < b + } else when KIND == .Less { + return call(a, b) + } else when KIND == .Cmp { + return call(a, b) == .Less + } else { + #panic("unhandled Sort_Kind") + } + } + + insertion_sort :: proc(data: $T/[]$E, a, b: int, call: P) #no_bounds_check { + for i in a+1.. a && less(data[j], data[j-1], call); j -= 1 { + swap(data, j, j-1) + } + } + } + + heap_sort :: proc(data: $T/[]$E, a, b: int, call: P) #no_bounds_check { + sift_down :: proc(data: T, lo, hi, first: int, call: P) #no_bounds_check { + root := lo + for { + child := 2*root + 1 + if child >= hi { + break + } + if child+1 < hi && less(data[first+child], data[first+child+1], call) { + child += 1 + } + if !less(data[first+root], data[first+child], call) { + return + } + swap(data, first+root, first+child) + root = child + } + } + + + first, lo, hi := a, 0, b-a + + for i := (hi-1)/2; i >= 0; i -= 1 { + sift_down(data, i, hi, first, call) + } + + for i := hi-1; i >= 0; i -= 1 { + swap(data, first, first+i) + sift_down(data, lo, i, first, call) + } + } + + median3 :: proc(data: T, m1, m0, m2: int, call: P) #no_bounds_check { + if less(data[m1], data[m0], call) { + swap(data, m1, m0) + } + if less(data[m2], data[m1], call) { + swap(data, m2, m1) + if less(data[m1], data[m0], call) { + swap(data, m1, m0) + } + } + } + + do_pivot :: proc(data: T, lo, hi: int, call: P) -> (midlo, midhi: int) #no_bounds_check { + m := int(uint(lo+hi)>>1) + if hi-lo > 40 { + s := (hi-lo)/8 + median3(data, lo, lo+s, lo+s*2, call) + median3(data, m, m-s, m+s, call) + median3(data, hi-1, hi-1-s, hi-1-s*2, call) + } + median3(data, lo, m, hi-1, call) + + pivot := lo + a, c := lo+1, hi-1 + + + for ; a < c && less(data[a], data[pivot], call); a += 1 { + } + b := a + + for { + for ; b < c && !less(data[pivot], data[b], call); b += 1 { // data[b] <= pivot + } + for ; b < c && less(data[pivot], data[c-1], call); c -=1 { // data[c-1] > pivot + } + if b >= c { + break + } + + swap(data, b, c-1) + b += 1 + c -= 1 + } + + protect := hi-c < 5 + if !protect && hi-c < (hi-lo)/4 { + dups := 0 + if !less(data[pivot], data[hi-1], call) { + swap(data, c, hi-1) + c += 1 + dups 
+= 1 + } + if !less(data[b-1], data[pivot], call) { + b -= 1 + dups += 1 + } + + if !less(data[m], data[pivot], call) { + swap(data, m, b-1) + b -= 1 + dups += 1 + } + protect = dups > 1 + } + if protect { + for { + for ; a < b && !less(data[b-1], data[pivot], call); b -= 1 { + } + for ; a < b && less(data[a], data[pivot], call); a += 1 { + } + if a >= b { + break + } + swap(data, a, b-1) + a += 1 + b -= 1 + } + } + swap(data, pivot, b-1) + return b-1, c + } + + + a, b, max_depth := a, b, max_depth + + if b-a > 12 { // only use shell sort for lengths <= 12 + if max_depth == 0 { + heap_sort(data, a, b, call) + return + } + max_depth -= 1 + mlo, mhi := do_pivot(data, a, b, call) + if mlo-a < b-mhi { + _quick_sort_general(data, a, mlo, max_depth, call, KIND) + a = mhi + } else { + _quick_sort_general(data, mhi, b, max_depth, call, KIND) + b = mlo + } + } + if b-a > 1 { + // Shell short with gap 6 + for i in a+6.. bool { + when KIND == .Ordered { + return a < b + } else when KIND == .Less { + return call(a, b) + } else when KIND == .Cmp { + return call(a, b) == .Less + } else { + #panic("unhandled Sort_Kind") + } + } + + n := len(data) + for i in 1.. 0 && less(data[j], data[j-1], call); j -= 1 { + swap(data, j, j-1) + } + } +} diff --git a/core/strings/strings.odin b/core/strings/strings.odin index b93c5bcc0..4daa0bacd 100644 --- a/core/strings/strings.odin +++ b/core/strings/strings.odin @@ -298,13 +298,7 @@ split_after_n :: proc(s, sep: string, n: int, allocator := context.allocator) -> @private -_split_iterator :: proc(s: ^string, sep: string, sep_save, n: int) -> (res: string, ok: bool) { - s, n := s, n - - if n == 0 { - return - } - +_split_iterator :: proc(s: ^string, sep: string, sep_save: int) -> (res: string, ok: bool) { if sep == "" { res = s[:] ok = true @@ -312,47 +306,88 @@ _split_iterator :: proc(s: ^string, sep: string, sep_save, n: int) -> (res: stri return } - if n < 0 { - n = count(s^, sep) + 1 - } - - n -= 1 - - i := 0 - for ; i < n; i += 1 { - m := index(s^, sep) - if m < 0 { - break - } + m := index(s^, sep) + if m < 0 { + // not found + res = s[:] + ok = res != "" + s^ = s[len(s):] + } else { res = s[:m+sep_save] ok = true s^ = s[m+len(sep):] - return } - res = s[:] - ok = res != "" - s^ = s[len(s):] return } split_iterator :: proc(s: ^string, sep: string) -> (string, bool) { - return _split_iterator(s, sep, 0, -1) -} - -split_n_iterator :: proc(s: ^string, sep: string, n: int) -> (string, bool) { - return _split_iterator(s, sep, 0, n) + return _split_iterator(s, sep, 0) } split_after_iterator :: proc(s: ^string, sep: string) -> (string, bool) { - return _split_iterator(s, sep, len(sep), -1) -} - -split_after_n_iterator :: proc(s: ^string, sep: string, n: int) -> (string, bool) { - return _split_iterator(s, sep, len(sep), n) + return _split_iterator(s, sep, len(sep)) } +@(private) +_trim_cr :: proc(s: string) -> string { + n := len(s) + if n > 0 { + if s[n-1] == '\r' { + return s[:n-1] + } + } + return s +} + +split_lines :: proc(s: string, allocator := context.allocator) -> []string { + sep :: "\n" + lines := _split(s, sep, 0, -1, allocator) + for line in &lines { + line = _trim_cr(line) + } + return lines +} + +split_lines_n :: proc(s: string, n: int, allocator := context.allocator) -> []string { + sep :: "\n" + lines := _split(s, sep, 0, n, allocator) + for line in &lines { + line = _trim_cr(line) + } + return lines +} + +split_lines_after :: proc(s: string, allocator := context.allocator) -> []string { + sep :: "\n" + lines := _split(s, sep, len(sep), -1, allocator) + 
for line in &lines { + line = _trim_cr(line) + } + return lines +} + +split_lines_after_n :: proc(s: string, n: int, allocator := context.allocator) -> []string { + sep :: "\n" + lines := _split(s, sep, len(sep), n, allocator) + for line in &lines { + line = _trim_cr(line) + } + return lines +} + +split_lines_iterator :: proc(s: ^string) -> (line: string, ok: bool) { + sep :: "\n" + line = _split_iterator(s, sep, 0) or_return + return _trim_cr(line), true +} + +split_lines_after_iterator :: proc(s: ^string) -> (line: string, ok: bool) { + sep :: "\n" + line = _split_iterator(s, sep, len(sep)) or_return + return _trim_cr(line), true +} diff --git a/core/testing/runner_other.odin b/core/testing/runner_other.odin index 3978a3c83..f3271d209 100644 --- a/core/testing/runner_other.odin +++ b/core/testing/runner_other.odin @@ -6,7 +6,7 @@ import "core:time" run_internal_test :: proc(t: ^T, it: Internal_Test) { // TODO(bill): Catch panics on other platforms - it.p(t); + it.p(t) } _fail_timeout :: proc(t: ^T, duration: time.Duration, loc := #caller_location) { diff --git a/core/thread/thread_unix.odin b/core/thread/thread_unix.odin index cee278c7a..6cb91df86 100644 --- a/core/thread/thread_unix.odin +++ b/core/thread/thread_unix.odin @@ -167,8 +167,6 @@ _join_multiple :: proc(threads: ..^Thread) { _destroy :: proc(t: ^Thread) { _join(t) - sync.condition_destroy(&t.start_gate) - sync.mutex_destroy(&t.start_mutex) t.unix_thread = {} free(t, t.creation_allocator) } diff --git a/examples/all/all_main.odin b/examples/all/all_main.odin index c24238602..e38dd5150 100644 --- a/examples/all/all_main.odin +++ b/examples/all/all_main.odin @@ -5,25 +5,65 @@ package all import bufio "core:bufio" import bytes "core:bytes" + import c "core:c" import libc "core:c/libc" + import compress "core:compress" import gzip "core:compress/gzip" import zlib "core:compress/zlib" -import container "core:container" + +import bit_array "core:container/bit_array" +import priority_queue "core:container/priority_queue" +import queue "core:container/queue" +import small_array "core:container/small_array" +import lru "core:container/lru" + +import crypto "core:crypto" +import blake "core:crypto/blake" +import blake2b "core:crypto/blake2b" +import blake2s "core:crypto/blake2s" +import chacha20 "core:crypto/chacha20" +import chacha20poly1305 "core:crypto/chacha20poly1305" +import gost "core:crypto/gost" +import groestl "core:crypto/groestl" +import haval "core:crypto/haval" +import jh "core:crypto/jh" +import keccak "core:crypto/keccak" +import md2 "core:crypto/md2" +import md4 "core:crypto/md4" +import md5 "core:crypto/md5" +import poly1305 "core:crypto/poly1305" +import ripemd "core:crypto/ripemd" +import sha1 "core:crypto/sha1" +import sha2 "core:crypto/sha2" +import sha3 "core:crypto/sha3" +import shake "core:crypto/shake" +import sm3 "core:crypto/sm3" +import streebog "core:crypto/streebog" +import tiger "core:crypto/tiger" +import tiger2 "core:crypto/tiger2" +import crypto_util "core:crypto/util" +import whirlpool "core:crypto/whirlpool" +import x25519 "core:crypto/x25519" + import dynlib "core:dynlib" -import encoding "core:encoding" + import base32 "core:encoding/base32" import base64 "core:encoding/base64" import csv "core:encoding/csv" import hxa "core:encoding/hxa" import json "core:encoding/json" + import fmt "core:fmt" import hash "core:hash" + import image "core:image" import png "core:image/png" + import io "core:io" import log "core:log" + import math "core:math" import big "core:math/big" import bits "core:math/bits" 
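For reference, a minimal usage sketch of the split_lines / split_lines_iterator procedures added to core/strings above. The package name, the sample strings, and the use of core:fmt are illustrative assumptions, not part of this change:

package main

import "core:fmt"
import "core:strings"

main :: proc() {
	// split_lines_iterator yields one line per call, consuming `s`
	// and trimming a trailing '\r', so CRLF and LF input behave the same.
	s := "alpha\r\nbeta\ngamma" // sample input, chosen for illustration
	for {
		line, ok := strings.split_lines_iterator(&s)
		if !ok { break }
		fmt.println(line) // prints "alpha", "beta", "gamma"
	}

	// split_lines allocates and returns the trimmed lines as a []string.
	lines := strings.split_lines("one\r\ntwo\nthree")
	defer delete(lines)
	fmt.println(len(lines)) // 3
}
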
@@ -32,16 +72,22 @@ import linalg "core:math/linalg" import glm "core:math/linalg/glsl" import hlm "core:math/linalg/hlsl" import rand "core:math/rand" + import mem "core:mem" +// import virtual "core:mem/virtual" + import ast "core:odin/ast" import doc_format "core:odin/doc-format" import odin_format "core:odin/format" import odin_parser "core:odin/parser" import odin_printer "core:odin/printer" import odin_tokenizer "core:odin/tokenizer" + import os "core:os" + import slashpath "core:path/slashpath" import filepath "core:path/filepath" + import reflect "core:reflect" import runtime "core:runtime" import slice "core:slice" @@ -50,9 +96,11 @@ import strconv "core:strconv" import strings "core:strings" import sync "core:sync" import sync2 "core:sync/sync2" +import testing "core:testing" import scanner "core:text/scanner" import thread "core:thread" import time "core:time" + import unicode "core:unicode" import utf8 "core:unicode/utf8" import utf16 "core:unicode/utf16" @@ -67,9 +115,38 @@ _ :: libc _ :: compress _ :: gzip _ :: zlib -_ :: container +_ :: bit_array +_ :: priority_queue +_ :: queue +_ :: small_array +_ :: crypto +_ :: blake +_ :: blake2b +_ :: blake2s +_ :: chacha20 +_ :: chacha20poly1305 +_ :: gost +_ :: groestl +_ :: haval +_ :: jh +_ :: keccak +_ :: md2 +_ :: md4 +_ :: md5 +_ :: poly1305 +_ :: ripemd +_ :: sha1 +_ :: sha2 +_ :: sha3 +_ :: shake +_ :: sm3 +_ :: streebog +_ :: tiger +_ :: tiger2 +_ :: crypto_util +_ :: whirlpool +_ :: x25519 _ :: dynlib -_ :: encoding _ :: base32 _ :: base64 _ :: csv @@ -107,6 +184,7 @@ _ :: strconv _ :: strings _ :: sync _ :: sync2 +_ :: testing _ :: scanner _ :: thread _ :: time diff --git a/examples/all/all_vendor.odin b/examples/all/all_vendor.odin index 777c184f9..f94e092af 100644 --- a/examples/all/all_vendor.odin +++ b/examples/all/all_vendor.odin @@ -1,26 +1,47 @@ //+build windows package all -import glfw "vendor:glfw" -import gl "vendor:OpenGL" -import rl "vendor:raylib" -import PM "vendor:portmidi" + +import botan "vendor:botan" +import ENet "vendor:ENet" +import gl "vendor:OpenGL" +import glfw "vendor:glfw" +import microui "vendor:microui" +import miniaudio "vendor:miniaudio" +import PM "vendor:portmidi" +import rl "vendor:raylib" + import SDL "vendor:sdl2" -import IMG "vendor:sdl2/image" import SDLNet "vendor:sdl2/net" +import IMG "vendor:sdl2/image" import MIX "vendor:sdl2/mixer" import TTF "vendor:sdl2/ttf" -import vk "vendor:vulkan" -import ENet "vendor:ENet" -_ :: glfw +import stb_easy_font "vendor:stb/easy_font" +import stbi "vendor:stb/image" +import stbrp "vendor:stb/rect_pack" +import stbtt "vendor:stb/truetype" +import stb_vorbis "vendor:stb/vorbis" + +import vk "vendor:vulkan" + + +_ :: botan +_ :: ENet _ :: gl -_ :: rl +_ :: glfw +_ :: microui +_ :: miniaudio _ :: PM +_ :: rl _ :: SDL -_ :: IMG _ :: SDLNet +_ :: IMG _ :: MIX _ :: TTF -_ :: vk -_ :: ENet \ No newline at end of file +_ :: stb_easy_font +_ :: stbi +_ :: stbrp +_ :: stbtt +_ :: stb_vorbis +_ :: vk \ No newline at end of file diff --git a/examples/demo/demo.odin b/examples/demo/demo.odin index 13dcf9466..a4b678ae7 100644 --- a/examples/demo/demo.odin +++ b/examples/demo/demo.odin @@ -1921,14 +1921,14 @@ constant_literal_expressions :: proc() { fmt.println("-------") - Partial_Baz :: enum{A=5, B, C, D=16} - #assert(len(Partial_Baz) < len(#partial [Partial_Baz]int)) - PARTIAL_ENUM_ARRAY_CONST :: #partial [Partial_Baz]int{.A ..= .C = 1, .D = 16} + Sparse_Baz :: enum{A=5, B, C, D=16} + #assert(len(Sparse_Baz) < len(#sparse[Sparse_Baz]int)) + 
SPARSE_ENUM_ARRAY_CONST :: #sparse[Sparse_Baz]int{.A ..= .C = 1, .D = 16} - fmt.println(PARTIAL_ENUM_ARRAY_CONST[.A]) - fmt.println(PARTIAL_ENUM_ARRAY_CONST[.B]) - fmt.println(PARTIAL_ENUM_ARRAY_CONST[.C]) - fmt.println(PARTIAL_ENUM_ARRAY_CONST[.D]) + fmt.println(SPARSE_ENUM_ARRAY_CONST[.A]) + fmt.println(SPARSE_ENUM_ARRAY_CONST[.B]) + fmt.println(SPARSE_ENUM_ARRAY_CONST[.C]) + fmt.println(SPARSE_ENUM_ARRAY_CONST[.D]) fmt.println("-------") diff --git a/src/build_settings.cpp b/src/build_settings.cpp index b4a934ec8..610e4f847 100644 --- a/src/build_settings.cpp +++ b/src/build_settings.cpp @@ -165,6 +165,13 @@ enum TimingsExportFormat : i32 { TimingsExportCSV = 2, }; +enum ErrorPosStyle { + ErrorPosStyle_Default, // path(line:column) msg + ErrorPosStyle_Unix, // path:line:column: msg + + ErrorPosStyle_COUNT +}; + // This stores the information for the specify architecture of this build struct BuildContext { // Constants @@ -175,7 +182,9 @@ struct BuildContext { String ODIN_ROOT; // Odin ROOT bool ODIN_DEBUG; // Odin in debug mode bool ODIN_DISABLE_ASSERT; // Whether the default 'assert' et al is disabled in code or not -bool ODIN_DEFAULT_TO_NIL_ALLOCATOR; // Whether the default allocator is a "nil" allocator or not (i.e. it does nothing) + bool ODIN_DEFAULT_TO_NIL_ALLOCATOR; // Whether the default allocator is a "nil" allocator or not (i.e. it does nothing) + + ErrorPosStyle ODIN_ERROR_POS_STYLE; TargetEndianKind endian_kind; @@ -254,6 +263,7 @@ bool ODIN_DEFAULT_TO_NIL_ALLOCATOR; // Whether the default allocator is a "nil isize thread_count; PtrMap defined_values; + }; @@ -843,6 +853,22 @@ bool has_asm_extension(String const &path) { return false; } +// temporary +char *token_pos_to_string(TokenPos const &pos) { + gbString s = gb_string_make_reserve(temporary_allocator(), 128); + String file = get_file_path_string(pos.file_id); + switch (build_context.ODIN_ERROR_POS_STYLE) { + default: /*fallthrough*/ + case ErrorPosStyle_Default: + s = gb_string_append_fmt(s, "%.*s(%d:%d)", LIT(file), pos.line, pos.column); + break; + case ErrorPosStyle_Unix: + s = gb_string_append_fmt(s, "%.*s:%d:%d:", LIT(file), pos.line, pos.column); + break; + } + return s; +} + void init_build_context(TargetMetrics *cross_target) { BuildContext *bc = &build_context; @@ -855,6 +881,31 @@ void init_build_context(TargetMetrics *cross_target) { bc->ODIN_VENDOR = str_lit("odin"); bc->ODIN_VERSION = ODIN_VERSION; bc->ODIN_ROOT = odin_root_dir(); + + { + char const *found = gb_get_env("ODIN_ERROR_POS_STYLE", permanent_allocator()); + if (found) { + ErrorPosStyle kind = ErrorPosStyle_Default; + String style = make_string_c(found); + style = string_trim_whitespace(style); + if (style == "" || style == "default" || style == "odin") { + kind = ErrorPosStyle_Default; + } else if (style == "unix" || style == "gcc" || style == "clang" || style == "llvm") { + kind = ErrorPosStyle_Unix; + } else { + gb_printf_err("Invalid ODIN_ERROR_POS_STYLE: got %.*s\n", LIT(style)); + gb_printf_err("Valid formats:\n"); + gb_printf_err("\t\"default\" or \"odin\"\n"); + gb_printf_err("\t\tpath(line:column) message\n"); + gb_printf_err("\t\"unix\"\n"); + gb_printf_err("\t\tpath:line:column: message\n"); + gb_exit(1); + } + + build_context.ODIN_ERROR_POS_STYLE = kind; + } + } + bc->copy_file_contents = true; diff --git a/src/check_builtin.cpp b/src/check_builtin.cpp index a42741976..1fb3d6037 100644 --- a/src/check_builtin.cpp +++ b/src/check_builtin.cpp @@ -2183,9 +2183,43 @@ bool check_builtin_procedure(CheckerContext *c, Operand *operand, Ast 
*call, i32 } operand->mode = Addressing_Value; - if (is_type_array(t)) { + if (t->kind == Type_Array) { + i32 rank = type_math_rank(t); // Do nothing - operand->type = x.type; + operand->type = x.type; + if (rank > 2) { + gbString s = type_to_string(x.type); + error(call, "'%.*s' expects a matrix or array with a rank of 2, got %s of rank %d", LIT(builtin_name), s, rank); + gb_string_free(s); + return false; + } else if (rank == 2) { + Type *inner = base_type(t->Array.elem); + GB_ASSERT(inner->kind == Type_Array); + Type *elem = inner->Array.elem; + Type *array_inner = alloc_type_array(elem, t->Array.count); + Type *array_outer = alloc_type_array(array_inner, inner->Array.count); + operand->type = array_outer; + + i64 elements = t->Array.count*inner->Array.count; + i64 size = type_size_of(operand->type); + if (!is_type_valid_for_matrix_elems(elem)) { + gbString s = type_to_string(x.type); + error(call, "'%.*s' expects a matrix or array with a base element type of an integer, float, or complex number, got %s", LIT(builtin_name), s); + gb_string_free(s); + } else if (elements > MATRIX_ELEMENT_COUNT_MAX) { + gbString s = type_to_string(x.type); + error(call, "'%.*s' expects a matrix or array with a maximum of %d elements, got %s with %lld elements", LIT(builtin_name), MATRIX_ELEMENT_COUNT_MAX, s, elements); + gb_string_free(s); + } else if (elements > MATRIX_ELEMENT_COUNT_MAX) { + gbString s = type_to_string(x.type); + error(call, "'%.*s' expects a matrix or array with non-zero elements, got %s", LIT(builtin_name), MATRIX_ELEMENT_COUNT_MAX, s); + gb_string_free(s); + } else if (size > MATRIX_ELEMENT_MAX_SIZE) { + gbString s = type_to_string(x.type); + error(call, "Too large of a type for '%.*s', got %s of size %lld, maximum size %d", LIT(builtin_name), s, cast(long long)size, MATRIX_ELEMENT_MAX_SIZE); + gb_string_free(s); + } + } } else { GB_ASSERT(t->kind == Type_Matrix); operand->type = alloc_type_matrix(t->Matrix.elem, t->Matrix.column_count, t->Matrix.row_count); @@ -3310,9 +3344,11 @@ bool check_builtin_procedure(CheckerContext *c, Operand *operand, Ast *call, i32 case BuiltinProc_type_is_simple_compare: case BuiltinProc_type_is_dereferenceable: case BuiltinProc_type_is_valid_map_key: + case BuiltinProc_type_is_valid_matrix_elements: case BuiltinProc_type_is_named: case BuiltinProc_type_is_pointer: case BuiltinProc_type_is_array: + case BuiltinProc_type_is_enumerated_array: case BuiltinProc_type_is_slice: case BuiltinProc_type_is_dynamic_array: case BuiltinProc_type_is_map: @@ -3320,10 +3356,9 @@ bool check_builtin_procedure(CheckerContext *c, Operand *operand, Ast *call, i32 case BuiltinProc_type_is_union: case BuiltinProc_type_is_enum: case BuiltinProc_type_is_proc: - case BuiltinProc_type_is_bit_field: - case BuiltinProc_type_is_bit_field_value: case BuiltinProc_type_is_bit_set: case BuiltinProc_type_is_simd_vector: + case BuiltinProc_type_is_matrix: case BuiltinProc_type_is_specialized_polymorphic_record: case BuiltinProc_type_is_unspecialized_polymorphic_record: case BuiltinProc_type_has_nil: diff --git a/src/check_decl.cpp b/src/check_decl.cpp index f9bc17ba4..f6dade812 100644 --- a/src/check_decl.cpp +++ b/src/check_decl.cpp @@ -238,6 +238,51 @@ isize total_attribute_count(DeclInfo *decl) { return attribute_count; } +Type *clone_enum_type(CheckerContext *ctx, Type *original_enum_type, Type *named_type) { + // NOTE(bill, 2022-02-05): Stupid edge case for `distinct` declarations + // + // X :: enum {A, B, C} + // Y :: distinct X + // + // To make Y be just like X, it will need to 
copy the elements of X and change their type + // so that they match Y rather than X. + GB_ASSERT(original_enum_type != nullptr); + GB_ASSERT(named_type != nullptr); + GB_ASSERT(original_enum_type->kind == Type_Enum); + GB_ASSERT(named_type->kind == Type_Named); + + Scope *parent = original_enum_type->Enum.scope->parent; + Scope *scope = create_scope(nullptr, parent); + + + Type *et = alloc_type_enum(); + et->Enum.base_type = original_enum_type->Enum.base_type; + et->Enum.min_value = original_enum_type->Enum.min_value; + et->Enum.max_value = original_enum_type->Enum.max_value; + et->Enum.min_value_index = original_enum_type->Enum.min_value_index; + et->Enum.max_value_index = original_enum_type->Enum.max_value_index; + et->Enum.scope = scope; + + auto fields = array_make(permanent_allocator(), original_enum_type->Enum.fields.count); + for_array(i, fields) { + Entity *old = original_enum_type->Enum.fields[i]; + + Entity *e = alloc_entity_constant(scope, old->token, named_type, old->Constant.value); + e->file = old->file; + e->identifier = clone_ast(old->identifier); + e->flags |= EntityFlag_Visited; + e->state = EntityState_Resolved; + e->Constant.flags = old->Constant.flags; + e->Constant.docs = old->Constant.docs; + e->Constant.comment = old->Constant.comment; + + fields[i] = e; + add_entity(ctx, scope, nullptr, e); + add_entity_use(ctx, e->identifier, e); + } + et->Enum.fields = fields; + return et; +} void check_type_decl(CheckerContext *ctx, Entity *e, Ast *init_expr, Type *def) { GB_ASSERT(e->type == nullptr); @@ -258,7 +303,11 @@ void check_type_decl(CheckerContext *ctx, Entity *e, Ast *init_expr, Type *def) Type *bt = check_type_expr(ctx, te, named); check_type_path_pop(ctx); - named->Named.base = base_type(bt); + Type *base = base_type(bt); + if (is_distinct && bt->kind == Type_Named && base->kind == Type_Enum) { + base = clone_enum_type(ctx, base, named); + } + named->Named.base = base; if (is_distinct && is_type_typeid(e->type)) { error(init_expr, "'distinct' cannot be applied to 'typeid'"); @@ -385,7 +434,45 @@ void check_const_decl(CheckerContext *ctx, Entity *e, Ast *type_expr, Ast *init, Operand operand = {}; if (init != nullptr) { - Entity *entity = nullptr; + Entity *entity = check_entity_from_ident_or_selector(ctx, init, false); + if (entity != nullptr && entity->kind == Entity_TypeName) { + // @TypeAliasingProblem + // NOTE(bill, 2022-02-03): This is used to solve the problem caused by type aliases + // being "confused" as constants + // + // A :: B + // C :: proc "c" (^A) + // B :: struct {x: C} + // + // A gets evaluated first, and then checks B. + // B then checks C. + // C then tries to check A which is unresolved but thought to be a constant. + // Therefore within C's check, A errs as "not a type". + // + // This is because a const declaration may or may not be a type and this cannot + // be determined from a syntactical standpoint. + // This check allows the compiler to override the entity to be checked as a type. + // + // There is no problem if B is prefixed with the `#type` helper enforcing at + // both a syntax and semantic level that B must be a type. + // + // A :: #type B + // + // This approach is not fool proof and can fail in case such as: + // + // X :: type_of(x) + // X :: Foo(int).Type + // + // Since even these kind of declarations may cause weird checking cycles. + // For the time being, these are going to be treated as an unfortunate error + // until there is a proper delaying system to try declaration again if they + // have failed. 
+ + e->kind = Entity_TypeName; + check_type_decl(ctx, e, init, named_type); + return; + } + entity = nullptr; if (init->kind == Ast_Ident) { entity = check_ident(ctx, &operand, init, nullptr, e->type, true); } else if (init->kind == Ast_SelectorExpr) { diff --git a/src/check_expr.cpp b/src/check_expr.cpp index 8667d8734..3f31ac810 100644 --- a/src/check_expr.cpp +++ b/src/check_expr.cpp @@ -508,6 +508,10 @@ bool check_cast_internal(CheckerContext *c, Operand *x, Type *type); #define MAXIMUM_TYPE_DISTANCE 10 i64 check_distance_between_types(CheckerContext *c, Operand *operand, Type *type) { + if (c == nullptr) { + GB_ASSERT(operand->mode == Addressing_Value); + GB_ASSERT(is_type_typed(operand->type)); + } if (operand->mode == Addressing_Invalid || type == t_invalid) { return -1; @@ -673,6 +677,42 @@ i64 check_distance_between_types(CheckerContext *c, Operand *operand, Type *type return 1; } } + + // TODO(bill): Determine which rule is a better on in practice + #if 1 + if (dst->Union.variants.count == 1) { + Type *vt = dst->Union.variants[0]; + i64 score = check_distance_between_types(c, operand, vt); + if (score >= 0) { + return score+2; + } + } + #else + // NOTE(bill): check to see you can assign to it with one of the variants? + i64 prev_lowest_score = -1; + i64 lowest_score = -1; + for_array(i, dst->Union.variants) { + Type *vt = dst->Union.variants[i]; + i64 score = check_distance_between_types(c, operand, vt); + if (score >= 0) { + if (lowest_score < 0) { + lowest_score = score; + } else { + if (prev_lowest_score < 0) { + prev_lowest_score = lowest_score; + } else { + prev_lowest_score = gb_min(prev_lowest_score, lowest_score); + } + lowest_score = gb_min(lowest_score, score); + } + } + } + if (lowest_score >= 0) { + if (prev_lowest_score != lowest_score) { // remove possible ambiguities + return lowest_score+2; + } + } + #endif } if (is_type_relative_pointer(dst)) { @@ -782,6 +822,13 @@ bool check_is_assignable_to(CheckerContext *c, Operand *operand, Type *type) { return check_is_assignable_to_with_score(c, operand, type, &score); } +bool internal_check_is_assignable_to(Type *src, Type *dst) { + Operand x = {}; + x.type = src; + x.mode = Addressing_Value; + return check_is_assignable_to(nullptr, &x, dst); +} + AstPackage *get_package_of_type(Type *type) { for (;;) { if (type == nullptr) { @@ -1286,7 +1333,6 @@ bool check_cycle(CheckerContext *c, Entity *curr, bool report) { return false; } - Entity *check_ident(CheckerContext *c, Operand *o, Ast *n, Type *named_type, Type *type_hint, bool allow_import_name) { GB_ASSERT(n->kind == Ast_Ident); o->mode = Addressing_Invalid; @@ -1422,8 +1468,12 @@ Entity *check_ident(CheckerContext *c, Operand *o, Ast *n, Type *named_type, Typ case Entity_TypeName: o->mode = Addressing_Type; if (check_cycle(c, e, true)) { - type = t_invalid; + o->type = t_invalid; } + if (o->type != nullptr && type->kind == Type_Named && o->type->Named.type_name->TypeName.is_type_alias) { + o->type = base_type(o->type); + } + break; case Entity_ImportName: @@ -3419,7 +3469,6 @@ void convert_untyped_error(CheckerContext *c, Operand *operand, Type *target_typ if (operand->value.kind == ExactValue_String) { String key = operand->value.value_string; if (is_type_string(operand->type) && is_type_enum(target_type)) { - gb_printf_err("HERE!\n"); Type *et = base_type(target_type); check_did_you_mean_type(key, et->Enum.fields, "."); } @@ -4065,6 +4114,101 @@ Type *determine_swizzle_array_type(Type *original_type, Type *type_hint, isize n } +bool 
is_entity_declared_for_selector(Entity *entity, Scope *import_scope, bool *allow_builtin) { + bool is_declared = entity != nullptr; + if (is_declared) { + if (entity->kind == Entity_Builtin) { + // NOTE(bill): Builtin's are in the universal scope which is part of every scopes hierarchy + // This means that we should just ignore the found result through it + *allow_builtin = entity->scope == import_scope || entity->scope != builtin_pkg->scope; + } else if ((entity->scope->flags&ScopeFlag_Global) == ScopeFlag_Global && (import_scope->flags&ScopeFlag_Global) == 0) { + is_declared = false; + } + } + return is_declared; +} + +// NOTE(bill, 2022-02-03): see `check_const_decl` for why it exists reasoning +Entity *check_entity_from_ident_or_selector(CheckerContext *c, Ast *node, bool ident_only) { + if (node->kind == Ast_Ident) { + String name = node->Ident.token.string; + return scope_lookup(c->scope, name); + } else if (!ident_only) if (node->kind == Ast_SelectorExpr) { + ast_node(se, SelectorExpr, node); + if (se->token.kind == Token_ArrowRight) { + return nullptr; + } + + Ast *op_expr = se->expr; + Ast *selector = unparen_expr(se->selector); + if (selector == nullptr) { + return nullptr; + } + if (selector->kind != Ast_Ident) { + return nullptr; + } + + Entity *entity = nullptr; + Entity *expr_entity = nullptr; + bool check_op_expr = true; + + if (op_expr->kind == Ast_Ident) { + String op_name = op_expr->Ident.token.string; + Entity *e = scope_lookup(c->scope, op_name); + if (e == nullptr) { + return nullptr; + } + add_entity_use(c, op_expr, e); + expr_entity = e; + + if (e != nullptr && e->kind == Entity_ImportName && selector->kind == Ast_Ident) { + // IMPORTANT NOTE(bill): This is very sloppy code but it's also very fragile + // It pretty much needs to be in this order and this way + // If you can clean this up, please do but be really careful + String import_name = op_name; + Scope *import_scope = e->ImportName.scope; + String entity_name = selector->Ident.token.string; + + check_op_expr = false; + entity = scope_lookup_current(import_scope, entity_name); + bool allow_builtin = false; + if (!is_entity_declared_for_selector(entity, import_scope, &allow_builtin)) { + return nullptr; + } + + check_entity_decl(c, entity, nullptr, nullptr); + if (entity->kind == Entity_ProcGroup) { + return entity; + } + GB_ASSERT_MSG(entity->type != nullptr, "%.*s (%.*s)", LIT(entity->token.string), LIT(entity_strings[entity->kind])); + } + } + + Operand operand = {}; + if (check_op_expr) { + check_expr_base(c, &operand, op_expr, nullptr); + if (operand.mode == Addressing_Invalid) { + return nullptr; + } + } + + if (entity == nullptr && selector->kind == Ast_Ident) { + String field_name = selector->Ident.token.string; + if (is_type_dynamic_array(type_deref(operand.type))) { + init_mem_allocator(c->checker); + } + auto sel = lookup_field(operand.type, field_name, operand.mode == Addressing_Type); + entity = sel.entity; + } + + if (entity != nullptr) { + return entity; + } + } + return nullptr; +} + + Entity *check_selector(CheckerContext *c, Operand *operand, Ast *node, Type *type_hint) { ast_node(se, SelectorExpr, node); @@ -4113,18 +4257,8 @@ Entity *check_selector(CheckerContext *c, Operand *operand, Ast *node, Type *typ check_op_expr = false; entity = scope_lookup_current(import_scope, entity_name); - bool is_declared = entity != nullptr; bool allow_builtin = false; - if (is_declared) { - if (entity->kind == Entity_Builtin) { - // NOTE(bill): Builtin's are in the universal scope which is part of every 
scopes hierarchy - // This means that we should just ignore the found result through it - allow_builtin = entity->scope == import_scope || entity->scope != builtin_pkg->scope; - } else if ((entity->scope->flags&ScopeFlag_Global) == ScopeFlag_Global && (import_scope->flags&ScopeFlag_Global) == 0) { - is_declared = false; - } - } - if (!is_declared) { + if (!is_entity_declared_for_selector(entity, import_scope, &allow_builtin)) { error(op_expr, "'%.*s' is not declared by '%.*s'", LIT(entity_name), LIT(import_name)); operand->mode = Addressing_Invalid; operand->expr = node; @@ -4214,7 +4348,7 @@ Entity *check_selector(CheckerContext *c, Operand *operand, Ast *node, Type *typ } } - if (entity == nullptr && selector->kind == Ast_Ident && is_type_array(type_deref(operand->type))) { + if (entity == nullptr && selector->kind == Ast_Ident && is_type_array(type_deref(operand->type))) { // TODO(bill): Simd_Vector swizzling String field_name = selector->Ident.token.string; @@ -5756,8 +5890,12 @@ CallArgumentData check_call_arguments(CheckerContext *c, Operand *operand, Type ctx.curr_proc_sig = e->type; GB_ASSERT(decl->proc_lit->kind == Ast_ProcLit); - evaluate_where_clauses(&ctx, call, decl->scope, &decl->proc_lit->ProcLit.where_clauses, true); + bool ok = evaluate_where_clauses(&ctx, call, decl->scope, &decl->proc_lit->ProcLit.where_clauses, true); decl->where_clauses_evaluated = true; + + if (ok && (data.gen_entity->flags & EntityFlag_ProcBodyChecked) == 0) { + check_procedure_later(c, e->file, e->token, decl, e->type, decl->proc_lit->ProcLit.body, decl->proc_lit->ProcLit.tags); + } } return data; } @@ -5770,6 +5908,7 @@ CallArgumentData check_call_arguments(CheckerContext *c, Operand *operand, Type Entity *e = entity_of_node(ident); + CallArgumentData data = {}; CallArgumentError err = call_checker(c, call, proc_type, e, operands, CallArgumentMode_ShowErrors, &data); gb_unused(err); @@ -5778,7 +5917,6 @@ CallArgumentData check_call_arguments(CheckerContext *c, Operand *operand, Type if (entity_to_use != nullptr) { update_untyped_expr_type(c, operand->expr, entity_to_use->type, true); } - if (data.gen_entity != nullptr) { Entity *e = data.gen_entity; DeclInfo *decl = data.gen_entity->decl_info; @@ -5790,8 +5928,12 @@ CallArgumentData check_call_arguments(CheckerContext *c, Operand *operand, Type ctx.curr_proc_sig = e->type; GB_ASSERT(decl->proc_lit->kind == Ast_ProcLit); - evaluate_where_clauses(&ctx, call, decl->scope, &decl->proc_lit->ProcLit.where_clauses, true); + bool ok = evaluate_where_clauses(&ctx, call, decl->scope, &decl->proc_lit->ProcLit.where_clauses, true); decl->where_clauses_evaluated = true; + + if (ok && (data.gen_entity->flags & EntityFlag_ProcBodyChecked) == 0) { + check_procedure_later(c, e->file, e->token, decl, e->type, decl->proc_lit->ProcLit.body, decl->proc_lit->ProcLit.tags); + } } return data; } @@ -6085,7 +6227,8 @@ CallArgumentError check_polymorphic_record_type(CheckerContext *c, Operand *oper } // NOTE(bill): Add type info the parameters - add_type_info_type(c, o->type); + // TODO(bill, 2022-01-23): why was this line added in the first place? 
I'm commenting it out for the time being + // add_type_info_type(c, o->type); } { @@ -6860,6 +7003,1900 @@ void check_matrix_index_expr(CheckerContext *c, Operand *o, Ast *node, Type *typ } +struct TypeAndToken { + Type *type; + Token token; +}; + +typedef PtrMap SeenMap; + +void add_constant_switch_case(CheckerContext *ctx, SeenMap *seen, Operand operand, bool use_expr = true) { + if (operand.mode != Addressing_Constant) { + return; + } + if (operand.value.kind == ExactValue_Invalid) { + return; + } + + uintptr key = hash_exact_value(operand.value); + TypeAndToken *found = map_get(seen, key); + if (found != nullptr) { + isize count = multi_map_count(seen, key); + TypeAndToken *taps = gb_alloc_array(temporary_allocator(), TypeAndToken, count); + + multi_map_get_all(seen, key, taps); + for (isize i = 0; i < count; i++) { + TypeAndToken tap = taps[i]; + if (!are_types_identical(operand.type, tap.type)) { + continue; + } + + TokenPos pos = tap.token.pos; + if (use_expr) { + gbString expr_str = expr_to_string(operand.expr); + error(operand.expr, + "Duplicate case '%s'\n" + "\tprevious case at %s", + expr_str, + token_pos_to_string(pos)); + gb_string_free(expr_str); + } else { + error(operand.expr, "Duplicate case found with previous case at %s", token_pos_to_string(pos)); + } + return; + } + } + + TypeAndToken tap = {operand.type, ast_token(operand.expr)}; + multi_map_insert(seen, key, tap); +} + + +void add_to_seen_map(CheckerContext *ctx, SeenMap *seen, TokenKind upper_op, Operand const &x, Operand const &lhs, Operand const &rhs) { + if (is_type_enum(x.type)) { + // TODO(bill): Fix this logic so it's fast!!! + + i64 v0 = exact_value_to_i64(lhs.value); + i64 v1 = exact_value_to_i64(rhs.value); + Operand v = {}; + v.mode = Addressing_Constant; + v.type = x.type; + v.expr = x.expr; + + Type *bt = base_type(x.type); + GB_ASSERT(bt->kind == Type_Enum); + for (i64 vi = v0; vi <= v1; vi++) { + if (upper_op != Token_LtEq && vi == v1) { + break; + } + + bool found = false; + for_array(j, bt->Enum.fields) { + Entity *f = bt->Enum.fields[j]; + GB_ASSERT(f->kind == Entity_Constant); + + i64 fv = exact_value_to_i64(f->Constant.value); + if (fv == vi) { + found = true; + break; + } + } + if (found) { + v.value = exact_value_i64(vi); + add_constant_switch_case(ctx, seen, v); + } + } + } else { + add_constant_switch_case(ctx, seen, lhs); + if (upper_op == Token_LtEq) { + add_constant_switch_case(ctx, seen, rhs); + } + } +} +void add_to_seen_map(CheckerContext *ctx, SeenMap *seen, Operand const &x) { + add_constant_switch_case(ctx, seen, x); +} + +ExprKind check_basic_directive_expr(CheckerContext *c, Operand *o, Ast *node, Type *type_hint) { + ast_node(bd, BasicDirective, node); + + ExprKind kind = Expr_Expr; + + o->mode = Addressing_Constant; + String name = bd->name.string; + if (name == "file") { + o->type = t_untyped_string; + o->value = exact_value_string(get_file_path_string(bd->token.pos.file_id)); + } else if (name == "line") { + o->type = t_untyped_integer; + o->value = exact_value_i64(bd->token.pos.line); + } else if (name == "procedure") { + if (c->curr_proc_decl == nullptr) { + error(node, "#procedure may only be used within procedures"); + o->type = t_untyped_string; + o->value = exact_value_string(str_lit("")); + } else { + o->type = t_untyped_string; + o->value = exact_value_string(c->proc_name); + } + } else if (name == "caller_location") { + init_core_source_code_location(c->checker); + error(node, "#caller_location may only be used as a default argument parameter"); + o->type = 
t_source_code_location; + o->mode = Addressing_Value; + } else { + if (name == "location") { + init_core_source_code_location(c->checker); + error(node, "'#%.*s' must be used in a call expression", LIT(name)); + o->type = t_source_code_location; + o->mode = Addressing_Value; + } else if ( + name == "assert" || + name == "defined" || + name == "config" || + name == "load" || + name == "load_hash" || + name == "load_or" + ) { + error(node, "'#%.*s' must be used as a call", LIT(name)); + o->type = t_invalid; + o->mode = Addressing_Invalid; + } else { + error(node, "Unknown directive: #%.*s", LIT(name)); + o->type = t_invalid; + o->mode = Addressing_Invalid; + } + + } + return kind; +} + +ExprKind check_ternary_if_expr(CheckerContext *c, Operand *o, Ast *node, Type *type_hint) { + ExprKind kind = Expr_Expr; + Operand cond = {Addressing_Invalid}; + ast_node(te, TernaryIfExpr, node); + check_expr(c, &cond, te->cond); + node->viral_state_flags |= te->cond->viral_state_flags; + + if (cond.mode != Addressing_Invalid && !is_type_boolean(cond.type)) { + error(te->cond, "Non-boolean condition in ternary if expression"); + } + + Operand x = {Addressing_Invalid}; + Operand y = {Addressing_Invalid}; + check_expr_or_type(c, &x, te->x, type_hint); + node->viral_state_flags |= te->x->viral_state_flags; + + if (te->y != nullptr) { + check_expr_or_type(c, &y, te->y, type_hint); + node->viral_state_flags |= te->y->viral_state_flags; + } else { + error(node, "A ternary expression must have an else clause"); + return kind; + } + + if (x.type == nullptr || x.type == t_invalid || + y.type == nullptr || y.type == t_invalid) { + return kind; + } + + convert_to_typed(c, &x, y.type); + if (x.mode == Addressing_Invalid) { + return kind; + } + convert_to_typed(c, &y, x.type); + if (y.mode == Addressing_Invalid) { + x.mode = Addressing_Invalid; + return kind; + } + + if (!ternary_compare_types(x.type, y.type)) { + gbString its = type_to_string(x.type); + gbString ets = type_to_string(y.type); + error(node, "Mismatched types in ternary if expression, %s vs %s", its, ets); + gb_string_free(ets); + gb_string_free(its); + return kind; + } + + o->type = x.type; + if (is_type_untyped_nil(o->type) || is_type_untyped_undef(o->type)) { + o->type = y.type; + } + + o->mode = Addressing_Value; + o->expr = node; + if (type_hint != nullptr && is_type_untyped(o->type)) { + if (check_cast_internal(c, &x, type_hint) && + check_cast_internal(c, &y, type_hint)) { + convert_to_typed(c, o, type_hint); + update_untyped_expr_type(c, node, type_hint, !is_type_untyped(type_hint)); + } + } + return kind; +} + +ExprKind check_ternary_when_expr(CheckerContext *c, Operand *o, Ast *node, Type *type_hint) { + ExprKind kind = Expr_Expr; + Operand cond = {}; + ast_node(te, TernaryWhenExpr, node); + check_expr(c, &cond, te->cond); + node->viral_state_flags |= te->cond->viral_state_flags; + + if (cond.mode != Addressing_Constant || !is_type_boolean(cond.type)) { + error(te->cond, "Expected a constant boolean condition in ternary when expression"); + return kind; + } + + if (cond.value.value_bool) { + check_expr_or_type(c, o, te->x, type_hint); + node->viral_state_flags |= te->x->viral_state_flags; + } else { + if (te->y != nullptr) { + check_expr_or_type(c, o, te->y, type_hint); + node->viral_state_flags |= te->y->viral_state_flags; + } else { + error(node, "A ternary when expression must have an else clause"); + return kind; + } + } + return kind; +} + +ExprKind check_or_else_expr(CheckerContext *c, Operand *o, Ast *node, Type *type_hint) { + 
ast_node(oe, OrElseExpr, node); + + String name = oe->token.string; + Ast *arg = oe->x; + Ast *default_value = oe->y; + + Operand x = {}; + Operand y = {}; + check_multi_expr_with_type_hint(c, &x, arg, type_hint); + if (x.mode == Addressing_Invalid) { + o->mode = Addressing_Value; + o->type = t_invalid; + o->expr = node; + return Expr_Expr; + } + + check_multi_expr_with_type_hint(c, &y, default_value, x.type); + error_operand_no_value(&y); + if (y.mode == Addressing_Invalid) { + o->mode = Addressing_Value; + o->type = t_invalid; + o->expr = node; + return Expr_Expr; + } + + Type *left_type = nullptr; + Type *right_type = nullptr; + check_or_else_split_types(c, &x, name, &left_type, &right_type); + add_type_and_value(&c->checker->info, arg, x.mode, x.type, x.value); + + if (left_type != nullptr) { + check_assignment(c, &y, left_type, name); + } else { + check_or_else_expr_no_value_error(c, name, x, type_hint); + } + + if (left_type == nullptr) { + left_type = t_invalid; + } + o->mode = Addressing_Value; + o->type = left_type; + o->expr = node; + return Expr_Expr; +} + +ExprKind check_or_return_expr(CheckerContext *c, Operand *o, Ast *node, Type *type_hint) { + ast_node(re, OrReturnExpr, node); + + String name = re->token.string; + Operand x = {}; + check_multi_expr_with_type_hint(c, &x, re->expr, type_hint); + if (x.mode == Addressing_Invalid) { + o->mode = Addressing_Value; + o->type = t_invalid; + o->expr = node; + return Expr_Expr; + } + + Type *left_type = nullptr; + Type *right_type = nullptr; + check_or_return_split_types(c, &x, name, &left_type, &right_type); + add_type_and_value(&c->checker->info, re->expr, x.mode, x.type, x.value); + + if (right_type == nullptr) { + check_or_else_expr_no_value_error(c, name, x, type_hint); + } else { + Type *proc_type = base_type(c->curr_proc_sig); + GB_ASSERT(proc_type->kind == Type_Proc); + Type *result_type = proc_type->Proc.results; + if (result_type == nullptr) { + error(node, "'%.*s' requires the current procedure to have at least one return value", LIT(name)); + } else { + GB_ASSERT(result_type->kind == Type_Tuple); + + auto const &vars = result_type->Tuple.variables; + Type *end_type = vars[vars.count-1]->type; + + if (vars.count > 1) { + if (!proc_type->Proc.has_named_results) { + error(node, "'%.*s' within a procedure with more than 1 return value requires that the return values are named, allowing for early return", LIT(name)); + } + } + + Operand rhs = {}; + rhs.type = right_type; + rhs.mode = Addressing_Value; + + // TODO(bill): better error message + if (!check_is_assignable_to(c, &rhs, end_type)) { + gbString a = type_to_string(right_type); + gbString b = type_to_string(end_type); + gbString ret_type = type_to_string(result_type); + error(node, "Cannot assign end value of type '%s' to '%s' in '%.*s'", a, b, LIT(name)); + if (vars.count == 1) { + error_line("\tProcedure return value type: %s\n", ret_type); + } else { + error_line("\tProcedure return value types: (%s)\n", ret_type); + } + gb_string_free(ret_type); + gb_string_free(b); + gb_string_free(a); + } + } + } + + o->expr = node; + o->type = left_type; + if (left_type != nullptr) { + o->mode = Addressing_Value; + } else { + o->mode = Addressing_NoValue; + } + + if (c->curr_proc_sig == nullptr) { + error(node, "'%.*s' can only be used within a procedure", LIT(name)); + } + + if (c->in_defer) { + error(node, "'or_return' cannot be used within a defer statement"); + } + + return Expr_Expr; +} + +ExprKind check_compound_literal(CheckerContext *c, Operand *o, Ast *node, Type 
*type_hint) { + ExprKind kind = Expr_Expr; + ast_node(cl, CompoundLit, node); + + Type *type = type_hint; + if (type != nullptr && is_type_untyped(type)) { + type = nullptr; + } + bool is_to_be_determined_array_count = false; + bool is_constant = true; + if (cl->type != nullptr) { + type = nullptr; + + // [?]Type + if (cl->type->kind == Ast_ArrayType && cl->type->ArrayType.count != nullptr) { + Ast *count = cl->type->ArrayType.count; + if (count->kind == Ast_UnaryExpr && + count->UnaryExpr.op.kind == Token_Question) { + type = alloc_type_array(check_type(c, cl->type->ArrayType.elem), -1); + is_to_be_determined_array_count = true; + } + if (cl->elems.count > 0) { + if (cl->type->ArrayType.tag != nullptr) { + Ast *tag = cl->type->ArrayType.tag; + GB_ASSERT(tag->kind == Ast_BasicDirective); + String name = tag->BasicDirective.name.string; + if (name == "soa") { + error(node, "#soa arrays are not supported for compound literals"); + return kind; + } + } + } + } + if (cl->type->kind == Ast_DynamicArrayType && cl->type->DynamicArrayType.tag != nullptr) { + if (cl->elems.count > 0) { + Ast *tag = cl->type->DynamicArrayType.tag; + GB_ASSERT(tag->kind == Ast_BasicDirective); + String name = tag->BasicDirective.name.string; + if (name == "soa") { + error(node, "#soa arrays are not supported for compound literals"); + return kind; + } + } + } + + if (type == nullptr) { + type = check_type(c, cl->type); + } + } + + if (type == nullptr) { + error(node, "Missing type in compound literal"); + return kind; + } + + + Type *t = base_type(type); + if (is_type_polymorphic(t)) { + gbString str = type_to_string(type); + error(node, "Cannot use a polymorphic type for a compound literal, got '%s'", str); + o->expr = node; + o->type = type; + gb_string_free(str); + return kind; + } + + + switch (t->kind) { + case Type_Struct: { + if (cl->elems.count == 0) { + break; // NOTE(bill): No need to init + } + if (t->Struct.is_raw_union) { + if (cl->elems.count > 0) { + // NOTE: unions cannot be constant + is_constant = false; + + if (cl->elems[0]->kind != Ast_FieldValue) { + gbString type_str = type_to_string(type); + error(node, "%s ('struct #raw_union') compound literals are only allowed to contain 'field = value' elements", type_str); + gb_string_free(type_str); + } else { + if (cl->elems.count != 1) { + gbString type_str = type_to_string(type); + error(node, "%s ('struct #raw_union') compound literals are only allowed to contain up to 1 'field = value' element, got %td", type_str, cl->elems.count); + gb_string_free(type_str); + } else { + Ast *elem = cl->elems[0]; + ast_node(fv, FieldValue, elem); + if (fv->field->kind != Ast_Ident) { + gbString expr_str = expr_to_string(fv->field); + error(elem, "Invalid field name '%s' in structure literal", expr_str); + gb_string_free(expr_str); + break; + } + + String name = fv->field->Ident.token.string; + + Selection sel = lookup_field(type, name, o->mode == Addressing_Type); + bool is_unknown = sel.entity == nullptr; + if (is_unknown) { + error(elem, "Unknown field '%.*s' in structure literal", LIT(name)); + break; + } + + if (sel.index.count > 1) { + error(elem, "Cannot assign to an anonymous field '%.*s' in a structure literal (at the moment)", LIT(name)); + break; + } + + Entity *field = t->Struct.fields[sel.index[0]]; + add_entity_use(c, fv->field, field); + + Operand o = {}; + check_expr_or_type(c, &o, fv->value, field->type); + + + check_assignment(c, &o, field->type, str_lit("structure literal")); + } + + } + } + break; + } + + + isize field_count = 
t->Struct.fields.count; + isize min_field_count = t->Struct.fields.count; + for (isize i = min_field_count-1; i >= 0; i--) { + Entity *e = t->Struct.fields[i]; + GB_ASSERT(e->kind == Entity_Variable); + if (e->Variable.param_value.kind != ParameterValue_Invalid) { + min_field_count--; + } else { + break; + } + } + + if (cl->elems[0]->kind == Ast_FieldValue) { + bool *fields_visited = gb_alloc_array(temporary_allocator(), bool, field_count); + + for_array(i, cl->elems) { + Ast *elem = cl->elems[i]; + if (elem->kind != Ast_FieldValue) { + error(elem, "Mixture of 'field = value' and value elements in a literal is not allowed"); + continue; + } + ast_node(fv, FieldValue, elem); + if (fv->field->kind != Ast_Ident) { + gbString expr_str = expr_to_string(fv->field); + error(elem, "Invalid field name '%s' in structure literal", expr_str); + gb_string_free(expr_str); + continue; + } + String name = fv->field->Ident.token.string; + + Selection sel = lookup_field(type, name, o->mode == Addressing_Type); + bool is_unknown = sel.entity == nullptr; + if (is_unknown) { + error(elem, "Unknown field '%.*s' in structure literal", LIT(name)); + continue; + } + + if (sel.index.count > 1) { + error(elem, "Cannot assign to an anonymous field '%.*s' in a structure literal (at the moment)", LIT(name)); + continue; + } + + Entity *field = t->Struct.fields[sel.index[0]]; + add_entity_use(c, fv->field, field); + + if (fields_visited[sel.index[0]]) { + error(elem, "Duplicate field '%.*s' in structure literal", LIT(name)); + continue; + } + + fields_visited[sel.index[0]] = true; + + Operand o = {}; + check_expr_or_type(c, &o, fv->value, field->type); + + if (is_type_any(field->type) || is_type_union(field->type) || is_type_raw_union(field->type) || is_type_typeid(field->type)) { + is_constant = false; + } + if (is_constant) { + is_constant = check_is_operand_compound_lit_constant(c, &o); + } + + check_assignment(c, &o, field->type, str_lit("structure literal")); + } + } else { + bool seen_field_value = false; + + for_array(index, cl->elems) { + Entity *field = nullptr; + Ast *elem = cl->elems[index]; + if (elem->kind == Ast_FieldValue) { + seen_field_value = true; + error(elem, "Mixture of 'field = value' and value elements in a literal is not allowed"); + continue; + } else if (seen_field_value) { + error(elem, "Value elements cannot be used after a 'field = value'"); + continue; + } + if (index >= field_count) { + error(elem, "Too many values in structure literal, expected %td, got %td", field_count, cl->elems.count); + break; + } + + if (field == nullptr) { + field = t->Struct.fields[index]; + } + + Operand o = {}; + check_expr_or_type(c, &o, elem, field->type); + + if (is_type_any(field->type) || is_type_union(field->type) || is_type_raw_union(field->type) || is_type_typeid(field->type)) { + is_constant = false; + } + if (is_constant) { + is_constant = check_is_operand_compound_lit_constant(c, &o); + } + + check_assignment(c, &o, field->type, str_lit("structure literal")); + } + if (cl->elems.count < field_count) { + if (min_field_count < field_count) { + if (cl->elems.count < min_field_count) { + error(cl->close, "Too few values in structure literal, expected at least %td, got %td", min_field_count, cl->elems.count); + } + } else { + error(cl->close, "Too few values in structure literal, expected %td, got %td", field_count, cl->elems.count); + } + } + } + + break; + } + + case Type_Slice: + case Type_Array: + case Type_DynamicArray: + case Type_SimdVector: + case Type_Matrix: + { + Type *elem_type = nullptr; + 
String context_name = {}; + i64 max_type_count = -1; + if (t->kind == Type_Slice) { + elem_type = t->Slice.elem; + context_name = str_lit("slice literal"); + } else if (t->kind == Type_Array) { + elem_type = t->Array.elem; + context_name = str_lit("array literal"); + if (!is_to_be_determined_array_count) { + max_type_count = t->Array.count; + } + } else if (t->kind == Type_DynamicArray) { + elem_type = t->DynamicArray.elem; + context_name = str_lit("dynamic array literal"); + is_constant = false; + + if (!build_context.no_dynamic_literals) { + add_package_dependency(c, "runtime", "__dynamic_array_reserve"); + add_package_dependency(c, "runtime", "__dynamic_array_append"); + } + } else if (t->kind == Type_SimdVector) { + elem_type = t->SimdVector.elem; + context_name = str_lit("simd vector literal"); + max_type_count = t->SimdVector.count; + } else if (t->kind == Type_Matrix) { + elem_type = t->Matrix.elem; + context_name = str_lit("matrix literal"); + max_type_count = t->Matrix.row_count*t->Matrix.column_count; + } else { + GB_PANIC("unreachable"); + } + + + i64 max = 0; + + Type *bet = base_type(elem_type); + if (!elem_type_can_be_constant(bet)) { + is_constant = false; + } + + if (bet == t_invalid) { + break; + } + + if (cl->elems.count > 0 && cl->elems[0]->kind == Ast_FieldValue) { + if (is_type_simd_vector(t)) { + error(cl->elems[0], "'field = value' is not allowed for SIMD vector literals"); + } else { + RangeCache rc = range_cache_make(heap_allocator()); + defer (range_cache_destroy(&rc)); + + for_array(i, cl->elems) { + Ast *elem = cl->elems[i]; + if (elem->kind != Ast_FieldValue) { + error(elem, "Mixture of 'field = value' and value elements in a literal is not allowed"); + continue; + } + ast_node(fv, FieldValue, elem); + + if (is_ast_range(fv->field)) { + Token op = fv->field->BinaryExpr.op; + + Operand x = {}; + Operand y = {}; + bool ok = check_range(c, fv->field, &x, &y, nullptr); + if (!ok) { + continue; + } + if (x.mode != Addressing_Constant || !is_type_integer(core_type(x.type))) { + error(x.expr, "Expected a constant integer as an array field"); + continue; + } + + if (y.mode != Addressing_Constant || !is_type_integer(core_type(y.type))) { + error(y.expr, "Expected a constant integer as an array field"); + continue; + } + + i64 lo = exact_value_to_i64(x.value); + i64 hi = exact_value_to_i64(y.value); + i64 max_index = hi; + if (op.kind == Token_RangeHalf) { // ..< (exclusive) + hi -= 1; + } else { // .. 
(inclusive) + max_index += 1; + } + + bool new_range = range_cache_add_range(&rc, lo, hi); + if (!new_range) { + error(elem, "Overlapping field range index %lld %.*s %lld for %.*s", lo, LIT(op.string), hi, LIT(context_name)); + continue; + } + + + if (max_type_count >= 0 && (lo < 0 || lo >= max_type_count)) { + error(elem, "Index %lld is out of bounds (0..<%lld) for %.*s", lo, max_type_count, LIT(context_name)); + continue; + } + if (max_type_count >= 0 && (hi < 0 || hi >= max_type_count)) { + error(elem, "Index %lld is out of bounds (0..<%lld) for %.*s", hi, max_type_count, LIT(context_name)); + continue; + } + + if (max < hi) { + max = max_index; + } + + Operand operand = {}; + check_expr_with_type_hint(c, &operand, fv->value, elem_type); + check_assignment(c, &operand, elem_type, context_name); + + is_constant = is_constant && operand.mode == Addressing_Constant; + } else { + Operand op_index = {}; + check_expr(c, &op_index, fv->field); + + if (op_index.mode != Addressing_Constant || !is_type_integer(core_type(op_index.type))) { + error(elem, "Expected a constant integer as an array field"); + continue; + } + // add_type_and_value(c->info, op_index.expr, op_index.mode, op_index.type, op_index.value); + + i64 index = exact_value_to_i64(op_index.value); + + if (max_type_count >= 0 && (index < 0 || index >= max_type_count)) { + error(elem, "Index %lld is out of bounds (0..<%lld) for %.*s", index, max_type_count, LIT(context_name)); + continue; + } + + bool new_index = range_cache_add_index(&rc, index); + if (!new_index) { + error(elem, "Duplicate field index %lld for %.*s", index, LIT(context_name)); + continue; + } + + if (max < index+1) { + max = index+1; + } + + Operand operand = {}; + check_expr_with_type_hint(c, &operand, fv->value, elem_type); + check_assignment(c, &operand, elem_type, context_name); + + is_constant = is_constant && operand.mode == Addressing_Constant; + } + } + + cl->max_count = max; + } + + } else { + isize index = 0; + for (; index < cl->elems.count; index++) { + Ast *e = cl->elems[index]; + if (e == nullptr) { + error(node, "Invalid literal element"); + continue; + } + + if (e->kind == Ast_FieldValue) { + error(e, "Mixture of 'field = value' and value elements in a literal is not allowed"); + continue; + } + + if (0 <= max_type_count && max_type_count <= index) { + error(e, "Index %lld is out of bounds (>= %lld) for %.*s", index, max_type_count, LIT(context_name)); + } + + Operand operand = {}; + check_expr_with_type_hint(c, &operand, e, elem_type); + check_assignment(c, &operand, elem_type, context_name); + + is_constant = is_constant && operand.mode == Addressing_Constant; + } + + if (max < index) { + max = index; + } + } + + + if (t->kind == Type_Array) { + if (is_to_be_determined_array_count) { + t->Array.count = max; + } else if (cl->elems.count > 0 && cl->elems[0]->kind != Ast_FieldValue) { + if (0 < max && max < t->Array.count) { + error(node, "Expected %lld values for this array literal, got %lld", cast(long long)t->Array.count, cast(long long)max); + } + } + } + + + if (t->kind == Type_SimdVector) { + if (!is_constant) { + error(node, "Expected all constant elements for a simd vector"); + } + } + + + if (t->kind == Type_DynamicArray) { + if (build_context.no_dynamic_literals && cl->elems.count) { + error(node, "Compound literals of dynamic types have been disabled"); + } + } + + if (t->kind == Type_Matrix) { + if (cl->elems.count > 0 && cl->elems[0]->kind != Ast_FieldValue) { + if (0 < max && max < max_type_count) { + error(node, "Expected %lld values 
for this matrix literal, got %lld", cast(long long)max_type_count, cast(long long)max); + } + } + } + + break; + } + + case Type_EnumeratedArray: + { + Type *elem_type = t->EnumeratedArray.elem; + Type *index_type = t->EnumeratedArray.index; + String context_name = str_lit("enumerated array literal"); + i64 max_type_count = t->EnumeratedArray.count; + + gbString index_type_str = type_to_string(index_type); + defer (gb_string_free(index_type_str)); + + i64 total_lo = exact_value_to_i64(*t->EnumeratedArray.min_value); + i64 total_hi = exact_value_to_i64(*t->EnumeratedArray.max_value); + + String total_lo_string = {}; + String total_hi_string = {}; + GB_ASSERT(is_type_enum(index_type)); + { + Type *bt = base_type(index_type); + GB_ASSERT(bt->kind == Type_Enum); + for_array(i, bt->Enum.fields) { + Entity *f = bt->Enum.fields[i]; + if (f->kind != Entity_Constant) { + continue; + } + if (total_lo_string.len == 0 && compare_exact_values(Token_CmpEq, f->Constant.value, *t->EnumeratedArray.min_value)) { + total_lo_string = f->token.string; + } + if (total_hi_string.len == 0 && compare_exact_values(Token_CmpEq, f->Constant.value, *t->EnumeratedArray.max_value)) { + total_hi_string = f->token.string; + } + if (total_lo_string.len != 0 && total_hi_string.len != 0) { + break; + } + } + } + + i64 max = 0; + + Type *bet = base_type(elem_type); + if (!elem_type_can_be_constant(bet)) { + is_constant = false; + } + + if (bet == t_invalid) { + break; + } + bool is_partial = cl->tag && (cl->tag->BasicDirective.name.string == "partial"); + + SeenMap seen = {}; // NOTE(bill): Multimap, Key: ExactValue + map_init(&seen, heap_allocator()); + defer (map_destroy(&seen)); + + if (cl->elems.count > 0 && cl->elems[0]->kind == Ast_FieldValue) { + RangeCache rc = range_cache_make(heap_allocator()); + defer (range_cache_destroy(&rc)); + + for_array(i, cl->elems) { + Ast *elem = cl->elems[i]; + if (elem->kind != Ast_FieldValue) { + error(elem, "Mixture of 'field = value' and value elements in a literal is not allowed"); + continue; + } + ast_node(fv, FieldValue, elem); + + if (is_ast_range(fv->field)) { + Token op = fv->field->BinaryExpr.op; + + Operand x = {}; + Operand y = {}; + bool ok = check_range(c, fv->field, &x, &y, nullptr, index_type); + if (!ok) { + continue; + } + if (x.mode != Addressing_Constant || !are_types_identical(x.type, index_type)) { + error(x.expr, "Expected a constant enum of type '%s' as an array field", index_type_str); + continue; + } + + if (y.mode != Addressing_Constant || !are_types_identical(x.type, index_type)) { + error(y.expr, "Expected a constant enum of type '%s' as an array field", index_type_str); + continue; + } + + i64 lo = exact_value_to_i64(x.value); + i64 hi = exact_value_to_i64(y.value); + i64 max_index = hi; + if (op.kind == Token_RangeHalf) { + hi -= 1; + } + + bool new_range = range_cache_add_range(&rc, lo, hi); + if (!new_range) { + gbString lo_str = expr_to_string(x.expr); + gbString hi_str = expr_to_string(y.expr); + error(elem, "Overlapping field range index %s %.*s %s for %.*s", lo_str, LIT(op.string), hi_str, LIT(context_name)); + gb_string_free(hi_str); + gb_string_free(lo_str); + continue; + } + + + // NOTE(bill): These are sanity checks for invalid enum values + if (max_type_count >= 0 && (lo < total_lo || lo > total_hi)) { + gbString lo_str = expr_to_string(x.expr); + error(elem, "Index %s is out of bounds (%.*s .. 
%.*s) for %.*s", lo_str, LIT(total_lo_string), LIT(total_hi_string), LIT(context_name)); + gb_string_free(lo_str); + continue; + } + if (max_type_count >= 0 && (hi < 0 || hi > total_hi)) { + gbString hi_str = expr_to_string(y.expr); + error(elem, "Index %s is out of bounds (%.*s .. %.*s) for %.*s", hi_str, LIT(total_lo_string), LIT(total_hi_string), LIT(context_name)); + gb_string_free(hi_str); + continue; + } + + if (max < hi) { + max = max_index; + } + + Operand operand = {}; + check_expr_with_type_hint(c, &operand, fv->value, elem_type); + check_assignment(c, &operand, elem_type, context_name); + + is_constant = is_constant && operand.mode == Addressing_Constant; + + TokenKind upper_op = Token_LtEq; + if (op.kind == Token_RangeHalf) { + upper_op = Token_Lt; + } + add_to_seen_map(c, &seen, upper_op, x, x, y); + } else { + Operand op_index = {}; + check_expr_with_type_hint(c, &op_index, fv->field, index_type); + + if (op_index.mode != Addressing_Constant || !are_types_identical(op_index.type, index_type)) { + error(op_index.expr, "Expected a constant enum of type '%s' as an array field", index_type_str); + continue; + } + + i64 index = exact_value_to_i64(op_index.value); + + if (max_type_count >= 0 && (index < total_lo || index > total_hi)) { + gbString idx_str = expr_to_string(op_index.expr); + error(elem, "Index %s is out of bounds (%.*s .. %.*s) for %.*s", idx_str, LIT(total_lo_string), LIT(total_hi_string), LIT(context_name)); + gb_string_free(idx_str); + continue; + } + + bool new_index = range_cache_add_index(&rc, index); + if (!new_index) { + gbString idx_str = expr_to_string(op_index.expr); + error(elem, "Duplicate field index %s for %.*s", idx_str, LIT(context_name)); + gb_string_free(idx_str); + continue; + } + + if (max < index+1) { + max = index+1; + } + + Operand operand = {}; + check_expr_with_type_hint(c, &operand, fv->value, elem_type); + check_assignment(c, &operand, elem_type, context_name); + + is_constant = is_constant && operand.mode == Addressing_Constant; + + add_to_seen_map(c, &seen, op_index); + } + } + + cl->max_count = max; + + } else { + isize index = 0; + for (; index < cl->elems.count; index++) { + Ast *e = cl->elems[index]; + if (e == nullptr) { + error(node, "Invalid literal element"); + continue; + } + + if (e->kind == Ast_FieldValue) { + error(e, "Mixture of 'field = value' and value elements in a literal is not allowed"); + continue; + } + + if (0 <= max_type_count && max_type_count <= index) { + error(e, "Index %lld is out of bounds (>= %lld) for %.*s", index, max_type_count, LIT(context_name)); + } + + Operand operand = {}; + check_expr_with_type_hint(c, &operand, e, elem_type); + check_assignment(c, &operand, elem_type, context_name); + + is_constant = is_constant && operand.mode == Addressing_Constant; + } + + if (max < index) { + max = index; + } + } + + bool was_error = false; + if (cl->elems.count > 0 && cl->elems[0]->kind != Ast_FieldValue) { + if (0 < max && max < t->EnumeratedArray.count) { + error(node, "Expected %lld values for this enumerated array literal, got %lld", cast(long long)t->EnumeratedArray.count, cast(long long)max); + was_error = true; + } else { + error(node, "Enumerated array literals must only have 'field = value' elements, bare elements are not allowed"); + was_error = true; + } + } + + // NOTE(bill): Check for missing cases when `#partial literal` is not present + if (cl->elems.count > 0 && !was_error && !is_partial) { + Type *et = base_type(index_type); + GB_ASSERT(et->kind == Type_Enum); + auto fields = et->Enum.fields; + 
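+ // e.g. (illustrative, hypothetical names): with 'E :: enum {A, B, C}', the literal '[E]int{.A = 1}' reports 'B' and 'C' as unhandled below, while '#partial [E]int{.A = 1}' skips this check.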
+ auto unhandled = array_make(temporary_allocator(), 0, fields.count); + + for_array(i, fields) { + Entity *f = fields[i]; + if (f->kind != Entity_Constant) { + continue; + } + ExactValue v = f->Constant.value; + auto found = map_get(&seen, hash_exact_value(v)); + if (!found) { + array_add(&unhandled, f); + } + } + + if (unhandled.count > 0) { + begin_error_block(); + defer (end_error_block()); + + if (unhandled.count == 1) { + error_no_newline(node, "Unhandled enumerated array case: %.*s", LIT(unhandled[0]->token.string)); + } else { + error(node, "Unhandled enumerated array cases:"); + for_array(i, unhandled) { + Entity *f = unhandled[i]; + error_line("\t%.*s\n", LIT(f->token.string)); + } + } + error_line("\n"); + + error_line("\tSuggestion: Was '#partial %s{...}' wanted?\n", type_to_string(type)); + } + } + + break; + } + + case Type_Basic: { + if (!is_type_any(t)) { + if (cl->elems.count != 0) { + error(node, "Illegal compound literal"); + } + break; + } + if (cl->elems.count == 0) { + break; // NOTE(bill): No need to init + } + { // Checker values + Type *field_types[2] = {t_rawptr, t_typeid}; + isize field_count = 2; + if (cl->elems[0]->kind == Ast_FieldValue) { + bool fields_visited[2] = {}; + + for_array(i, cl->elems) { + Ast *elem = cl->elems[i]; + if (elem->kind != Ast_FieldValue) { + error(elem, "Mixture of 'field = value' and value elements in a 'any' literal is not allowed"); + continue; + } + ast_node(fv, FieldValue, elem); + if (fv->field->kind != Ast_Ident) { + gbString expr_str = expr_to_string(fv->field); + error(elem, "Invalid field name '%s' in 'any' literal", expr_str); + gb_string_free(expr_str); + continue; + } + String name = fv->field->Ident.token.string; + + Selection sel = lookup_field(type, name, o->mode == Addressing_Type); + if (sel.entity == nullptr) { + error(elem, "Unknown field '%.*s' in 'any' literal", LIT(name)); + continue; + } + + isize index = sel.index[0]; + + if (fields_visited[index]) { + error(elem, "Duplicate field '%.*s' in 'any' literal", LIT(name)); + continue; + } + + fields_visited[index] = true; + check_expr(c, o, fv->value); + + // NOTE(bill): 'any' literals can never be constant + is_constant = false; + + check_assignment(c, o, field_types[index], str_lit("'any' literal")); + } + } else { + for_array(index, cl->elems) { + Ast *elem = cl->elems[index]; + if (elem->kind == Ast_FieldValue) { + error(elem, "Mixture of 'field = value' and value elements in a 'any' literal is not allowed"); + continue; + } + + + check_expr(c, o, elem); + if (index >= field_count) { + error(o->expr, "Too many values in 'any' literal, expected %td", field_count); + break; + } + + // NOTE(bill): 'any' literals can never be constant + is_constant = false; + + check_assignment(c, o, field_types[index], str_lit("'any' literal")); + } + if (cl->elems.count < field_count) { + error(cl->close, "Too few values in 'any' literal, expected %td, got %td", field_count, cl->elems.count); + } + } + } + + break; + } + + case Type_Map: { + if (cl->elems.count == 0) { + break; + } + is_constant = false; + { // Checker values + bool key_is_typeid = is_type_typeid(t->Map.key); + bool value_is_typeid = is_type_typeid(t->Map.value); + + for_array(i, cl->elems) { + Ast *elem = cl->elems[i]; + if (elem->kind != Ast_FieldValue) { + error(elem, "Only 'field = value' elements are allowed in a map literal"); + continue; + } + ast_node(fv, FieldValue, elem); + + if (key_is_typeid) { + check_expr_or_type(c, o, fv->field, t->Map.key); + } else { + check_expr_with_type_hint(c, o, fv->field, 
t->Map.key); + } + check_assignment(c, o, t->Map.key, str_lit("map literal")); + if (o->mode == Addressing_Invalid) { + continue; + } + + if (value_is_typeid) { + check_expr_or_type(c, o, fv->value, t->Map.value); + } else { + check_expr_with_type_hint(c, o, fv->value, t->Map.value); + } + check_assignment(c, o, t->Map.value, str_lit("map literal")); + } + } + + if (build_context.no_dynamic_literals && cl->elems.count) { + error(node, "Compound literals of dynamic types have been disabled"); + } else { + add_package_dependency(c, "runtime", "__dynamic_map_reserve"); + add_package_dependency(c, "runtime", "__dynamic_map_set"); + } + break; + } + + case Type_BitSet: { + if (cl->elems.count == 0) { + break; // NOTE(bill): No need to init + } + Type *et = base_type(t->BitSet.elem); + isize field_count = 0; + if (et->kind == Type_Enum) { + field_count = et->Enum.fields.count; + } + + if (cl->elems[0]->kind == Ast_FieldValue) { + error(cl->elems[0], "'field = value' in a bit_set literal is not allowed"); + is_constant = false; + } else { + for_array(index, cl->elems) { + Ast *elem = cl->elems[index]; + if (elem->kind == Ast_FieldValue) { + error(elem, "'field = value' in a bit_set literal is not allowed"); + continue; + } + + check_expr_with_type_hint(c, o, elem, et); + + if (is_constant) { + is_constant = o->mode == Addressing_Constant; + } + + check_assignment(c, o, t->BitSet.elem, str_lit("bit_set literal")); + if (o->mode == Addressing_Constant) { + i64 lower = t->BitSet.lower; + i64 upper = t->BitSet.upper; + i64 v = exact_value_to_i64(o->value); + if (lower <= v && v <= upper) { + // okay + } else { + error(elem, "Bit field value out of bounds, %lld not in the range %lld .. %lld", v, lower, upper); + continue; + } + } + } + } + break; + } + + default: { + if (cl->elems.count == 0) { + break; // NOTE(bill): No need to init + } + + gbString str = type_to_string(type); + error(node, "Invalid compound literal type '%s'", str); + gb_string_free(str); + return kind; + } + } + + if (is_constant) { + o->mode = Addressing_Constant; + + if (is_type_bit_set(type)) { + // NOTE(bill): Encode as an integer + + i64 lower = base_type(type)->BitSet.lower; + + u64 bits = 0; + for_array(index, cl->elems) { + Ast *elem = cl->elems[index]; + GB_ASSERT(elem->kind != Ast_FieldValue); + TypeAndValue tav = elem->tav; + ExactValue i = exact_value_to_integer(tav.value); + if (i.kind != ExactValue_Integer) { + continue; + } + i64 val = big_int_to_i64(&i.value_integer); + val -= lower; + u64 bit = u64(1ll<<val); + bits |= bit; + } + + o->value = exact_value_u64(bits); + } else if (is_type_constant_type(type) && cl->elems.count == 0) { + ExactValue value = exact_value_compound(node); + Type *bt = core_type(type); + if (bt->kind == Type_Basic) { + if (bt->Basic.flags & BasicFlag_Boolean) { + value = exact_value_bool(false); + } else if (bt->Basic.flags & BasicFlag_Integer) { + value = exact_value_i64(0); + } else if (bt->Basic.flags & BasicFlag_Unsigned) { + value = exact_value_i64(0); + } else if (bt->Basic.flags & BasicFlag_Float) { + value = exact_value_float(0); + } else if (bt->Basic.flags & BasicFlag_Complex) { + value = exact_value_complex(0, 0); + } else if (bt->Basic.flags & BasicFlag_Quaternion) { + value = exact_value_quaternion(0, 0, 0, 0); + } else if (bt->Basic.flags & BasicFlag_Pointer) { + value = exact_value_pointer(0); + } else if (bt->Basic.flags & BasicFlag_String) { + String empty_string = {}; + value = exact_value_string(empty_string); + } else if (bt->Basic.flags & BasicFlag_Rune) { + value = exact_value_i64(0); + } + } + + 
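+ // (illustrative) e.g. an empty constant literal such as 'f32{}' or 'string{}' takes this branch and yields the type's zero value ('0', '""').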
o->value = value; + } else { + o->value = exact_value_compound(node); + } + } else { + o->mode = Addressing_Value; + } + o->type = type; + return kind; +} + +ExprKind check_type_assertion(CheckerContext *c, Operand *o, Ast *node, Type *type_hint) { + ExprKind kind = Expr_Expr; + ast_node(ta, TypeAssertion, node); + check_expr(c, o, ta->expr); + node->viral_state_flags |= ta->expr->viral_state_flags; + + if (o->mode == Addressing_Invalid) { + o->expr = node; + return kind; + } + if (o->mode == Addressing_Constant) { + gbString expr_str = expr_to_string(o->expr); + error(o->expr, "A type assertion cannot be applied to a constant expression: '%s'", expr_str); + gb_string_free(expr_str); + o->mode = Addressing_Invalid; + o->expr = node; + return kind; + } + + if (is_type_untyped(o->type)) { + gbString expr_str = expr_to_string(o->expr); + error(o->expr, "A type assertion cannot be applied to an untyped expression: '%s'", expr_str); + gb_string_free(expr_str); + o->mode = Addressing_Invalid; + o->expr = node; + return kind; + } + + Type *src = type_deref(o->type); + Type *bsrc = base_type(src); + + + if (ta->type != nullptr && ta->type->kind == Ast_UnaryExpr && ta->type->UnaryExpr.op.kind == Token_Question) { + if (!is_type_union(src)) { + gbString str = type_to_string(o->type); + error(o->expr, "Type assertions with .? can only operate on unions, got %s", str); + gb_string_free(str); + o->mode = Addressing_Invalid; + o->expr = node; + return kind; + } + + if (bsrc->Union.variants.count != 1 && type_hint != nullptr) { + bool allowed = false; + for_array(i, bsrc->Union.variants) { + Type *vt = bsrc->Union.variants[i]; + if (are_types_identical(vt, type_hint)) { + allowed = true; + add_type_info_type(c, vt); + break; + } + } + if (allowed) { + add_type_info_type(c, o->type); + o->type = type_hint; + o->mode = Addressing_OptionalOk; + return kind; + } + } + + if (bsrc->Union.variants.count != 1) { + error(o->expr, "Type assertions with .? 
can only operate on unions with 1 variant, got %lld", cast(long long)bsrc->Union.variants.count); + o->mode = Addressing_Invalid; + o->expr = node; + return kind; + } + + add_type_info_type(c, o->type); + add_type_info_type(c, bsrc->Union.variants[0]); + + o->type = bsrc->Union.variants[0]; + o->mode = Addressing_OptionalOk; + } else { + Type *t = check_type(c, ta->type); + Type *dst = t; + + if (is_type_union(src)) { + bool ok = false; + for_array(i, bsrc->Union.variants) { + Type *vt = bsrc->Union.variants[i]; + if (are_types_identical(vt, dst)) { + ok = true; + break; + } + } + + if (!ok) { + gbString expr_str = expr_to_string(o->expr); + gbString dst_type_str = type_to_string(t); + defer (gb_string_free(expr_str)); + defer (gb_string_free(dst_type_str)); + if (bsrc->Union.variants.count == 0) { + error(o->expr, "Cannot type assert '%s' to '%s' as this is an empty union", expr_str, dst_type_str); + } else { + error(o->expr, "Cannot type assert '%s' to '%s' as it is not a variant of that union", expr_str, dst_type_str); + } + o->mode = Addressing_Invalid; + o->expr = node; + return kind; + } + + add_type_info_type(c, o->type); + add_type_info_type(c, t); + + o->type = t; + o->mode = Addressing_OptionalOk; + } else if (is_type_any(src)) { + o->type = t; + o->mode = Addressing_OptionalOk; + + add_type_info_type(c, o->type); + add_type_info_type(c, t); + } else { + gbString str = type_to_string(o->type); + error(o->expr, "Type assertions can only operate on unions and 'any', got %s", str); + gb_string_free(str); + o->mode = Addressing_Invalid; + o->expr = node; + return kind; + } + } + + if ((c->state_flags & StateFlag_no_type_assert) == 0) { + add_package_dependency(c, "runtime", "type_assertion_check"); + add_package_dependency(c, "runtime", "type_assertion_check2"); + } + return kind; +} + +ExprKind check_selector_call_expr(CheckerContext *c, Operand *o, Ast *node, Type *type_hint) { + ast_node(se, SelectorCallExpr, node); + // IMPORTANT NOTE(bill, 2020-05-22): This is a complete hack to get a shorthand which is extremely useful for vtables + // COM APIs is a great example of where this kind of thing is extremely useful + // General idea: + // + // x->y(123) == x.y(x, 123) + // + // How this has been implemented at the moment is quite hacky but it's done so to reduce need for huge backend changes + // Just regenerating a new AST aids things + // + // TODO(bill): Is this a good hack or not? + // + // NOTE(bill, 2020-05-22): I'm going to regret this decision, ain't I? 
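+ // e.g. (hypothetical user code): 'dev->destroy(123)' is checked as if it had been written 'dev.destroy(dev, 123)'.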
+ + + if (se->modified_call) { + // Prevent double evaluation + o->expr = node; + o->type = node->tav.type; + o->value = node->tav.value; + o->mode = node->tav.mode; + return Expr_Expr; + } + + bool allow_arrow_right_selector_expr; + allow_arrow_right_selector_expr = c->allow_arrow_right_selector_expr; + c->allow_arrow_right_selector_expr = true; + Operand x = {}; + ExprKind kind = check_expr_base(c, &x, se->expr, nullptr); + c->allow_arrow_right_selector_expr = allow_arrow_right_selector_expr; + + if (x.mode == Addressing_Invalid || x.type == t_invalid) { + o->mode = Addressing_Invalid; + o->type = t_invalid; + o->expr = node; + return kind; + } + if (!is_type_proc(x.type)) { + gbString type_str = type_to_string(x.type); + error(se->call, "Selector call expressions expect a procedure type for the call, got '%s'", type_str); + gb_string_free(type_str); + + o->mode = Addressing_Invalid; + o->type = t_invalid; + o->expr = node; + return Expr_Stmt; + } + + ast_node(ce, CallExpr, se->call); + + GB_ASSERT(x.expr->kind == Ast_SelectorExpr); + + Ast *first_arg = x.expr->SelectorExpr.expr; + GB_ASSERT(first_arg != nullptr); + + Type *pt = base_type(x.type); + GB_ASSERT(pt->kind == Type_Proc); + Type *first_type = nullptr; + String first_arg_name = {}; + if (pt->Proc.param_count > 0) { + Entity *f = pt->Proc.params->Tuple.variables[0]; + first_type = f->type; + first_arg_name = f->token.string; + } + if (first_arg_name.len == 0) { + first_arg_name = str_lit("_"); + } + + if (first_type == nullptr) { + error(se->call, "Selector call expressions expect a procedure type for the call with at least 1 parameter"); + o->mode = Addressing_Invalid; + o->type = t_invalid; + o->expr = node; + return Expr_Stmt; + } + + Operand y = {}; + y.mode = first_arg->tav.mode; + y.type = first_arg->tav.type; + y.value = first_arg->tav.value; + if (check_is_assignable_to(c, &y, first_type)) { + // Do nothing, it's valid + } else { + Operand z = y; + z.type = type_deref(y.type); + if (check_is_assignable_to(c, &z, first_type)) { + // NOTE(bill): AST GENERATION HACK! + Token op = {Token_Pointer}; + first_arg = ast_deref_expr(first_arg->file(), first_arg, op); + } else if (y.mode == Addressing_Variable) { + Operand w = y; + w.type = alloc_type_pointer(y.type); + if (check_is_assignable_to(c, &w, first_type)) { + // NOTE(bill): AST GENERATION HACK! 
+ Token op = {Token_And}; + first_arg = ast_unary_expr(first_arg->file(), op, first_arg); + } + } + } + + if (ce->args.count > 0) { + bool fail = false; + bool first_is_field_value = (ce->args[0]->kind == Ast_FieldValue); + for_array(i, ce->args) { + Ast *arg = ce->args[i]; + bool mix = false; + if (first_is_field_value) { + mix = arg->kind != Ast_FieldValue; + } else { + mix = arg->kind == Ast_FieldValue; + } + if (mix) { + fail = true; + break; + } + } + if (!fail && first_is_field_value) { + Token op = {Token_Eq}; + AstFile *f = first_arg->file(); + first_arg = ast_field_value(f, ast_ident(f, make_token_ident(first_arg_name)), first_arg, op); + } + } + + + + auto modified_args = slice_make(heap_allocator(), ce->args.count+1); + modified_args[0] = first_arg; + slice_copy(&modified_args, ce->args, 1); + ce->args = modified_args; + se->modified_call = true; + + allow_arrow_right_selector_expr = c->allow_arrow_right_selector_expr; + c->allow_arrow_right_selector_expr = true; + check_expr_base(c, o, se->call, type_hint); + c->allow_arrow_right_selector_expr = allow_arrow_right_selector_expr; + + o->expr = node; + return Expr_Expr; +} + + +ExprKind check_index_expr(CheckerContext *c, Operand *o, Ast *node, Type *type_hint) { + ExprKind kind = Expr_Expr; + ast_node(ie, IndexExpr, node); + check_expr(c, o, ie->expr); + node->viral_state_flags |= ie->expr->viral_state_flags; + if (o->mode == Addressing_Invalid) { + o->expr = node; + return kind; + } + + Type *t = base_type(type_deref(o->type)); + bool is_ptr = is_type_pointer(o->type); + bool is_const = o->mode == Addressing_Constant; + + if (is_type_map(t)) { + Operand key = {}; + if (is_type_typeid(t->Map.key)) { + check_expr_or_type(c, &key, ie->index, t->Map.key); + } else { + check_expr_with_type_hint(c, &key, ie->index, t->Map.key); + } + check_assignment(c, &key, t->Map.key, str_lit("map index")); + if (key.mode == Addressing_Invalid) { + o->mode = Addressing_Invalid; + o->expr = node; + return kind; + } + o->mode = Addressing_MapIndex; + o->type = t->Map.value; + o->expr = node; + + add_package_dependency(c, "runtime", "__dynamic_map_get"); + add_package_dependency(c, "runtime", "__dynamic_map_set"); + return Expr_Expr; + } + + i64 max_count = -1; + bool valid = check_set_index_data(o, t, is_ptr, &max_count, o->type); + + if (is_const) { + if (is_type_array(t)) { + // OKay + } else if (is_type_slice(t)) { + // Okay + } else if (is_type_enumerated_array(t)) { + // Okay + } else if (is_type_string(t)) { + // Okay + } else if (is_type_relative_slice(t)) { + // Okay + } else if (is_type_matrix(t)) { + // Okay + } else { + valid = false; + } + } + + if (!valid) { + gbString str = expr_to_string(o->expr); + gbString type_str = type_to_string(o->type); + defer (gb_string_free(str)); + defer (gb_string_free(type_str)); + if (is_const) { + error(o->expr, "Cannot index constant '%s' of type '%s'", str, type_str); + } else { + error(o->expr, "Cannot index '%s' of type '%s'", str, type_str); + } + o->mode = Addressing_Invalid; + o->expr = node; + return kind; + } + + if (ie->index == nullptr) { + gbString str = expr_to_string(o->expr); + error(o->expr, "Missing index for '%s'", str); + gb_string_free(str); + o->mode = Addressing_Invalid; + o->expr = node; + return kind; + } + + Type *index_type_hint = nullptr; + if (is_type_enumerated_array(t)) { + Type *bt = base_type(t); + GB_ASSERT(bt->kind == Type_EnumeratedArray); + index_type_hint = bt->EnumeratedArray.index; + } + + i64 index = 0; + bool ok = check_index_value(c, t, false, ie->index, 
max_count, &index, index_type_hint); + if (is_const) { + if (index < 0) { + gbString str = expr_to_string(o->expr); + error(o->expr, "Cannot index a constant '%s'", str); + error_line("\tSuggestion: store the constant into a variable in order to index it with a variable index\n"); + gb_string_free(str); + o->mode = Addressing_Invalid; + o->expr = node; + return kind; + } else if (ok) { + ExactValue value = type_and_value_of_expr(ie->expr).value; + o->mode = Addressing_Constant; + bool success = false; + bool finish = false; + o->value = get_constant_field_single(c, value, cast(i32)index, &success, &finish); + if (!success) { + gbString str = expr_to_string(o->expr); + error(o->expr, "Cannot index a constant '%s' with index %lld", str, cast(long long)index); + error_line("\tSuggestion: store the constant into a variable in order to index it with a variable index\n"); + gb_string_free(str); + o->mode = Addressing_Invalid; + o->expr = node; + return kind; + } + } + } + + if (type_hint != nullptr && is_type_matrix(t)) { + // TODO(bill): allow matrix columns to be assignable to other types which are the same internally + // if a type hint exists + } + return kind; +} + +ExprKind check_slice_expr(CheckerContext *c, Operand *o, Ast *node, Type *type_hint) { + ExprKind kind = Expr_Stmt; + ast_node(se, SliceExpr, node); + check_expr(c, o, se->expr); + node->viral_state_flags |= se->expr->viral_state_flags; + + if (o->mode == Addressing_Invalid) { + o->mode = Addressing_Invalid; + o->expr = node; + return kind; + } + + bool valid = false; + i64 max_count = -1; + Type *t = base_type(type_deref(o->type)); + switch (t->kind) { + case Type_Basic: + if (t->Basic.kind == Basic_string || t->Basic.kind == Basic_UntypedString) { + valid = true; + if (o->mode == Addressing_Constant) { + max_count = o->value.value_string.len; + } + o->type = type_deref(o->type); + } + break; + + case Type_Array: + valid = true; + max_count = t->Array.count; + if (o->mode != Addressing_Variable && !is_type_pointer(o->type)) { + gbString str = expr_to_string(node); + error(node, "Cannot slice array '%s', value is not addressable", str); + gb_string_free(str); + o->mode = Addressing_Invalid; + o->expr = node; + return kind; + } + o->type = alloc_type_slice(t->Array.elem); + break; + + case Type_MultiPointer: + valid = true; + o->type = type_deref(o->type); + break; + + case Type_Slice: + valid = true; + o->type = type_deref(o->type); + break; + + case Type_DynamicArray: + valid = true; + o->type = alloc_type_slice(t->DynamicArray.elem); + break; + + case Type_Struct: + if (is_type_soa_struct(t)) { + valid = true; + o->type = make_soa_struct_slice(c, nullptr, nullptr, t->Struct.soa_elem); + } + break; + + case Type_RelativeSlice: + valid = true; + o->type = t->RelativeSlice.slice_type; + if (o->mode != Addressing_Variable) { + gbString str = expr_to_string(node); + error(node, "Cannot relative slice '%s', value is not addressable", str); + gb_string_free(str); + o->mode = Addressing_Invalid; + o->expr = node; + return kind; + } + break; + } + + if (!valid) { + gbString str = expr_to_string(o->expr); + gbString type_str = type_to_string(o->type); + error(o->expr, "Cannot slice '%s' of type '%s'", str, type_str); + gb_string_free(type_str); + gb_string_free(str); + o->mode = Addressing_Invalid; + o->expr = node; + return kind; + } + + if (se->low == nullptr && se->high != nullptr) { + // It is okay to continue as it will assume the 1st index is zero + } + + i64 indices[2] = {}; + Ast *nodes[2] = {se->low, se->high}; + for (isize i = 
0; i < gb_count_of(nodes); i++) { + i64 index = max_count; + if (nodes[i] != nullptr) { + i64 capacity = -1; + if (max_count >= 0) { + capacity = max_count; + } + i64 j = 0; + if (check_index_value(c, t, true, nodes[i], capacity, &j)) { + index = j; + } + + node->viral_state_flags |= nodes[i]->viral_state_flags; + } else if (i == 0) { + index = 0; + } + indices[i] = index; + } + + for (isize i = 0; i < gb_count_of(indices); i++) { + i64 a = indices[i]; + for (isize j = i+1; j < gb_count_of(indices); j++) { + i64 b = indices[j]; + if (a > b && b >= 0) { + error(se->close, "Invalid slice indices: [%td > %td]", a, b); + } + } + } + + if (max_count < 0) { + if (o->mode == Addressing_Constant) { + gbString s = expr_to_string(se->expr); + error(se->expr, "Cannot slice constant value '%s'", s); + gb_string_free(s); + } + } + + if (t->kind == Type_MultiPointer && se->high != nullptr) { + /* + x[:] -> [^]T + x[i:] -> [^]T + x[:n] -> []T + x[i:n] -> []T + */ + o->type = alloc_type_slice(t->MultiPointer.elem); + } + + o->mode = Addressing_Value; + + if (is_type_string(t) && max_count >= 0) { + bool all_constant = true; + for (isize i = 0; i < gb_count_of(nodes); i++) { + if (nodes[i] != nullptr) { + TypeAndValue tav = type_and_value_of_expr(nodes[i]); + if (tav.mode != Addressing_Constant) { + all_constant = false; + break; + } + } + } + if (!all_constant) { + gbString str = expr_to_string(o->expr); + error(o->expr, "Cannot slice '%s' with non-constant indices", str); + error_line("\tSuggestion: store the constant into a variable in order to index it with a variable index\n"); + gb_string_free(str); + o->mode = Addressing_Value; // NOTE(bill): Keep subsequent values going without erring + o->expr = node; + return kind; + } + + String s = {}; + if (o->value.kind == ExactValue_String) { + s = o->value.value_string; + } + + o->mode = Addressing_Constant; + o->type = t; + o->value = exact_value_string(substring(s, cast(isize)indices[0], cast(isize)indices[1])); + } + return kind; +} + ExprKind check_expr_base_internal(CheckerContext *c, Operand *o, Ast *node, Type *type_hint) { u32 prev_state_flags = c->state_flags; defer (c->state_flags = prev_state_flags); @@ -6875,6 +8912,14 @@ ExprKind check_expr_base_internal(CheckerContext *c, Operand *o, Ast *node, Type out &= ~StateFlag_no_bounds_check; } + if (in & StateFlag_no_type_assert) { + out |= StateFlag_no_type_assert; + out &= ~StateFlag_type_assert; + } else if (in & StateFlag_type_assert) { + out |= StateFlag_type_assert; + out &= ~StateFlag_no_type_assert; + } + c->state_flags = out; } @@ -6954,52 +8999,7 @@ ExprKind check_expr_base_internal(CheckerContext *c, Operand *o, Ast *node, Type case_end; case_ast_node(bd, BasicDirective, node); - o->mode = Addressing_Constant; - String name = bd->name.string; - if (name == "file") { - o->type = t_untyped_string; - o->value = exact_value_string(get_file_path_string(bd->token.pos.file_id)); - } else if (name == "line") { - o->type = t_untyped_integer; - o->value = exact_value_i64(bd->token.pos.line); - } else if (name == "procedure") { - if (c->curr_proc_decl == nullptr) { - error(node, "#procedure may only be used within procedures"); - o->type = t_untyped_string; - o->value = exact_value_string(str_lit("")); - } else { - o->type = t_untyped_string; - o->value = exact_value_string(c->proc_name); - } - } else if (name == "caller_location") { - init_core_source_code_location(c->checker); - error(node, "#caller_location may only be used as a default argument parameter"); - o->type = t_source_code_location; - 
o->mode = Addressing_Value; - } else { - if (name == "location") { - init_core_source_code_location(c->checker); - error(node, "'#%.*s' must be used in a call expression", LIT(name)); - o->type = t_source_code_location; - o->mode = Addressing_Value; - } else if ( - name == "assert" || - name == "defined" || - name == "config" || - name == "load" || - name == "load_hash" || - name == "load_or" - ) { - error(node, "'#%.*s' must be used as a call", LIT(name)); - o->type = t_invalid; - o->mode = Addressing_Invalid; - } else { - error(node, "Unknown directive: #%.*s", LIT(name)); - o->type = t_invalid; - o->mode = Addressing_Invalid; - } - - } + kind = check_basic_directive_expr(c, o, node, type_hint); case_end; case_ast_node(pg, ProcGroup, node); @@ -7048,1110 +9048,23 @@ ExprKind check_expr_base_internal(CheckerContext *c, Operand *o, Ast *node, Type case_end; case_ast_node(te, TernaryIfExpr, node); - Operand cond = {Addressing_Invalid}; - check_expr(c, &cond, te->cond); - node->viral_state_flags |= te->cond->viral_state_flags; - - if (cond.mode != Addressing_Invalid && !is_type_boolean(cond.type)) { - error(te->cond, "Non-boolean condition in ternary if expression"); - } - - Operand x = {Addressing_Invalid}; - Operand y = {Addressing_Invalid}; - check_expr_or_type(c, &x, te->x, type_hint); - node->viral_state_flags |= te->x->viral_state_flags; - - if (te->y != nullptr) { - check_expr_or_type(c, &y, te->y, type_hint); - node->viral_state_flags |= te->y->viral_state_flags; - } else { - error(node, "A ternary expression must have an else clause"); - return kind; - } - - if (x.type == nullptr || x.type == t_invalid || - y.type == nullptr || y.type == t_invalid) { - return kind; - } - - convert_to_typed(c, &x, y.type); - if (x.mode == Addressing_Invalid) { - return kind; - } - convert_to_typed(c, &y, x.type); - if (y.mode == Addressing_Invalid) { - x.mode = Addressing_Invalid; - return kind; - } - - if (!ternary_compare_types(x.type, y.type)) { - gbString its = type_to_string(x.type); - gbString ets = type_to_string(y.type); - error(node, "Mismatched types in ternary if expression, %s vs %s", its, ets); - gb_string_free(ets); - gb_string_free(its); - return kind; - } - - o->type = x.type; - if (is_type_untyped_nil(o->type) || is_type_untyped_undef(o->type)) { - o->type = y.type; - } - - o->mode = Addressing_Value; - o->expr = node; - if (type_hint != nullptr && is_type_untyped(o->type)) { - if (check_cast_internal(c, &x, type_hint) && - check_cast_internal(c, &y, type_hint)) { - convert_to_typed(c, o, type_hint); - update_untyped_expr_type(c, node, type_hint, !is_type_untyped(type_hint)); - } - } + kind = check_ternary_if_expr(c, o, node, type_hint); case_end; case_ast_node(te, TernaryWhenExpr, node); - Operand cond = {}; - check_expr(c, &cond, te->cond); - node->viral_state_flags |= te->cond->viral_state_flags; - - if (cond.mode != Addressing_Constant || !is_type_boolean(cond.type)) { - error(te->cond, "Expected a constant boolean condition in ternary when expression"); - return kind; - } - - if (cond.value.value_bool) { - check_expr_or_type(c, o, te->x, type_hint); - node->viral_state_flags |= te->x->viral_state_flags; - } else { - if (te->y != nullptr) { - check_expr_or_type(c, o, te->y, type_hint); - node->viral_state_flags |= te->y->viral_state_flags; - } else { - error(node, "A ternary when expression must have an else clause"); - return kind; - } - } + kind = check_ternary_when_expr(c, o, node, type_hint); case_end; case_ast_node(oe, OrElseExpr, node); - String name = oe->token.string; - 
Ast *arg = oe->x; - Ast *default_value = oe->y; - - Operand x = {}; - Operand y = {}; - check_multi_expr_with_type_hint(c, &x, arg, type_hint); - if (x.mode == Addressing_Invalid) { - o->mode = Addressing_Value; - o->type = t_invalid; - o->expr = node; - return Expr_Expr; - } - - check_multi_expr_with_type_hint(c, &y, default_value, x.type); - error_operand_no_value(&y); - if (y.mode == Addressing_Invalid) { - o->mode = Addressing_Value; - o->type = t_invalid; - o->expr = node; - return Expr_Expr; - } - - Type *left_type = nullptr; - Type *right_type = nullptr; - check_or_else_split_types(c, &x, name, &left_type, &right_type); - add_type_and_value(&c->checker->info, arg, x.mode, x.type, x.value); - - if (left_type != nullptr) { - check_assignment(c, &y, left_type, name); - } else { - check_or_else_expr_no_value_error(c, name, x, type_hint); - } - - if (left_type == nullptr) { - left_type = t_invalid; - } - o->mode = Addressing_Value; - o->type = left_type; - o->expr = node; - return Expr_Expr; + return check_or_else_expr(c, o, node, type_hint); case_end; case_ast_node(re, OrReturnExpr, node); - String name = re->token.string; - Operand x = {}; - check_multi_expr_with_type_hint(c, &x, re->expr, type_hint); - if (x.mode == Addressing_Invalid) { - o->mode = Addressing_Value; - o->type = t_invalid; - o->expr = node; - return Expr_Expr; - } - - Type *left_type = nullptr; - Type *right_type = nullptr; - check_or_return_split_types(c, &x, name, &left_type, &right_type); - add_type_and_value(&c->checker->info, re->expr, x.mode, x.type, x.value); - - if (right_type == nullptr) { - check_or_else_expr_no_value_error(c, name, x, type_hint); - } else { - Type *proc_type = base_type(c->curr_proc_sig); - GB_ASSERT(proc_type->kind == Type_Proc); - Type *result_type = proc_type->Proc.results; - if (result_type == nullptr) { - error(node, "'%.*s' requires the current procedure to have at least one return value", LIT(name)); - } else { - GB_ASSERT(result_type->kind == Type_Tuple); - - auto const &vars = result_type->Tuple.variables; - Type *end_type = vars[vars.count-1]->type; - - if (vars.count > 1) { - if (!proc_type->Proc.has_named_results) { - error(node, "'%.*s' within a procedure with more than 1 return value requires that the return values are named, allowing for early return", LIT(name)); - } - } - - Operand rhs = {}; - rhs.type = right_type; - rhs.mode = Addressing_Value; - - // TODO(bill): better error message - if (!check_is_assignable_to(c, &rhs, end_type)) { - gbString a = type_to_string(right_type); - gbString b = type_to_string(end_type); - gbString ret_type = type_to_string(result_type); - error(node, "Cannot assign end value of type '%s' to '%s' in '%.*s'", a, b, LIT(name)); - if (vars.count == 1) { - error_line("\tProcedure return value type: %s\n", ret_type); - } else { - error_line("\tProcedure return value types: (%s)\n", ret_type); - } - gb_string_free(ret_type); - gb_string_free(b); - gb_string_free(a); - } - } - } - - o->expr = node; - o->type = left_type; - if (left_type != nullptr) { - o->mode = Addressing_Value; - } else { - o->mode = Addressing_NoValue; - } - - if (c->curr_proc_sig == nullptr) { - error(node, "'%.*s' can only be used within a procedure", LIT(name)); - } - - if (c->in_defer) { - error(node, "'or_return' cannot be used within a defer statement"); - } - - return Expr_Expr; + return check_or_return_expr(c, o, node, type_hint); case_end; case_ast_node(cl, CompoundLit, node); - Type *type = type_hint; - if (type != nullptr && is_type_untyped(type)) { - type = nullptr; - 
} - bool is_to_be_determined_array_count = false; - bool is_constant = true; - if (cl->type != nullptr) { - type = nullptr; - - // [?]Type - if (cl->type->kind == Ast_ArrayType && cl->type->ArrayType.count != nullptr) { - Ast *count = cl->type->ArrayType.count; - if (count->kind == Ast_UnaryExpr && - count->UnaryExpr.op.kind == Token_Question) { - type = alloc_type_array(check_type(c, cl->type->ArrayType.elem), -1); - is_to_be_determined_array_count = true; - } - if (cl->elems.count > 0) { - if (cl->type->ArrayType.tag != nullptr) { - Ast *tag = cl->type->ArrayType.tag; - GB_ASSERT(tag->kind == Ast_BasicDirective); - String name = tag->BasicDirective.name.string; - if (name == "soa") { - error(node, "#soa arrays are not supported for compound literals"); - return kind; - } - } - } - } - if (cl->type->kind == Ast_DynamicArrayType && cl->type->DynamicArrayType.tag != nullptr) { - if (cl->elems.count > 0) { - Ast *tag = cl->type->DynamicArrayType.tag; - GB_ASSERT(tag->kind == Ast_BasicDirective); - String name = tag->BasicDirective.name.string; - if (name == "soa") { - error(node, "#soa arrays are not supported for compound literals"); - return kind; - } - } - } - - if (type == nullptr) { - type = check_type(c, cl->type); - } - } - - if (type == nullptr) { - error(node, "Missing type in compound literal"); - return kind; - } - - - Type *t = base_type(type); - if (is_type_polymorphic(t)) { - gbString str = type_to_string(type); - error(node, "Cannot use a polymorphic type for a compound literal, got '%s'", str); - o->expr = node; - o->type = type; - gb_string_free(str); - return kind; - } - - - switch (t->kind) { - case Type_Struct: { - if (cl->elems.count == 0) { - break; // NOTE(bill): No need to init - } - if (t->Struct.is_raw_union) { - if (cl->elems.count > 0) { - // NOTE: unions cannot be constant - is_constant = false; - - if (cl->elems[0]->kind != Ast_FieldValue) { - gbString type_str = type_to_string(type); - error(node, "%s ('struct #raw_union') compound literals are only allowed to contain 'field = value' elements", type_str); - gb_string_free(type_str); - } else { - if (cl->elems.count != 1) { - gbString type_str = type_to_string(type); - error(node, "%s ('struct #raw_union') compound literals are only allowed to contain up to 1 'field = value' element, got %td", type_str, cl->elems.count); - gb_string_free(type_str); - } else { - Ast *elem = cl->elems[0]; - ast_node(fv, FieldValue, elem); - if (fv->field->kind != Ast_Ident) { - gbString expr_str = expr_to_string(fv->field); - error(elem, "Invalid field name '%s' in structure literal", expr_str); - gb_string_free(expr_str); - break; - } - - String name = fv->field->Ident.token.string; - - Selection sel = lookup_field(type, name, o->mode == Addressing_Type); - bool is_unknown = sel.entity == nullptr; - if (is_unknown) { - error(elem, "Unknown field '%.*s' in structure literal", LIT(name)); - break; - } - - if (sel.index.count > 1) { - error(elem, "Cannot assign to an anonymous field '%.*s' in a structure literal (at the moment)", LIT(name)); - break; - } - - Entity *field = t->Struct.fields[sel.index[0]]; - add_entity_use(c, fv->field, field); - - Operand o = {}; - check_expr_or_type(c, &o, fv->value, field->type); - - - check_assignment(c, &o, field->type, str_lit("structure literal")); - } - - } - } - break; - } - - - isize field_count = t->Struct.fields.count; - isize min_field_count = t->Struct.fields.count; - for (isize i = min_field_count-1; i >= 0; i--) { - Entity *e = t->Struct.fields[i]; - GB_ASSERT(e->kind == 
Entity_Variable); - if (e->Variable.param_value.kind != ParameterValue_Invalid) { - min_field_count--; - } else { - break; - } - } - - if (cl->elems[0]->kind == Ast_FieldValue) { - bool *fields_visited = gb_alloc_array(temporary_allocator(), bool, field_count); - - for_array(i, cl->elems) { - Ast *elem = cl->elems[i]; - if (elem->kind != Ast_FieldValue) { - error(elem, "Mixture of 'field = value' and value elements in a literal is not allowed"); - continue; - } - ast_node(fv, FieldValue, elem); - if (fv->field->kind != Ast_Ident) { - gbString expr_str = expr_to_string(fv->field); - error(elem, "Invalid field name '%s' in structure literal", expr_str); - gb_string_free(expr_str); - continue; - } - String name = fv->field->Ident.token.string; - - Selection sel = lookup_field(type, name, o->mode == Addressing_Type); - bool is_unknown = sel.entity == nullptr; - if (is_unknown) { - error(elem, "Unknown field '%.*s' in structure literal", LIT(name)); - continue; - } - - if (sel.index.count > 1) { - error(elem, "Cannot assign to an anonymous field '%.*s' in a structure literal (at the moment)", LIT(name)); - continue; - } - - Entity *field = t->Struct.fields[sel.index[0]]; - add_entity_use(c, fv->field, field); - - if (fields_visited[sel.index[0]]) { - error(elem, "Duplicate field '%.*s' in structure literal", LIT(name)); - continue; - } - - fields_visited[sel.index[0]] = true; - - Operand o = {}; - check_expr_or_type(c, &o, fv->value, field->type); - - if (is_type_any(field->type) || is_type_union(field->type) || is_type_raw_union(field->type) || is_type_typeid(field->type)) { - is_constant = false; - } - if (is_constant) { - is_constant = check_is_operand_compound_lit_constant(c, &o); - } - - check_assignment(c, &o, field->type, str_lit("structure literal")); - } - } else { - bool seen_field_value = false; - - for_array(index, cl->elems) { - Entity *field = nullptr; - Ast *elem = cl->elems[index]; - if (elem->kind == Ast_FieldValue) { - seen_field_value = true; - error(elem, "Mixture of 'field = value' and value elements in a literal is not allowed"); - continue; - } else if (seen_field_value) { - error(elem, "Value elements cannot be used after a 'field = value'"); - continue; - } - if (index >= field_count) { - error(elem, "Too many values in structure literal, expected %td, got %td", field_count, cl->elems.count); - break; - } - - if (field == nullptr) { - field = t->Struct.fields[index]; - } - - Operand o = {}; - check_expr_or_type(c, &o, elem, field->type); - - if (is_type_any(field->type) || is_type_union(field->type) || is_type_raw_union(field->type) || is_type_typeid(field->type)) { - is_constant = false; - } - if (is_constant) { - is_constant = check_is_operand_compound_lit_constant(c, &o); - } - - check_assignment(c, &o, field->type, str_lit("structure literal")); - } - if (cl->elems.count < field_count) { - if (min_field_count < field_count) { - if (cl->elems.count < min_field_count) { - error(cl->close, "Too few values in structure literal, expected at least %td, got %td", min_field_count, cl->elems.count); - } - } else { - error(cl->close, "Too few values in structure literal, expected %td, got %td", field_count, cl->elems.count); - } - } - } - - break; - } - - case Type_Slice: - case Type_Array: - case Type_DynamicArray: - case Type_SimdVector: - case Type_Matrix: - { - Type *elem_type = nullptr; - String context_name = {}; - i64 max_type_count = -1; - if (t->kind == Type_Slice) { - elem_type = t->Slice.elem; - context_name = str_lit("slice literal"); - } else if (t->kind == 
Type_Array) { - elem_type = t->Array.elem; - context_name = str_lit("array literal"); - if (!is_to_be_determined_array_count) { - max_type_count = t->Array.count; - } - } else if (t->kind == Type_DynamicArray) { - elem_type = t->DynamicArray.elem; - context_name = str_lit("dynamic array literal"); - is_constant = false; - - if (!build_context.no_dynamic_literals) { - add_package_dependency(c, "runtime", "__dynamic_array_reserve"); - add_package_dependency(c, "runtime", "__dynamic_array_append"); - } - } else if (t->kind == Type_SimdVector) { - elem_type = t->SimdVector.elem; - context_name = str_lit("simd vector literal"); - max_type_count = t->SimdVector.count; - } else if (t->kind == Type_Matrix) { - elem_type = t->Matrix.elem; - context_name = str_lit("matrix literal"); - max_type_count = t->Matrix.row_count*t->Matrix.column_count; - } else { - GB_PANIC("unreachable"); - } - - - i64 max = 0; - - Type *bet = base_type(elem_type); - if (!elem_type_can_be_constant(bet)) { - is_constant = false; - } - - if (bet == t_invalid) { - break; - } - - if (cl->elems.count > 0 && cl->elems[0]->kind == Ast_FieldValue) { - if (is_type_simd_vector(t)) { - error(cl->elems[0], "'field = value' is not allowed for SIMD vector literals"); - } else { - RangeCache rc = range_cache_make(heap_allocator()); - defer (range_cache_destroy(&rc)); - - for_array(i, cl->elems) { - Ast *elem = cl->elems[i]; - if (elem->kind != Ast_FieldValue) { - error(elem, "Mixture of 'field = value' and value elements in a literal is not allowed"); - continue; - } - ast_node(fv, FieldValue, elem); - - if (is_ast_range(fv->field)) { - Token op = fv->field->BinaryExpr.op; - - Operand x = {}; - Operand y = {}; - bool ok = check_range(c, fv->field, &x, &y, nullptr); - if (!ok) { - continue; - } - if (x.mode != Addressing_Constant || !is_type_integer(core_type(x.type))) { - error(x.expr, "Expected a constant integer as an array field"); - continue; - } - - if (y.mode != Addressing_Constant || !is_type_integer(core_type(y.type))) { - error(y.expr, "Expected a constant integer as an array field"); - continue; - } - - i64 lo = exact_value_to_i64(x.value); - i64 hi = exact_value_to_i64(y.value); - i64 max_index = hi; - if (op.kind == Token_RangeHalf) { // ..< (exclusive) - hi -= 1; - } else { // .. 
(inclusive) - max_index += 1; - } - - bool new_range = range_cache_add_range(&rc, lo, hi); - if (!new_range) { - error(elem, "Overlapping field range index %lld %.*s %lld for %.*s", lo, LIT(op.string), hi, LIT(context_name)); - continue; - } - - - if (max_type_count >= 0 && (lo < 0 || lo >= max_type_count)) { - error(elem, "Index %lld is out of bounds (0..<%lld) for %.*s", lo, max_type_count, LIT(context_name)); - continue; - } - if (max_type_count >= 0 && (hi < 0 || hi >= max_type_count)) { - error(elem, "Index %lld is out of bounds (0..<%lld) for %.*s", hi, max_type_count, LIT(context_name)); - continue; - } - - if (max < hi) { - max = max_index; - } - - Operand operand = {}; - check_expr_with_type_hint(c, &operand, fv->value, elem_type); - check_assignment(c, &operand, elem_type, context_name); - - is_constant = is_constant && operand.mode == Addressing_Constant; - } else { - Operand op_index = {}; - check_expr(c, &op_index, fv->field); - - if (op_index.mode != Addressing_Constant || !is_type_integer(core_type(op_index.type))) { - error(elem, "Expected a constant integer as an array field"); - continue; - } - // add_type_and_value(c->info, op_index.expr, op_index.mode, op_index.type, op_index.value); - - i64 index = exact_value_to_i64(op_index.value); - - if (max_type_count >= 0 && (index < 0 || index >= max_type_count)) { - error(elem, "Index %lld is out of bounds (0..<%lld) for %.*s", index, max_type_count, LIT(context_name)); - continue; - } - - bool new_index = range_cache_add_index(&rc, index); - if (!new_index) { - error(elem, "Duplicate field index %lld for %.*s", index, LIT(context_name)); - continue; - } - - if (max < index+1) { - max = index+1; - } - - Operand operand = {}; - check_expr_with_type_hint(c, &operand, fv->value, elem_type); - check_assignment(c, &operand, elem_type, context_name); - - is_constant = is_constant && operand.mode == Addressing_Constant; - } - } - - cl->max_count = max; - } - - } else { - isize index = 0; - for (; index < cl->elems.count; index++) { - Ast *e = cl->elems[index]; - if (e == nullptr) { - error(node, "Invalid literal element"); - continue; - } - - if (e->kind == Ast_FieldValue) { - error(e, "Mixture of 'field = value' and value elements in a literal is not allowed"); - continue; - } - - if (0 <= max_type_count && max_type_count <= index) { - error(e, "Index %lld is out of bounds (>= %lld) for %.*s", index, max_type_count, LIT(context_name)); - } - - Operand operand = {}; - check_expr_with_type_hint(c, &operand, e, elem_type); - check_assignment(c, &operand, elem_type, context_name); - - is_constant = is_constant && operand.mode == Addressing_Constant; - } - - if (max < index) { - max = index; - } - } - - - if (t->kind == Type_Array) { - if (is_to_be_determined_array_count) { - t->Array.count = max; - } else if (cl->elems.count > 0 && cl->elems[0]->kind != Ast_FieldValue) { - if (0 < max && max < t->Array.count) { - error(node, "Expected %lld values for this array literal, got %lld", cast(long long)t->Array.count, cast(long long)max); - } - } - } - - - if (t->kind == Type_SimdVector) { - if (!is_constant) { - error(node, "Expected all constant elements for a simd vector"); - } - } - - - if (t->kind == Type_DynamicArray) { - if (build_context.no_dynamic_literals && cl->elems.count) { - error(node, "Compound literals of dynamic types have been disabled"); - } - } - - if (t->kind == Type_Matrix) { - if (cl->elems.count > 0 && cl->elems[0]->kind != Ast_FieldValue) { - if (0 < max && max < max_type_count) { - error(node, "Expected %lld values 
for this matrix literal, got %lld", cast(long long)max_type_count, cast(long long)max); - } - } - } - - break; - } - - case Type_EnumeratedArray: - { - Type *elem_type = t->EnumeratedArray.elem; - Type *index_type = t->EnumeratedArray.index; - String context_name = str_lit("enumerated array literal"); - i64 max_type_count = t->EnumeratedArray.count; - - gbString index_type_str = type_to_string(index_type); - defer (gb_string_free(index_type_str)); - - i64 total_lo = exact_value_to_i64(*t->EnumeratedArray.min_value); - i64 total_hi = exact_value_to_i64(*t->EnumeratedArray.max_value); - - String total_lo_string = {}; - String total_hi_string = {}; - GB_ASSERT(is_type_enum(index_type)); - { - Type *bt = base_type(index_type); - GB_ASSERT(bt->kind == Type_Enum); - for_array(i, bt->Enum.fields) { - Entity *f = bt->Enum.fields[i]; - if (f->kind != Entity_Constant) { - continue; - } - if (total_lo_string.len == 0 && compare_exact_values(Token_CmpEq, f->Constant.value, *t->EnumeratedArray.min_value)) { - total_lo_string = f->token.string; - } - if (total_hi_string.len == 0 && compare_exact_values(Token_CmpEq, f->Constant.value, *t->EnumeratedArray.max_value)) { - total_hi_string = f->token.string; - } - if (total_lo_string.len != 0 && total_hi_string.len != 0) { - break; - } - } - } - - i64 max = 0; - - Type *bet = base_type(elem_type); - if (!elem_type_can_be_constant(bet)) { - is_constant = false; - } - - if (bet == t_invalid) { - break; - } - - if (cl->elems.count > 0 && cl->elems[0]->kind == Ast_FieldValue) { - RangeCache rc = range_cache_make(heap_allocator()); - defer (range_cache_destroy(&rc)); - - for_array(i, cl->elems) { - Ast *elem = cl->elems[i]; - if (elem->kind != Ast_FieldValue) { - error(elem, "Mixture of 'field = value' and value elements in a literal is not allowed"); - continue; - } - ast_node(fv, FieldValue, elem); - - if (is_ast_range(fv->field)) { - Token op = fv->field->BinaryExpr.op; - - Operand x = {}; - Operand y = {}; - bool ok = check_range(c, fv->field, &x, &y, nullptr, index_type); - if (!ok) { - continue; - } - if (x.mode != Addressing_Constant || !are_types_identical(x.type, index_type)) { - error(x.expr, "Expected a constant enum of type '%s' as an array field", index_type_str); - continue; - } - - if (y.mode != Addressing_Constant || !are_types_identical(x.type, index_type)) { - error(y.expr, "Expected a constant enum of type '%s' as an array field", index_type_str); - continue; - } - - i64 lo = exact_value_to_i64(x.value); - i64 hi = exact_value_to_i64(y.value); - i64 max_index = hi; - if (op.kind == Token_RangeHalf) { - hi -= 1; - } - - bool new_range = range_cache_add_range(&rc, lo, hi); - if (!new_range) { - gbString lo_str = expr_to_string(x.expr); - gbString hi_str = expr_to_string(y.expr); - error(elem, "Overlapping field range index %s %.*s %s for %.*s", lo_str, LIT(op.string), hi_str, LIT(context_name)); - gb_string_free(hi_str); - gb_string_free(lo_str); - continue; - } - - - // NOTE(bill): These are sanity checks for invalid enum values - if (max_type_count >= 0 && (lo < total_lo || lo > total_hi)) { - gbString lo_str = expr_to_string(x.expr); - error(elem, "Index %s is out of bounds (%.*s .. %.*s) for %.*s", lo_str, LIT(total_lo_string), LIT(total_hi_string), LIT(context_name)); - gb_string_free(lo_str); - continue; - } - if (max_type_count >= 0 && (hi < 0 || hi > total_hi)) { - gbString hi_str = expr_to_string(y.expr); - error(elem, "Index %s is out of bounds (%.*s .. 
%.*s) for %.*s", hi_str, LIT(total_lo_string), LIT(total_hi_string), LIT(context_name)); - gb_string_free(hi_str); - continue; - } - - if (max < hi) { - max = max_index; - } - - Operand operand = {}; - check_expr_with_type_hint(c, &operand, fv->value, elem_type); - check_assignment(c, &operand, elem_type, context_name); - - is_constant = is_constant && operand.mode == Addressing_Constant; - } else { - Operand op_index = {}; - check_expr_with_type_hint(c, &op_index, fv->field, index_type); - - if (op_index.mode != Addressing_Constant || !are_types_identical(op_index.type, index_type)) { - error(op_index.expr, "Expected a constant enum of type '%s' as an array field", index_type_str); - continue; - } - - i64 index = exact_value_to_i64(op_index.value); - - if (max_type_count >= 0 && (index < total_lo || index > total_hi)) { - gbString idx_str = expr_to_string(op_index.expr); - error(elem, "Index %s is out of bounds (%.*s .. %.*s) for %.*s", idx_str, LIT(total_lo_string), LIT(total_hi_string), LIT(context_name)); - gb_string_free(idx_str); - continue; - } - - bool new_index = range_cache_add_index(&rc, index); - if (!new_index) { - gbString idx_str = expr_to_string(op_index.expr); - error(elem, "Duplicate field index %s for %.*s", idx_str, LIT(context_name)); - gb_string_free(idx_str); - continue; - } - - if (max < index+1) { - max = index+1; - } - - Operand operand = {}; - check_expr_with_type_hint(c, &operand, fv->value, elem_type); - check_assignment(c, &operand, elem_type, context_name); - - is_constant = is_constant && operand.mode == Addressing_Constant; - } - } - - cl->max_count = max; - - } else { - isize index = 0; - for (; index < cl->elems.count; index++) { - Ast *e = cl->elems[index]; - if (e == nullptr) { - error(node, "Invalid literal element"); - continue; - } - - if (e->kind == Ast_FieldValue) { - error(e, "Mixture of 'field = value' and value elements in a literal is not allowed"); - continue; - } - - if (0 <= max_type_count && max_type_count <= index) { - error(e, "Index %lld is out of bounds (>= %lld) for %.*s", index, max_type_count, LIT(context_name)); - } - - Operand operand = {}; - check_expr_with_type_hint(c, &operand, e, elem_type); - check_assignment(c, &operand, elem_type, context_name); - - is_constant = is_constant && operand.mode == Addressing_Constant; - } - - if (max < index) { - max = index; - } - } - - if (cl->elems.count > 0 && cl->elems[0]->kind != Ast_FieldValue) { - if (0 < max && max < t->EnumeratedArray.count) { - error(node, "Expected %lld values for this enumerated array literal, got %lld", cast(long long)t->EnumeratedArray.count, cast(long long)max); - } else { - error(node, "Enumerated array literals must only have 'field = value' elements, bare elements are not allowed"); - } - } - - break; - } - - case Type_Basic: { - if (!is_type_any(t)) { - if (cl->elems.count != 0) { - error(node, "Illegal compound literal"); - } - break; - } - if (cl->elems.count == 0) { - break; // NOTE(bill): No need to init - } - { // Checker values - Type *field_types[2] = {t_rawptr, t_typeid}; - isize field_count = 2; - if (cl->elems[0]->kind == Ast_FieldValue) { - bool fields_visited[2] = {}; - - for_array(i, cl->elems) { - Ast *elem = cl->elems[i]; - if (elem->kind != Ast_FieldValue) { - error(elem, "Mixture of 'field = value' and value elements in a 'any' literal is not allowed"); - continue; - } - ast_node(fv, FieldValue, elem); - if (fv->field->kind != Ast_Ident) { - gbString expr_str = expr_to_string(fv->field); - error(elem, "Invalid field name '%s' in 'any' 
literal", expr_str); - gb_string_free(expr_str); - continue; - } - String name = fv->field->Ident.token.string; - - Selection sel = lookup_field(type, name, o->mode == Addressing_Type); - if (sel.entity == nullptr) { - error(elem, "Unknown field '%.*s' in 'any' literal", LIT(name)); - continue; - } - - isize index = sel.index[0]; - - if (fields_visited[index]) { - error(elem, "Duplicate field '%.*s' in 'any' literal", LIT(name)); - continue; - } - - fields_visited[index] = true; - check_expr(c, o, fv->value); - - // NOTE(bill): 'any' literals can never be constant - is_constant = false; - - check_assignment(c, o, field_types[index], str_lit("'any' literal")); - } - } else { - for_array(index, cl->elems) { - Ast *elem = cl->elems[index]; - if (elem->kind == Ast_FieldValue) { - error(elem, "Mixture of 'field = value' and value elements in a 'any' literal is not allowed"); - continue; - } - - - check_expr(c, o, elem); - if (index >= field_count) { - error(o->expr, "Too many values in 'any' literal, expected %td", field_count); - break; - } - - // NOTE(bill): 'any' literals can never be constant - is_constant = false; - - check_assignment(c, o, field_types[index], str_lit("'any' literal")); - } - if (cl->elems.count < field_count) { - error(cl->close, "Too few values in 'any' literal, expected %td, got %td", field_count, cl->elems.count); - } - } - } - - break; - } - - case Type_Map: { - if (cl->elems.count == 0) { - break; - } - is_constant = false; - { // Checker values - bool key_is_typeid = is_type_typeid(t->Map.key); - bool value_is_typeid = is_type_typeid(t->Map.value); - - for_array(i, cl->elems) { - Ast *elem = cl->elems[i]; - if (elem->kind != Ast_FieldValue) { - error(elem, "Only 'field = value' elements are allowed in a map literal"); - continue; - } - ast_node(fv, FieldValue, elem); - - if (key_is_typeid) { - check_expr_or_type(c, o, fv->field, t->Map.key); - } else { - check_expr_with_type_hint(c, o, fv->field, t->Map.key); - } - check_assignment(c, o, t->Map.key, str_lit("map literal")); - if (o->mode == Addressing_Invalid) { - continue; - } - - if (value_is_typeid) { - check_expr_or_type(c, o, fv->value, t->Map.value); - } else { - check_expr_with_type_hint(c, o, fv->value, t->Map.value); - } - check_assignment(c, o, t->Map.value, str_lit("map literal")); - } - } - - if (build_context.no_dynamic_literals && cl->elems.count) { - error(node, "Compound literals of dynamic types have been disabled"); - } else { - add_package_dependency(c, "runtime", "__dynamic_map_reserve"); - add_package_dependency(c, "runtime", "__dynamic_map_set"); - } - break; - } - - case Type_BitSet: { - if (cl->elems.count == 0) { - break; // NOTE(bill): No need to init - } - Type *et = base_type(t->BitSet.elem); - isize field_count = 0; - if (et->kind == Type_Enum) { - field_count = et->Enum.fields.count; - } - - if (cl->elems[0]->kind == Ast_FieldValue) { - error(cl->elems[0], "'field = value' in a bit_set a literal is not allowed"); - is_constant = false; - } else { - for_array(index, cl->elems) { - Ast *elem = cl->elems[index]; - if (elem->kind == Ast_FieldValue) { - error(elem, "'field = value' in a bit_set a literal is not allowed"); - continue; - } - - check_expr_with_type_hint(c, o, elem, et); - - if (is_constant) { - is_constant = o->mode == Addressing_Constant; - } - - check_assignment(c, o, t->BitSet.elem, str_lit("bit_set literal")); - if (o->mode == Addressing_Constant) { - i64 lower = t->BitSet.lower; - i64 upper = t->BitSet.upper; - i64 v = exact_value_to_i64(o->value); - if (lower <= v && v <= 
upper) { - // okay - } else { - error(elem, "Bit field value out of bounds, %lld not in the range %lld .. %lld", v, lower, upper); - continue; - } - } - } - break; - } - - default: { - if (cl->elems.count == 0) { - break; // NOTE(bill): No need to init - } - - gbString str = type_to_string(type); - error(node, "Invalid compound literal type '%s'", str); - gb_string_free(str); - return kind; - } - } - - if (is_constant) { - o->mode = Addressing_Constant; - - if (is_type_bit_set(type)) { - // NOTE(bill): Encode as an integer - - i64 lower = base_type(type)->BitSet.lower; - - u64 bits = 0; - for_array(index, cl->elems) { - Ast *elem = cl->elems[index]; - GB_ASSERT(elem->kind != Ast_FieldValue); - TypeAndValue tav = elem->tav; - ExactValue i = exact_value_to_integer(tav.value); - if (i.kind != ExactValue_Integer) { - continue; - } - i64 val = big_int_to_i64(&i.value_integer); - val -= lower; - u64 bit = u64(1ll<<val); - bits |= bit; - } - o->value = exact_value_u64(bits); - } else if (is_type_constant_type(type) && cl->elems.count == 0) { - ExactValue value = exact_value_compound(node); - Type *bt = core_type(type); - if (bt->kind == Type_Basic) { - if (bt->Basic.flags & BasicFlag_Boolean) { - value = exact_value_bool(false); - } else if (bt->Basic.flags & BasicFlag_Integer) { - value = exact_value_i64(0); - } else if (bt->Basic.flags & BasicFlag_Unsigned) { - value = exact_value_i64(0); - } else if (bt->Basic.flags & BasicFlag_Float) { - value = exact_value_float(0); - } else if (bt->Basic.flags & BasicFlag_Complex) { - value = exact_value_complex(0, 0); - } else if (bt->Basic.flags & BasicFlag_Quaternion) { - value = exact_value_quaternion(0, 0, 0, 0); - } else if (bt->Basic.flags & BasicFlag_Pointer) { - value = exact_value_pointer(0); - } else if (bt->Basic.flags & BasicFlag_String) { - String empty_string = {}; - value = exact_value_string(empty_string); - } else if (bt->Basic.flags & BasicFlag_Rune) { - value = exact_value_i64(0); - } - } - - o->value = value; - } else { - o->value = exact_value_compound(node); - } - } else { - o->mode = Addressing_Value; - } - o->type = type; + kind = check_compound_literal(c, o, node, type_hint); case_end; case_ast_node(pe, ParenExpr, node); @@ -8171,127 +9084,7 @@ ExprKind check_expr_base_internal(CheckerContext *c, Operand *o, Ast *node, Type case_end; case_ast_node(ta, TypeAssertion, node); - check_expr(c, o, ta->expr); - node->viral_state_flags |= ta->expr->viral_state_flags; - - if (o->mode == Addressing_Invalid) { - o->expr = node; - return kind; - } - if (o->mode == Addressing_Constant) { - gbString expr_str = expr_to_string(o->expr); - error(o->expr, "A type assertion cannot be applied to a constant expression: '%s'", expr_str); - gb_string_free(expr_str); - o->mode = Addressing_Invalid; - o->expr = node; - return kind; - } - - if (is_type_untyped(o->type)) { - gbString expr_str = expr_to_string(o->expr); - error(o->expr, "A type assertion cannot be applied to an untyped expression: '%s'", expr_str); - gb_string_free(expr_str); - o->mode = Addressing_Invalid; - o->expr = node; - return kind; - } - - Type *src = type_deref(o->type); - Type *bsrc = base_type(src); - - - if (ta->type != nullptr && ta->type->kind == Ast_UnaryExpr && ta->type->UnaryExpr.op.kind == Token_Question) { - if (!is_type_union(src)) { - gbString str = type_to_string(o->type); - error(o->expr, "Type assertions with .? 
can only operate on unions, got %s", str); - gb_string_free(str); - o->mode = Addressing_Invalid; - o->expr = node; - return kind; - } - - if (bsrc->Union.variants.count != 1 && type_hint != nullptr) { - bool allowed = false; - for_array(i, bsrc->Union.variants) { - Type *vt = bsrc->Union.variants[i]; - if (are_types_identical(vt, type_hint)) { - allowed = true; - add_type_info_type(c, vt); - break; - } - } - if (allowed) { - add_type_info_type(c, o->type); - o->type = type_hint; - o->mode = Addressing_OptionalOk; - return kind; - } - } - - if (bsrc->Union.variants.count != 1) { - error(o->expr, "Type assertions with .? can only operate on unions with 1 variant, got %lld", cast(long long)bsrc->Union.variants.count); - o->mode = Addressing_Invalid; - o->expr = node; - return kind; - } - - add_type_info_type(c, o->type); - add_type_info_type(c, bsrc->Union.variants[0]); - - o->type = bsrc->Union.variants[0]; - o->mode = Addressing_OptionalOk; - } else { - Type *t = check_type(c, ta->type); - Type *dst = t; - - if (is_type_union(src)) { - bool ok = false; - for_array(i, bsrc->Union.variants) { - Type *vt = bsrc->Union.variants[i]; - if (are_types_identical(vt, dst)) { - ok = true; - break; - } - } - - if (!ok) { - gbString expr_str = expr_to_string(o->expr); - gbString dst_type_str = type_to_string(t); - defer (gb_string_free(expr_str)); - defer (gb_string_free(dst_type_str)); - if (bsrc->Union.variants.count == 0) { - error(o->expr, "Cannot type assert '%s' to '%s' as this is an empty union", expr_str, dst_type_str); - } else { - error(o->expr, "Cannot type assert '%s' to '%s' as it is not a variant of that union", expr_str, dst_type_str); - } - o->mode = Addressing_Invalid; - o->expr = node; - return kind; - } - - add_type_info_type(c, o->type); - add_type_info_type(c, t); - - o->type = t; - o->mode = Addressing_OptionalOk; - } else if (is_type_any(src)) { - o->type = t; - o->mode = Addressing_OptionalOk; - - add_type_info_type(c, o->type); - add_type_info_type(c, t); - } else { - gbString str = type_to_string(o->type); - error(o->expr, "Type assertions can only operate on unions and 'any', got %s", str); - gb_string_free(str); - o->mode = Addressing_Invalid; - o->expr = node; - return kind; - } - } - - add_package_dependency(c, "runtime", "type_assertion_check"); - add_package_dependency(c, "runtime", "type_assertion_check2"); + kind = check_type_assertion(c, o, node, type_hint); case_end; case_ast_node(tc, TypeCast, node); @@ -8379,443 +9172,19 @@ ExprKind check_expr_base_internal(CheckerContext *c, Operand *o, Ast *node, Type case_end; case_ast_node(se, SelectorCallExpr, node); - // IMPORTANT NOTE(bill, 2020-05-22): This is a complete hack to get a shorthand which is extremely useful for vtables - // COM APIs is a great example of where this kind of thing is extremely useful - // General idea: - // - // x->y(123) == x.y(x, 123) - // - // How this has been implemented at the moment is quite hacky but it's done so to reduce need for huge backend changes - // Just regenerating a new AST aids things - // - // TODO(bill): Is this a good hack or not? - // - // NOTE(bill, 2020-05-22): I'm going to regret this decision, ain't I? 
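// [Editor's illustrative sketch, not part of the patch] The shorthand described in the NOTE above
// rewrites an arrow call so that the selector's base expression becomes the first argument of the
// call. Assuming a hypothetical vtable-style value 'device' with a procedure field 'query', the
// checker treats
//
//     device->query(123)
//
// as if the source had been written
//
//     device.query(device, 123)
//
// taking the address of, or dereferencing, 'device' where needed so that it is assignable to the
// procedure's first parameter (see the AST generation hacks in the code that follows).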
- - - if (se->modified_call) { - // Prevent double evaluation - o->expr = node; - o->type = node->tav.type; - o->value = node->tav.value; - o->mode = node->tav.mode; - return Expr_Expr; - } - - bool allow_arrow_right_selector_expr; - allow_arrow_right_selector_expr = c->allow_arrow_right_selector_expr; - c->allow_arrow_right_selector_expr = true; - Operand x = {}; - ExprKind kind = check_expr_base(c, &x, se->expr, nullptr); - c->allow_arrow_right_selector_expr = allow_arrow_right_selector_expr; - - if (x.mode == Addressing_Invalid || x.type == t_invalid) { - o->mode = Addressing_Invalid; - o->type = t_invalid; - o->expr = node; - return kind; - } - if (!is_type_proc(x.type)) { - gbString type_str = type_to_string(x.type); - error(se->call, "Selector call expressions expect a procedure type for the call, got '%s'", type_str); - gb_string_free(type_str); - - o->mode = Addressing_Invalid; - o->type = t_invalid; - o->expr = node; - return Expr_Stmt; - } - - ast_node(ce, CallExpr, se->call); - - GB_ASSERT(x.expr->kind == Ast_SelectorExpr); - - Ast *first_arg = x.expr->SelectorExpr.expr; - GB_ASSERT(first_arg != nullptr); - - Type *pt = base_type(x.type); - GB_ASSERT(pt->kind == Type_Proc); - Type *first_type = nullptr; - String first_arg_name = {}; - if (pt->Proc.param_count > 0) { - Entity *f = pt->Proc.params->Tuple.variables[0]; - first_type = f->type; - first_arg_name = f->token.string; - } - if (first_arg_name.len == 0) { - first_arg_name = str_lit("_"); - } - - if (first_type == nullptr) { - error(se->call, "Selector call expressions expect a procedure type for the call with at least 1 parameter"); - o->mode = Addressing_Invalid; - o->type = t_invalid; - o->expr = node; - return Expr_Stmt; - } - - Operand y = {}; - y.mode = first_arg->tav.mode; - y.type = first_arg->tav.type; - y.value = first_arg->tav.value; - if (check_is_assignable_to(c, &y, first_type)) { - // Do nothing, it's valid - } else { - Operand z = y; - z.type = type_deref(y.type); - if (check_is_assignable_to(c, &z, first_type)) { - // NOTE(bill): AST GENERATION HACK! - Token op = {Token_Pointer}; - first_arg = ast_deref_expr(first_arg->file(), first_arg, op); - } else if (y.mode == Addressing_Variable) { - Operand w = y; - w.type = alloc_type_pointer(y.type); - if (check_is_assignable_to(c, &w, first_type)) { - // NOTE(bill): AST GENERATION HACK! 
- Token op = {Token_And}; - first_arg = ast_unary_expr(first_arg->file(), op, first_arg); - } - } - } - - if (ce->args.count > 0) { - bool fail = false; - bool first_is_field_value = (ce->args[0]->kind == Ast_FieldValue); - for_array(i, ce->args) { - Ast *arg = ce->args[i]; - bool mix = false; - if (first_is_field_value) { - mix = arg->kind != Ast_FieldValue; - } else { - mix = arg->kind == Ast_FieldValue; - } - if (mix) { - fail = true; - break; - } - } - if (!fail && first_is_field_value) { - Token op = {Token_Eq}; - AstFile *f = first_arg->file(); - first_arg = ast_field_value(f, ast_ident(f, make_token_ident(first_arg_name)), first_arg, op); - } - } - - - - auto modified_args = slice_make(heap_allocator(), ce->args.count+1); - modified_args[0] = first_arg; - slice_copy(&modified_args, ce->args, 1); - ce->args = modified_args; - se->modified_call = true; - - allow_arrow_right_selector_expr = c->allow_arrow_right_selector_expr; - c->allow_arrow_right_selector_expr = true; - check_expr_base(c, o, se->call, type_hint); - c->allow_arrow_right_selector_expr = allow_arrow_right_selector_expr; - - o->expr = node; - return Expr_Expr; + return check_selector_call_expr(c, o, node, type_hint); case_end; - case_ast_node(ise, ImplicitSelectorExpr, node); return check_implicit_selector_expr(c, o, node, type_hint); case_end; case_ast_node(ie, IndexExpr, node); - check_expr(c, o, ie->expr); - node->viral_state_flags |= ie->expr->viral_state_flags; - if (o->mode == Addressing_Invalid) { - o->expr = node; - return kind; - } - - Type *t = base_type(type_deref(o->type)); - bool is_ptr = is_type_pointer(o->type); - bool is_const = o->mode == Addressing_Constant; - - if (is_type_map(t)) { - Operand key = {}; - if (is_type_typeid(t->Map.key)) { - check_expr_or_type(c, &key, ie->index, t->Map.key); - } else { - check_expr_with_type_hint(c, &key, ie->index, t->Map.key); - } - check_assignment(c, &key, t->Map.key, str_lit("map index")); - if (key.mode == Addressing_Invalid) { - o->mode = Addressing_Invalid; - o->expr = node; - return kind; - } - o->mode = Addressing_MapIndex; - o->type = t->Map.value; - o->expr = node; - - add_package_dependency(c, "runtime", "__dynamic_map_get"); - add_package_dependency(c, "runtime", "__dynamic_map_set"); - return Expr_Expr; - } - - i64 max_count = -1; - bool valid = check_set_index_data(o, t, is_ptr, &max_count, o->type); - - if (is_const) { - if (is_type_array(t)) { - // OKay - } else if (is_type_slice(t)) { - // Okay - } else if (is_type_enumerated_array(t)) { - // Okay - } else if (is_type_string(t)) { - // Okay - } else if (is_type_relative_slice(t)) { - // Okay - } else if (is_type_matrix(t)) { - // Okay - } else { - valid = false; - } - } - - if (!valid) { - gbString str = expr_to_string(o->expr); - gbString type_str = type_to_string(o->type); - defer (gb_string_free(str)); - defer (gb_string_free(type_str)); - if (is_const) { - error(o->expr, "Cannot index constant '%s' of type '%s'", str, type_str); - } else { - error(o->expr, "Cannot index '%s' of type '%s'", str, type_str); - } - o->mode = Addressing_Invalid; - o->expr = node; - return kind; - } - - if (ie->index == nullptr) { - gbString str = expr_to_string(o->expr); - error(o->expr, "Missing index for '%s'", str); - gb_string_free(str); - o->mode = Addressing_Invalid; - o->expr = node; - return kind; - } - - Type *index_type_hint = nullptr; - if (is_type_enumerated_array(t)) { - Type *bt = base_type(t); - GB_ASSERT(bt->kind == Type_EnumeratedArray); - index_type_hint = bt->EnumeratedArray.index; - } - - i64 index 
= 0; - bool ok = check_index_value(c, t, false, ie->index, max_count, &index, index_type_hint); - if (is_const) { - if (index < 0) { - gbString str = expr_to_string(o->expr); - error(o->expr, "Cannot index a constant '%s'", str); - error_line("\tSuggestion: store the constant into a variable in order to index it with a variable index\n"); - gb_string_free(str); - o->mode = Addressing_Invalid; - o->expr = node; - return kind; - } else if (ok) { - ExactValue value = type_and_value_of_expr(ie->expr).value; - o->mode = Addressing_Constant; - bool success = false; - bool finish = false; - o->value = get_constant_field_single(c, value, cast(i32)index, &success, &finish); - if (!success) { - gbString str = expr_to_string(o->expr); - error(o->expr, "Cannot index a constant '%s' with index %lld", str, cast(long long)index); - error_line("\tSuggestion: store the constant into a variable in order to index it with a variable index\n"); - gb_string_free(str); - o->mode = Addressing_Invalid; - o->expr = node; - return kind; - } - } - } - - if (type_hint != nullptr && is_type_matrix(t)) { - // TODO(bill): allow matrix columns to be assignable to other types which are the same internally - // if a type hint exists - } - + kind = check_index_expr(c, o, node, type_hint); case_end; case_ast_node(se, SliceExpr, node); - check_expr(c, o, se->expr); - node->viral_state_flags |= se->expr->viral_state_flags; - - if (o->mode == Addressing_Invalid) { - o->mode = Addressing_Invalid; - o->expr = node; - return kind; - } - - bool valid = false; - i64 max_count = -1; - Type *t = base_type(type_deref(o->type)); - switch (t->kind) { - case Type_Basic: - if (t->Basic.kind == Basic_string || t->Basic.kind == Basic_UntypedString) { - valid = true; - if (o->mode == Addressing_Constant) { - max_count = o->value.value_string.len; - } - o->type = type_deref(o->type); - } - break; - - case Type_Array: - valid = true; - max_count = t->Array.count; - if (o->mode != Addressing_Variable && !is_type_pointer(o->type)) { - gbString str = expr_to_string(node); - error(node, "Cannot slice array '%s', value is not addressable", str); - gb_string_free(str); - o->mode = Addressing_Invalid; - o->expr = node; - return kind; - } - o->type = alloc_type_slice(t->Array.elem); - break; - - case Type_MultiPointer: - valid = true; - o->type = type_deref(o->type); - break; - - case Type_Slice: - valid = true; - o->type = type_deref(o->type); - break; - - case Type_DynamicArray: - valid = true; - o->type = alloc_type_slice(t->DynamicArray.elem); - break; - - case Type_Struct: - if (is_type_soa_struct(t)) { - valid = true; - o->type = make_soa_struct_slice(c, nullptr, nullptr, t->Struct.soa_elem); - } - break; - - case Type_RelativeSlice: - valid = true; - o->type = t->RelativeSlice.slice_type; - if (o->mode != Addressing_Variable) { - gbString str = expr_to_string(node); - error(node, "Cannot relative slice '%s', value is not addressable", str); - gb_string_free(str); - o->mode = Addressing_Invalid; - o->expr = node; - return kind; - } - break; - } - - if (!valid) { - gbString str = expr_to_string(o->expr); - gbString type_str = type_to_string(o->type); - error(o->expr, "Cannot slice '%s' of type '%s'", str, type_str); - gb_string_free(type_str); - gb_string_free(str); - o->mode = Addressing_Invalid; - o->expr = node; - return kind; - } - - if (se->low == nullptr && se->high != nullptr) { - // It is okay to continue as it will assume the 1st index is zero - } - - i64 indices[2] = {}; - Ast *nodes[2] = {se->low, se->high}; - for (isize i = 0; i < 
gb_count_of(nodes); i++) { - i64 index = max_count; - if (nodes[i] != nullptr) { - i64 capacity = -1; - if (max_count >= 0) { - capacity = max_count; - } - i64 j = 0; - if (check_index_value(c, t, true, nodes[i], capacity, &j)) { - index = j; - } - - node->viral_state_flags |= nodes[i]->viral_state_flags; - } else if (i == 0) { - index = 0; - } - indices[i] = index; - } - - for (isize i = 0; i < gb_count_of(indices); i++) { - i64 a = indices[i]; - for (isize j = i+1; j < gb_count_of(indices); j++) { - i64 b = indices[j]; - if (a > b && b >= 0) { - error(se->close, "Invalid slice indices: [%td > %td]", a, b); - } - } - } - - if (max_count < 0) { - if (o->mode == Addressing_Constant) { - gbString s = expr_to_string(se->expr); - error(se->expr, "Cannot slice constant value '%s'", s); - gb_string_free(s); - } - } - - if (t->kind == Type_MultiPointer && se->high != nullptr) { - /* - x[:] -> [^]T - x[i:] -> [^]T - x[:n] -> []T - x[i:n] -> []T - */ - o->type = alloc_type_slice(t->MultiPointer.elem); - } - - o->mode = Addressing_Value; - - if (is_type_string(t) && max_count >= 0) { - bool all_constant = true; - for (isize i = 0; i < gb_count_of(nodes); i++) { - if (nodes[i] != nullptr) { - TypeAndValue tav = type_and_value_of_expr(nodes[i]); - if (tav.mode != Addressing_Constant) { - all_constant = false; - break; - } - } - } - if (!all_constant) { - gbString str = expr_to_string(o->expr); - error(o->expr, "Cannot slice '%s' with non-constant indices", str); - error_line("\tSuggestion: store the constant into a variable in order to index it with a variable index\n"); - gb_string_free(str); - o->mode = Addressing_Value; // NOTE(bill): Keep subsequent values going without erring - o->expr = node; - return kind; - } - - String s = {}; - if (o->value.kind == ExactValue_String) { - s = o->value.value_string; - } - - o->mode = Addressing_Constant; - o->type = t; - o->value = exact_value_string(substring(s, cast(isize)indices[0], cast(isize)indices[1])); - } - + kind = check_slice_expr(c, o, node, type_hint); case_end; case_ast_node(mie, MatrixIndexExpr, node); @@ -8940,6 +9309,8 @@ ExprKind check_expr_base_internal(CheckerContext *c, Operand *o, Ast *node, Type return kind; } + + ExprKind check_expr_base(CheckerContext *c, Operand *o, Ast *node, Type *type_hint) { ExprKind kind = check_expr_base_internal(c, o, node, type_hint); if (o->type != nullptr && core_type(o->type) == nullptr) { @@ -9114,18 +9485,7 @@ gbString string_append_string(gbString str, String string) { gbString string_append_token(gbString str, Token token) { - if (token.kind == Token_String) { - str = gb_string_append_rune(str, '"'); - } else if (token.kind == Token_Rune) { - str = gb_string_append_rune(str, '\''); - } str = string_append_string(str, token.string); - if (token.kind == Token_String) { - str = gb_string_append_rune(str, '"'); - } else if (token.kind == Token_Rune) { - str = gb_string_append_rune(str, '\''); - } - return str; } @@ -9352,6 +9712,13 @@ gbString write_expr_to_string(gbString str, Ast *node, bool shorthand) { str = gb_string_appendc(str, " = "); str = write_expr_to_string(str, fv->value, shorthand); case_end; + case_ast_node(fv, EnumFieldValue, node); + str = write_expr_to_string(str, fv->name, shorthand); + if (fv->value) { + str = gb_string_appendc(str, " = "); + str = write_expr_to_string(str, fv->value, shorthand); + } + case_end; case_ast_node(ht, HelperType, node); str = gb_string_appendc(str, "#type "); diff --git a/src/check_stmt.cpp b/src/check_stmt.cpp index 94b7561c7..7cae1893f 100644 --- 
a/src/check_stmt.cpp +++ b/src/check_stmt.cpp @@ -490,6 +490,14 @@ void check_stmt(CheckerContext *ctx, Ast *node, u32 flags) { out &= ~StateFlag_no_bounds_check; } + if (in & StateFlag_no_type_assert) { + out |= StateFlag_no_type_assert; + out &= ~StateFlag_type_assert; + } else if (in & StateFlag_type_assert) { + out |= StateFlag_type_assert; + out &= ~StateFlag_no_type_assert; + } + ctx->state_flags = out; } @@ -689,54 +697,6 @@ bool check_using_stmt_entity(CheckerContext *ctx, AstUsingStmt *us, Ast *expr, b return true; } - -struct TypeAndToken { - Type *type; - Token token; -}; - - -void add_constant_switch_case(CheckerContext *ctx, PtrMap *seen, Operand operand, bool use_expr = true) { - if (operand.mode != Addressing_Constant) { - return; - } - if (operand.value.kind == ExactValue_Invalid) { - return; - } - - uintptr key = hash_exact_value(operand.value); - TypeAndToken *found = map_get(seen, key); - if (found != nullptr) { - isize count = multi_map_count(seen, key); - TypeAndToken *taps = gb_alloc_array(temporary_allocator(), TypeAndToken, count); - - multi_map_get_all(seen, key, taps); - for (isize i = 0; i < count; i++) { - TypeAndToken tap = taps[i]; - if (!are_types_identical(operand.type, tap.type)) { - continue; - } - - TokenPos pos = tap.token.pos; - if (use_expr) { - gbString expr_str = expr_to_string(operand.expr); - error(operand.expr, - "Duplicate case '%s'\n" - "\tprevious case at %s", - expr_str, - token_pos_to_string(pos)); - gb_string_free(expr_str); - } else { - error(operand.expr, "Duplicate case found with previous case at %s", token_pos_to_string(pos)); - } - return; - } - } - - TypeAndToken tap = {operand.type, ast_token(operand.expr)}; - multi_map_insert(seen, key, tap); -} - void check_inline_range_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags) { ast_node(irs, UnrollRangeStmt, node); check_open_scope(ctx, node); @@ -961,7 +921,7 @@ void check_switch_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags) { } } - PtrMap seen = {}; // NOTE(bill): Multimap, Key: ExactValue + SeenMap seen = {}; // NOTE(bill): Multimap, Key: ExactValue map_init(&seen, heap_allocator()); defer (map_destroy(&seen)); @@ -1001,9 +961,9 @@ void check_switch_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags) { TokenKind upper_op = Token_Invalid; switch (be->op.kind) { - case Token_Ellipsis: upper_op = Token_GtEq; break; - case Token_RangeFull: upper_op = Token_GtEq; break; - case Token_RangeHalf: upper_op = Token_Gt; break; + case Token_Ellipsis: upper_op = Token_LtEq; break; + case Token_RangeFull: upper_op = Token_LtEq; break; + case Token_RangeHalf: upper_op = Token_Lt; break; default: GB_PANIC("Invalid range operator"); break; } @@ -1024,45 +984,7 @@ void check_switch_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags) { Operand b1 = rhs; check_comparison(ctx, &a1, &b1, Token_LtEq); - if (is_type_enum(x.type)) { - // TODO(bill): Fix this logic so it's fast!!! 
- - i64 v0 = exact_value_to_i64(lhs.value); - i64 v1 = exact_value_to_i64(rhs.value); - Operand v = {}; - v.mode = Addressing_Constant; - v.type = x.type; - v.expr = x.expr; - - Type *bt = base_type(x.type); - GB_ASSERT(bt->kind == Type_Enum); - for (i64 vi = v0; vi <= v1; vi++) { - if (upper_op != Token_GtEq && vi == v1) { - break; - } - - bool found = false; - for_array(j, bt->Enum.fields) { - Entity *f = bt->Enum.fields[j]; - GB_ASSERT(f->kind == Entity_Constant); - - i64 fv = exact_value_to_i64(f->Constant.value); - if (fv == vi) { - found = true; - break; - } - } - if (found) { - v.value = exact_value_i64(vi); - add_constant_switch_case(ctx, &seen, v); - } - } - } else { - add_constant_switch_case(ctx, &seen, lhs); - if (upper_op == Token_GtEq) { - add_constant_switch_case(ctx, &seen, rhs); - } - } + add_to_seen_map(ctx, &seen, upper_op, x, lhs, rhs); if (is_type_string(x.type)) { // NOTE(bill): Force dependency for strings here @@ -1107,7 +1029,7 @@ void check_switch_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags) { continue; } update_untyped_expr_type(ctx, z.expr, x.type, !is_type_untyped(x.type)); - add_constant_switch_case(ctx, &seen, y); + add_to_seen_map(ctx, &seen, y); } } } @@ -1143,7 +1065,7 @@ void check_switch_stmt(CheckerContext *ctx, Ast *node, u32 mod_flags) { if (unhandled.count == 1) { error_no_newline(node, "Unhandled switch case: %.*s", LIT(unhandled[0]->token.string)); } else { - error_no_newline(node, "Unhandled switch cases: "); + error(node, "Unhandled switch cases:"); for_array(i, unhandled) { Entity *f = unhandled[i]; error_line("\t%.*s\n", LIT(f->token.string)); diff --git a/src/check_type.cpp b/src/check_type.cpp index a5a757f3e..6d3e32466 100644 --- a/src/check_type.cpp +++ b/src/check_type.cpp @@ -120,6 +120,8 @@ void check_struct_fields(CheckerContext *ctx, Ast *node, Slice *fields ast_node(p, Field, param); Ast *type_expr = p->type; Type *type = nullptr; + CommentGroup *docs = p->docs; + CommentGroup *comment = p->comment; if (type_expr != nullptr) { type = check_type_expr(ctx, type_expr, nullptr); @@ -156,6 +158,14 @@ void check_struct_fields(CheckerContext *ctx, Ast *node, Slice *fields Entity *field = alloc_entity_field(ctx->scope, name_token, type, is_using, field_src_index); add_entity(ctx, ctx->scope, name, field); field->Variable.field_group_index = field_group_index; + + if (j == 0) { + field->Variable.docs = docs; + } + if (j+1 == p->names.count) { + field->Variable.comment = comment; + } + array_add(&fields_array, field); String tag = p->tag.string; if (tag.len != 0 && !unquote_string(permanent_allocator(), &tag, 0, tag.text[0] == '`')) { @@ -722,20 +732,19 @@ void check_enum_type(CheckerContext *ctx, Type *enum_type, Type *named_type, Ast Ast *ident = nullptr; Ast *init = nullptr; u32 entity_flags = 0; - if (field->kind == Ast_FieldValue) { - ast_node(fv, FieldValue, field); - if (fv->field == nullptr || fv->field->kind != Ast_Ident) { - error(field, "An enum field's name must be an identifier"); - continue; - } - ident = fv->field; - init = fv->value; - } else if (field->kind == Ast_Ident) { - ident = field; - } else { + if (field->kind != Ast_EnumFieldValue) { error(field, "An enum field's name must be an identifier"); continue; } + ident = field->EnumFieldValue.name; + init = field->EnumFieldValue.value; + if (ident == nullptr || ident->kind != Ast_Ident) { + error(field, "An enum field's name must be an identifier"); + continue; + } + CommentGroup *docs = field->EnumFieldValue.docs; + CommentGroup *comment = 
field->EnumFieldValue.comment; + String name = ident->Ident.token.string; if (init != nullptr) { @@ -793,6 +802,8 @@ void check_enum_type(CheckerContext *ctx, Type *enum_type, Type *named_type, Ast e->flags |= EntityFlag_Visited; e->state = EntityState_Resolved; e->Constant.flags |= entity_flags; + e->Constant.docs = docs; + e->Constant.comment = comment; if (scope_lookup_current(ctx->scope, name) != nullptr) { error(ident, "'%.*s' is already declared in this enumeration", LIT(name)); @@ -2702,29 +2713,30 @@ bool check_type_internal(CheckerContext *ctx, Ast *e, Type **type, Type *named_t Type *t = alloc_type_enumerated_array(elem, index, bt->Enum.min_value, bt->Enum.max_value, Token_Invalid); - bool is_partial = false; + bool is_sparse = false; if (at->tag != nullptr) { GB_ASSERT(at->tag->kind == Ast_BasicDirective); String name = at->tag->BasicDirective.name.string; - if (name == "partial") { - is_partial = true; + if (name == "sparse") { + is_sparse = true; } else { error(at->tag, "Invalid tag applied to an enumerated array, got #%.*s", LIT(name)); } } - if (!is_partial && t->EnumeratedArray.count > bt->Enum.fields.count) { + if (!is_sparse && t->EnumeratedArray.count > bt->Enum.fields.count) { error(e, "Non-contiguous enumeration used as an index in an enumerated array"); long long ea_count = cast(long long)t->EnumeratedArray.count; long long enum_count = cast(long long)bt->Enum.fields.count; error_line("\tenumerated array length: %lld\n", ea_count); error_line("\tenum field count: %lld\n", enum_count); - error_line("\tSuggestion: prepend #partial to the enumerated array to allow for non-named elements\n"); + error_line("\tSuggestion: prepend #sparse to the enumerated array to allow for non-contiguous elements\n"); if (2*enum_count < ea_count) { error_line("\tWarning: the number of named elements is much smaller than the length of the array, are you sure this is what you want?\n"); - error_line("\t this warning will be removed if #partial is applied\n"); + error_line("\t this warning will be removed if #sparse is applied\n"); } } + t->EnumeratedArray.is_sparse = is_sparse; *type = t; diff --git a/src/checker.cpp b/src/checker.cpp index 055d0d356..e1c827529 100644 --- a/src/checker.cpp +++ b/src/checker.cpp @@ -504,6 +504,7 @@ enum VettedEntityKind { VettedEntity_Unused, VettedEntity_Shadowed, + VettedEntity_Shadowed_And_Unused, }; struct VettedEntity { VettedEntityKind kind; @@ -625,12 +626,18 @@ void check_scope_usage(Checker *c, Scope *scope) { MUTEX_GUARD_BLOCK(scope->mutex) for_array(i, scope->elements.entries) { Entity *e = scope->elements.entries[i].value; if (e == nullptr) continue; - VettedEntity ve = {}; - if (vet_unused && check_vet_unused(c, e, &ve)) { - array_add(&vetted_entities, ve); - } - if (vet_shadowing && check_vet_shadowing(c, e, &ve)) { - array_add(&vetted_entities, ve); + VettedEntity ve_unused = {}; + VettedEntity ve_shadowed = {}; + bool is_unused = vet_unused && check_vet_unused(c, e, &ve_unused); + bool is_shadowed = vet_shadowing && check_vet_shadowing(c, e, &ve_shadowed); + if (is_unused && is_shadowed) { + VettedEntity ve_both = ve_shadowed; + ve_both.kind = VettedEntity_Shadowed_And_Unused; + array_add(&vetted_entities, ve_both); + } else if (is_unused) { + array_add(&vetted_entities, ve_unused); + } else if (is_shadowed) { + array_add(&vetted_entities, ve_shadowed); } } @@ -642,16 +649,18 @@ void check_scope_usage(Checker *c, Scope *scope) { Entity *other = ve.other; String name = e->token.string; - if (build_context.vet) { + if (ve.kind == 
VettedEntity_Shadowed_And_Unused) { + error(e->token, "'%.*s' declared but not used, possibly shadows declaration at line %d", LIT(name), other->token.pos.line); + } else if (build_context.vet) { switch (ve.kind) { case VettedEntity_Unused: error(e->token, "'%.*s' declared but not used", LIT(name)); break; case VettedEntity_Shadowed: if (e->flags&EntityFlag_Using) { - error(e->token, "Declaration of '%.*s' from 'using' shadows declaration at line %lld", LIT(name), cast(long long)other->token.pos.line); + error(e->token, "Declaration of '%.*s' from 'using' shadows declaration at line %d", LIT(name), other->token.pos.line); } else { - error(e->token, "Declaration of '%.*s' shadows declaration at line %lld", LIT(name), cast(long long)other->token.pos.line); + error(e->token, "Declaration of '%.*s' shadows declaration at line %d", LIT(name), other->token.pos.line); } break; default: @@ -688,12 +697,17 @@ void add_dependency(CheckerInfo *info, DeclInfo *d, Entity *e) { ptr_set_add(&d->deps, e); mutex_unlock(&info->deps_mutex); } -void add_type_info_dependency(DeclInfo *d, Type *type) { +void add_type_info_dependency(CheckerInfo *info, DeclInfo *d, Type *type, bool require_mutex) { if (d == nullptr) { return; } - // NOTE(bill): no mutex is required here because the only procedure calling it is wrapped in a mutex already + if (require_mutex) { + mutex_lock(&info->deps_mutex); + } ptr_set_add(&d->type_info_deps, type); + if (require_mutex) { + mutex_unlock(&info->deps_mutex); + } } AstPackage *get_core_package(CheckerInfo *info, String name) { @@ -919,6 +933,16 @@ void init_universal(void) { add_global_string_constant("ODIN_ENDIAN_STRING", target_endian_names[target_endians[bc->metrics.arch]]); } + { + GlobalEnumValue values[ErrorPosStyle_COUNT] = { + {"Default", ErrorPosStyle_Default}, + {"Unix", ErrorPosStyle_Unix}, + }; + + auto fields = add_global_enum_type(str_lit("Odin_Error_Pos_Style_Type"), values, gb_count_of(values)); + add_global_enum_constant(fields, "ODIN_ERROR_POS_STYLE", build_context.ODIN_ERROR_POS_STYLE); + } + add_global_bool_constant("ODIN_DEBUG", bc->ODIN_DEBUG); add_global_bool_constant("ODIN_DISABLE_ASSERT", bc->ODIN_DISABLE_ASSERT); @@ -1620,7 +1644,7 @@ void add_type_info_type_internal(CheckerContext *c, Type *t) { return; } - add_type_info_dependency(c->decl, t); + add_type_info_dependency(c->info, c->decl, t, false); auto found = map_get(&c->info->type_info_map, t); if (found != nullptr) { @@ -1749,6 +1773,7 @@ void add_type_info_type_internal(CheckerContext *c, Type *t) { } else { add_type_info_type_internal(c, t_type_info_ptr); } + add_type_info_type_internal(c, bt->Union.polymorphic_params); for_array(i, bt->Union.variants) { add_type_info_type_internal(c, bt->Union.variants[i]); } @@ -1772,6 +1797,7 @@ void add_type_info_type_internal(CheckerContext *c, Type *t) { } } } + add_type_info_type_internal(c, bt->Struct.polymorphic_params); for_array(i, bt->Struct.fields) { Entity *f = bt->Struct.fields[i]; add_type_info_type_internal(c, f->type); @@ -1965,6 +1991,7 @@ void add_min_dep_type_info(Checker *c, Type *t) { } else { add_min_dep_type_info(c, t_type_info_ptr); } + add_min_dep_type_info(c, bt->Union.polymorphic_params); for_array(i, bt->Union.variants) { add_min_dep_type_info(c, bt->Union.variants[i]); } @@ -1988,6 +2015,7 @@ void add_min_dep_type_info(Checker *c, Type *t) { } } } + add_min_dep_type_info(c, bt->Struct.polymorphic_params); for_array(i, bt->Struct.fields) { Entity *f = bt->Struct.fields[i]; add_min_dep_type_info(c, f->type); @@ -3479,9 +3507,12 @@ void 
check_collect_value_decl(CheckerContext *c, Ast *decl) { if (entity_visibility_kind == EntityVisiblity_Public && (c->scope->flags&ScopeFlag_File) && - c->scope->file && - (c->scope->file->flags & AstFile_IsPrivate)) { - entity_visibility_kind = EntityVisiblity_PrivateToPackage; + c->scope->file) { + if (c->scope->file->flags & AstFile_IsPrivateFile) { + entity_visibility_kind = EntityVisiblity_PrivateToFile; + } else if (c->scope->file->flags & AstFile_IsPrivatePkg) { + entity_visibility_kind = EntityVisiblity_PrivateToPackage; + } } if (entity_visibility_kind != EntityVisiblity_Public && !(c->scope->flags&ScopeFlag_File)) { @@ -3572,9 +3603,6 @@ void check_collect_value_decl(CheckerContext *c, Ast *decl) { if (is_ast_type(init)) { e = alloc_entity_type_name(d->scope, token, nullptr); - // if (vd->type != nullptr) { - // error(name, "A type declaration cannot have an type parameter"); - // } } else if (init->kind == Ast_ProcLit) { if (c->scope->flags&ScopeFlag_Type) { error(name, "Procedure declarations are not allowed within a struct"); @@ -3677,6 +3705,59 @@ void check_add_foreign_block_decl(CheckerContext *ctx, Ast *decl) { check_collect_entities(&c, block->stmts); } +bool correct_single_type_alias(CheckerContext *c, Entity *e) { + if (e->kind == Entity_Constant) { + DeclInfo *d = e->decl_info; + if (d != nullptr && d->init_expr != nullptr) { + Ast *init = d->init_expr; + Entity *alias_of = check_entity_from_ident_or_selector(c, init, true); + if (alias_of != nullptr && alias_of->kind == Entity_TypeName) { + e->kind = Entity_TypeName; + return true; + } + } + } + return false; +} + +bool correct_type_alias_in_scope_backwards(CheckerContext *c, Scope *s) { + isize n = s->elements.entries.count; + bool correction = false; + for (isize i = n-1; i >= 0; i--) { + correction |= correct_single_type_alias(c, s->elements.entries[i].value); + } + return correction; +} +bool correct_type_alias_in_scope_forwards(CheckerContext *c, Scope *s) { + isize n = s->elements.entries.count; + bool correction = false; + for (isize i = 0; i < n; i++) { + correction |= correct_single_type_alias(c, s->elements.entries[i].value); + } + return correction; +} + + +void correct_type_aliases_in_scope(CheckerContext *c, Scope *s) { + // NOTE(bill, 2022-02-04): This is used to solve the problem caused by type aliases + // of type aliases being "confused" as constants + // + // A :: C + // B :: A + // C :: struct {b: ^B} + // + // See @TypeAliasingProblem for more information + for (;;) { + bool corrections = false; + corrections |= correct_type_alias_in_scope_backwards(c, s); + corrections |= correct_type_alias_in_scope_forwards(c, s); + if (!corrections) { + return; + } + } +} + + // NOTE(bill): If file_scopes == nullptr, this will act like a local scope void check_collect_entities(CheckerContext *c, Slice const &nodes) { AstFile *curr_file = nullptr; @@ -3748,6 +3829,7 @@ void check_collect_entities(CheckerContext *c, Slice const &nodes) { } } + // correct_type_aliases(c); // NOTE(bill): 'when' stmts need to be handled after the other as the condition may refer to something // declared after this stmt in source @@ -4393,10 +4475,11 @@ bool collect_file_decls(CheckerContext *ctx, Slice const &decls) { for_array(i, decls) { if (collect_file_decl(ctx, decls[i])) { + correct_type_aliases_in_scope(ctx, ctx->scope); return true; } } - + correct_type_aliases_in_scope(ctx, ctx->scope); return false; } @@ -4666,6 +4749,15 @@ void check_import_entities(Checker *c) { } add_untyped_expressions(ctx.info, &untyped); } + + 
for_array(i, pkg->files) { + AstFile *f = pkg->files[i]; + reset_checker_context(&ctx, f, &untyped); + ctx.collect_delayed_decls = false; + + correct_type_aliases_in_scope(&ctx, pkg->scope); + } + for_array(i, pkg->files) { AstFile *f = pkg->files[i]; reset_checker_context(&ctx, f, &untyped); @@ -4887,6 +4979,9 @@ bool check_proc_info(Checker *c, ProcInfo *pi, UntypedExprInfoMap *untyped, Proc bool bounds_check = (pi->tags & ProcTag_bounds_check) != 0; bool no_bounds_check = (pi->tags & ProcTag_no_bounds_check) != 0; + bool type_assert = (pi->tags & ProcTag_type_assert) != 0; + bool no_type_assert = (pi->tags & ProcTag_no_type_assert) != 0; + if (bounds_check) { ctx.state_flags |= StateFlag_bounds_check; ctx.state_flags &= ~StateFlag_no_bounds_check; @@ -4894,6 +4989,15 @@ bool check_proc_info(Checker *c, ProcInfo *pi, UntypedExprInfoMap *untyped, Proc ctx.state_flags |= StateFlag_no_bounds_check; ctx.state_flags &= ~StateFlag_bounds_check; } + + if (type_assert) { + ctx.state_flags |= StateFlag_type_assert; + ctx.state_flags &= ~StateFlag_no_type_assert; + } else if (no_type_assert) { + ctx.state_flags |= StateFlag_no_type_assert; + ctx.state_flags &= ~StateFlag_type_assert; + } + if (pi->body != nullptr && e != nullptr) { GB_ASSERT((e->flags & EntityFlag_ProcBodyChecked) == 0); } @@ -5307,12 +5411,18 @@ void check_unique_package_names(Checker *c) { string_map_set(&pkgs, key, pkg); continue; } + auto *curr = pkg->files[0]->pkg_decl; + auto *prev = (*found)->files[0]->pkg_decl; + if (curr == prev) { + // NOTE(bill): A false positive was found, ignore it + continue; + } - error(pkg->files[0]->pkg_decl, "Duplicate declaration of 'package %.*s'", LIT(name)); + error(curr, "Duplicate declaration of 'package %.*s'", LIT(name)); error_line("\tA package name must be unique\n" "\tThere is no relation between a package name and the directory that contains it, so they can be completely different\n" "\tA package name is required for link name prefixing to have a consistent ABI\n"); - error((*found)->files[0]->pkg_decl, "found at previous location"); + error(prev, "found at previous location"); } } @@ -5504,9 +5614,6 @@ void check_parsed_files(Checker *c) { TIME_SECTION("calculate global init order"); calculate_global_init_order(c); - TIME_SECTION("generate minimum dependency set"); - generate_minimum_dependency_set(c, c->info.entry_point); - TIME_SECTION("check test procedures"); check_test_procedures(c); @@ -5517,6 +5624,9 @@ void check_parsed_files(Checker *c) { add_type_info_for_type_definitions(c); check_merge_queues_into_arrays(c); + TIME_SECTION("generate minimum dependency set"); + generate_minimum_dependency_set(c, c->info.entry_point); + TIME_SECTION("check entry point"); if (build_context.build_mode == BuildMode_Executable && !build_context.no_entry_point && build_context.command_kind != Command_test) { Scope *s = c->info.init_scope; diff --git a/src/checker_builtin_procs.hpp b/src/checker_builtin_procs.hpp index e8f5174c0..d833a055f 100644 --- a/src/checker_builtin_procs.hpp +++ b/src/checker_builtin_procs.hpp @@ -213,8 +213,6 @@ BuiltinProc__type_simple_boolean_begin, BuiltinProc_type_is_union, BuiltinProc_type_is_enum, BuiltinProc_type_is_proc, - BuiltinProc_type_is_bit_field, - BuiltinProc_type_is_bit_field_value, BuiltinProc_type_is_bit_set, BuiltinProc_type_is_simd_vector, BuiltinProc_type_is_matrix, @@ -466,8 +464,6 @@ gb_global BuiltinProc builtin_procs[BuiltinProc_COUNT] = { {STR_LIT("type_is_union"), 1, false, Expr_Expr, BuiltinProcPkg_intrinsics}, {STR_LIT("type_is_enum"), 1, 
false, Expr_Expr, BuiltinProcPkg_intrinsics}, {STR_LIT("type_is_proc"), 1, false, Expr_Expr, BuiltinProcPkg_intrinsics}, - {STR_LIT("type_is_bit_field"), 1, false, Expr_Expr, BuiltinProcPkg_intrinsics}, - {STR_LIT("type_is_bit_field_value"), 1, false, Expr_Expr, BuiltinProcPkg_intrinsics}, {STR_LIT("type_is_bit_set"), 1, false, Expr_Expr, BuiltinProcPkg_intrinsics}, {STR_LIT("type_is_simd_vector"), 1, false, Expr_Expr, BuiltinProcPkg_intrinsics}, {STR_LIT("type_is_matrix"), 1, false, Expr_Expr, BuiltinProcPkg_intrinsics}, diff --git a/src/docs_format.cpp b/src/docs_format.cpp index f47fd0945..7ce93d2bf 100644 --- a/src/docs_format.cpp +++ b/src/docs_format.cpp @@ -15,7 +15,7 @@ struct OdinDocVersionType { #define OdinDocVersionType_Major 0 #define OdinDocVersionType_Minor 2 -#define OdinDocVersionType_Patch 3 +#define OdinDocVersionType_Patch 4 struct OdinDocHeaderBase { u8 magic[8]; @@ -154,6 +154,7 @@ enum OdinDocEntityKind : u32 { OdinDocEntity_ProcGroup = 5, OdinDocEntity_ImportName = 6, OdinDocEntity_LibraryName = 7, + OdinDocEntity_Builtin = 8, }; enum OdinDocEntityFlag : u64 { @@ -170,6 +171,9 @@ enum OdinDocEntityFlag : u64 { OdinDocEntityFlag_Type_Alias = 1ull<<20, + OdinDocEntityFlag_Builtin_Pkg_Builtin = 1ull<<30, + OdinDocEntityFlag_Builtin_Pkg_Intrinsics = 1ull<<31, + OdinDocEntityFlag_Var_Thread_Local = 1ull<<40, OdinDocEntityFlag_Var_Static = 1ull<<41, @@ -185,8 +189,8 @@ struct OdinDocEntity { OdinDocTypeIndex type; OdinDocString init_string; u32 reserved_for_init; - OdinDocString comment; - OdinDocString docs; + OdinDocString comment; // line comment + OdinDocString docs; // preceding comment i32 field_group_index; OdinDocEntityIndex foreign_library; OdinDocString link_name; @@ -201,15 +205,21 @@ enum OdinDocPkgFlags : u32 { OdinDocPkgFlag_Init = 1<<2, }; +struct OdinDocScopeEntry { + OdinDocString name; + OdinDocEntityIndex entity; +}; + struct OdinDocPkg { OdinDocString fullpath; OdinDocString name; u32 flags; OdinDocString docs; - OdinDocArray files; - OdinDocArray entities; + OdinDocArray files; + OdinDocArray entries; }; + struct OdinDocHeader { OdinDocHeaderBase base; diff --git a/src/docs_writer.cpp b/src/docs_writer.cpp index 0a990cc37..0474ce8ff 100644 --- a/src/docs_writer.cpp +++ b/src/docs_writer.cpp @@ -811,8 +811,17 @@ OdinDocEntityIndex odin_doc_add_entity(OdinDocWriter *w, Entity *e) { comment = e->decl_info->comment; docs = e->decl_info->docs; } + if (e->kind == Entity_Variable) { + if (!comment) { comment = e->Variable.comment; } + if (!docs) { docs = e->Variable.docs; } + } else if (e->kind == Entity_Constant) { + if (!comment) { comment = e->Constant.comment; } + if (!docs) { docs = e->Constant.docs; } + } + String name = e->token.string; String link_name = {}; + TokenPos pos = e->token.pos; OdinDocEntityKind kind = OdinDocEntity_Invalid; u64 flags = 0; @@ -827,6 +836,7 @@ OdinDocEntityIndex odin_doc_add_entity(OdinDocWriter *w, Entity *e) { case Entity_ProcGroup: kind = OdinDocEntity_ProcGroup; break; case Entity_ImportName: kind = OdinDocEntity_ImportName; break; case Entity_LibraryName: kind = OdinDocEntity_LibraryName; break; + case Entity_Builtin: kind = OdinDocEntity_Builtin; break; } switch (e->kind) { @@ -856,6 +866,23 @@ OdinDocEntityIndex odin_doc_add_entity(OdinDocWriter *w, Entity *e) { if (e->Procedure.is_export) { flags |= OdinDocEntityFlag_Export; } link_name = e->Procedure.link_name; break; + case Entity_Builtin: + { + auto bp = builtin_procs[e->Builtin.id]; + pos = {}; + name = bp.name; + switch (bp.pkg) { + case BuiltinProcPkg_builtin: 
+ flags |= OdinDocEntityFlag_Builtin_Pkg_Builtin; + break; + case BuiltinProcPkg_intrinsics: + flags |= OdinDocEntityFlag_Builtin_Pkg_Intrinsics; + break; + default: + GB_PANIC("Unhandled BuiltinProcPkg"); + } + } + break; } if (e->flags & EntityFlag_Param) { @@ -891,8 +918,8 @@ OdinDocEntityIndex odin_doc_add_entity(OdinDocWriter *w, Entity *e) { doc_entity.kind = kind; doc_entity.flags = flags; - doc_entity.pos = odin_doc_token_pos_cast(w, e->token.pos); - doc_entity.name = odin_doc_write_string(w, e->token.string); + doc_entity.pos = odin_doc_token_pos_cast(w, pos); + doc_entity.name = odin_doc_write_string(w, name); doc_entity.type = 0; // Set later doc_entity.init_string = init_string; doc_entity.comment = odin_doc_comment_group_string(w, comment); @@ -969,7 +996,7 @@ void odin_doc_update_entities(OdinDocWriter *w) { -OdinDocArray odin_doc_add_pkg_entities(OdinDocWriter *w, AstPackage *pkg) { +OdinDocArray odin_doc_add_pkg_entries(OdinDocWriter *w, AstPackage *pkg) { if (pkg->scope == nullptr) { return {}; } @@ -977,14 +1004,14 @@ OdinDocArray odin_doc_add_pkg_entities(OdinDocWriter *w, Ast return {}; } - auto entities = array_make(heap_allocator(), 0, pkg->scope->elements.entries.count); - defer (array_free(&entities)); + auto entries = array_make(heap_allocator(), 0, w->entity_cache.entries.count); + defer (array_free(&entries)); for_array(i, pkg->scope->elements.entries) { + String name = pkg->scope->elements.entries[i].key.string; Entity *e = pkg->scope->elements.entries[i].value; switch (e->kind) { case Entity_Invalid: - case Entity_Builtin: case Entity_Nil: case Entity_Label: continue; @@ -995,34 +1022,27 @@ OdinDocArray odin_doc_add_pkg_entities(OdinDocWriter *w, Ast case Entity_ProcGroup: case Entity_ImportName: case Entity_LibraryName: + case Entity_Builtin: // Fine break; } - array_add(&entities, e); - } - gb_sort_array(entities.data, entities.count, cmp_entities_for_printing); - - auto entity_indices = array_make(heap_allocator(), 0, w->entity_cache.entries.count); - defer (array_free(&entity_indices)); - - for_array(i, entities) { - Entity *e = entities[i]; if (e->pkg != pkg) { continue; } - if (!is_entity_exported(e)) { + if (!is_entity_exported(e, true)) { continue; } if (e->token.string.len == 0) { continue; } - OdinDocEntityIndex doc_entity_index = 0; - doc_entity_index = odin_doc_add_entity(w, e); - array_add(&entity_indices, doc_entity_index); + OdinDocScopeEntry entry = {}; + entry.name = odin_doc_write_string(w, name); + entry.entity = odin_doc_add_entity(w, e); + array_add(&entries, entry); } - return odin_write_slice(w, entity_indices.data, entity_indices.count); + return odin_write_slice(w, entries.data, entries.count); } @@ -1090,7 +1110,7 @@ void odin_doc_write_docs(OdinDocWriter *w) { } doc_pkg.files = odin_write_slice(w, file_indices.data, file_indices.count); - doc_pkg.entities = odin_doc_add_pkg_entities(w, pkg); + doc_pkg.entries = odin_doc_add_pkg_entries(w, pkg); if (dst) { *dst = doc_pkg; diff --git a/src/entity.cpp b/src/entity.cpp index 05ee9a33e..8327a517e 100644 --- a/src/entity.cpp +++ b/src/entity.cpp @@ -161,6 +161,8 @@ struct Entity { ParameterValue param_value; u32 flags; i32 field_group_index; + CommentGroup *docs; + CommentGroup *comment; } Constant; struct { Ast *init_expr; // only used for some variables within procedure bodies @@ -175,6 +177,8 @@ struct Entity { String link_name; String link_prefix; String link_section; + CommentGroup *docs; + CommentGroup *comment; bool is_foreign; bool is_export; } Variable; @@ -241,7 +245,7 @@ bool 
is_entity_exported(Entity *e, bool allow_builtin = false) { if (e->flags & EntityFlag_NotExported) { return false; } - if (e->file != nullptr && (e->file->flags & AstFile_IsPrivate) != 0) { + if (e->file != nullptr && (e->file->flags & (AstFile_IsPrivatePkg|AstFile_IsPrivateFile)) != 0) { return false; } diff --git a/src/error.cpp b/src/error.cpp index 1496b4775..faf4d11fb 100644 --- a/src/error.cpp +++ b/src/error.cpp @@ -33,6 +33,10 @@ void init_global_error_collector(void) { } +// temporary +// defined in build_settings.cpp +char *token_pos_to_string(TokenPos const &pos); + bool set_file_path_string(i32 index, String const &path) { bool ok = false; GB_ASSERT(index >= 0); @@ -403,6 +407,7 @@ void compiler_error(char const *fmt, ...) { gb_printf_err("Internal Compiler Error: %s\n", gb_bprintf_va(fmt, va)); va_end(va); + GB_DEBUG_TRAP(); gb_exit(1); } diff --git a/src/llvm_backend.hpp b/src/llvm_backend.hpp index 49f675a49..d7093bc63 100644 --- a/src/llvm_backend.hpp +++ b/src/llvm_backend.hpp @@ -232,6 +232,7 @@ struct lbTargetList { enum lbProcedureFlag : u32 { lbProcedureFlag_WithoutMemcpyPass = 1<<0, + lbProcedureFlag_DebugAllocaCopy = 1<<1, }; struct lbCopyElisionHint { diff --git a/src/llvm_backend_const.cpp b/src/llvm_backend_const.cpp index 5862a7add..8f17a1cfb 100644 --- a/src/llvm_backend_const.cpp +++ b/src/llvm_backend_const.cpp @@ -115,8 +115,8 @@ LLVMValueRef llvm_const_cast(LLVMValueRef val, LLVMTypeRef dst) { lbValue lb_const_ptr_cast(lbModule *m, lbValue value, Type *t) { - GB_ASSERT(is_type_pointer(value.type)); - GB_ASSERT(is_type_pointer(t)); + GB_ASSERT(is_type_internally_pointer_like(value.type)); + GB_ASSERT(is_type_internally_pointer_like(t)); GB_ASSERT(lb_is_const(value)); lbValue res = {}; @@ -175,7 +175,7 @@ LLVMValueRef llvm_const_array(LLVMTypeRef elem_type, LLVMValueRef *values, isize } LLVMValueRef llvm_const_slice(lbModule *m, lbValue data, lbValue len) { - GB_ASSERT(is_type_pointer(data.type)); + GB_ASSERT(is_type_pointer(data.type) || is_type_multi_pointer(data.type)); GB_ASSERT(are_types_identical(len.type, t_int)); LLVMValueRef vals[2] = { data.value, @@ -568,7 +568,7 @@ lbValue lb_const_value(lbModule *m, Type *type, ExactValue value, bool allow_loc } case ExactValue_Integer: - if (is_type_pointer(type)) { + if (is_type_pointer(type) || is_type_multi_pointer(type)) { LLVMTypeRef t = lb_type(m, original_type); LLVMValueRef i = lb_big_int_to_llvm(m, t_uintptr, &value.value_integer); res.value = LLVMConstIntToPtr(i, t); diff --git a/src/llvm_backend_debug.cpp b/src/llvm_backend_debug.cpp index 7a2b00fe9..f60096aad 100644 --- a/src/llvm_backend_debug.cpp +++ b/src/llvm_backend_debug.cpp @@ -965,6 +965,77 @@ void lb_add_debug_local_variable(lbProcedure *p, LLVMValueRef ptr, Type *type, T LLVMDIBuilderInsertDeclareBefore(m->debug_builder, storage, var_info, llvm_expr, llvm_debug_loc, instr); } + +void lb_add_debug_param_variable(lbProcedure *p, LLVMValueRef ptr, Type *type, Token const &token, unsigned arg_number) { + if (p->debug_info == nullptr) { + return; + } + if (type == nullptr) { + return; + } + if (type == t_invalid) { + return; + } + if (p->body == nullptr) { + return; + } + + lbModule *m = p->module; + String const &name = token.string; + if (name == "" || name == "_") { + return; + } + + if (lb_get_llvm_metadata(m, ptr) != nullptr) { + // Already been set + return; + } + + + AstFile *file = p->body->file(); + + LLVMMetadataRef llvm_scope = lb_get_current_debug_scope(p); + LLVMMetadataRef llvm_file = lb_get_llvm_metadata(m, file); + 
GB_ASSERT(llvm_scope != nullptr); + if (llvm_file == nullptr) { + llvm_file = LLVMDIScopeGetFile(llvm_scope); + } + + if (llvm_file == nullptr) { + return; + } + + LLVMDIFlags flags = LLVMDIFlagZero; + LLVMBool always_preserve = build_context.optimization_level == 0; + + LLVMMetadataRef debug_type = lb_debug_type(m, type); + + LLVMMetadataRef var_info = LLVMDIBuilderCreateParameterVariable( + m->debug_builder, llvm_scope, + cast(char const *)name.text, cast(size_t)name.len, + arg_number, + llvm_file, token.pos.line, + debug_type, + always_preserve, flags + ); + + LLVMValueRef storage = ptr; + LLVMValueRef instr = ptr; + LLVMBasicBlockRef block = p->decl_block->block; + LLVMMetadataRef llvm_debug_loc = lb_debug_location_from_token_pos(p, token.pos); + LLVMMetadataRef llvm_expr = LLVMDIBuilderCreateExpression(m->debug_builder, nullptr, 0); + lb_set_llvm_metadata(m, ptr, llvm_expr); + if (LLVMIsAAllocaInst(instr)) { + LLVMDIBuilderInsertDeclareBefore(m->debug_builder, storage, var_info, llvm_expr, llvm_debug_loc, instr); + } else { + // NOTE(bill, 2022-02-01): For parameter values, you must insert them at the end of the decl block + // The reason is that if the parameter is at index 0 and a pointer, there is not such things as an + // instruction "before" it. + LLVMDIBuilderInsertDeclareAtEnd(m->debug_builder, storage, var_info, llvm_expr, llvm_debug_loc, block); + } +} + + void lb_add_debug_context_variable(lbProcedure *p, lbAddr const &ctx) { if (!p->debug_info || !p->body) { return; diff --git a/src/llvm_backend_expr.cpp b/src/llvm_backend_expr.cpp index 1f0ed6434..29a86d116 100644 --- a/src/llvm_backend_expr.cpp +++ b/src/llvm_backend_expr.cpp @@ -580,6 +580,27 @@ LLVMValueRef lb_matrix_to_trimmed_vector(lbProcedure *p, lbValue m) { lbValue lb_emit_matrix_tranpose(lbProcedure *p, lbValue m, Type *type) { if (is_type_array(m.type)) { + i32 rank = type_math_rank(m.type); + if (rank == 2) { + lbAddr addr = lb_add_local_generated(p, type, false); + lbValue dst = addr.addr; + lbValue src = m; + i32 n = cast(i32)get_array_type_count(m.type); + i32 m = cast(i32)get_array_type_count(type); + // m.type == [n][m]T + // type == [m][n]T + + for (i32 j = 0; j < m; j++) { + lbValue dst_col = lb_emit_struct_ep(p, dst, j); + for (i32 i = 0; i < n; i++) { + lbValue dst_row = lb_emit_struct_ep(p, dst_col, i); + lbValue src_col = lb_emit_struct_ev(p, src, i); + lbValue src_row = lb_emit_struct_ev(p, src_col, j); + lb_emit_store(p, dst_row, src_row); + } + } + return lb_addr_load(p, addr); + } // no-op m.type = type; return m; @@ -1834,6 +1855,15 @@ lbValue lb_emit_conv(lbProcedure *p, lbValue value, Type *t) { return lb_addr_load(p, parent); } } + if (dst->Union.variants.count == 1) { + Type *vt = dst->Union.variants[0]; + if (internal_check_is_assignable_to(src, vt)) { + value = lb_emit_conv(p, value, vt); + lbAddr parent = lb_add_local_generated(p, t, true); + lb_emit_store_union_variant(p, parent.addr, value, vt); + return lb_addr_load(p, parent); + } + } } // NOTE(bill): This has to be done before 'Pointer <-> Pointer' as it's @@ -2768,28 +2798,30 @@ lbValue lb_build_unary_and(lbProcedure *p, Ast *expr) { Type *src_type = type_deref(v.type); Type *dst_type = type; - lbValue src_tag = {}; - lbValue dst_tag = {}; - if (is_type_union_maybe_pointer(src_type)) { - src_tag = lb_emit_comp_against_nil(p, Token_NotEq, v); - dst_tag = lb_const_bool(p->module, t_bool, true); - } else { - src_tag = lb_emit_load(p, lb_emit_union_tag_ptr(p, v)); - dst_tag = lb_const_union_tag(p->module, src_type, dst_type); + + if 
((p->state_flags & StateFlag_no_type_assert) == 0) { + lbValue src_tag = {}; + lbValue dst_tag = {}; + if (is_type_union_maybe_pointer(src_type)) { + src_tag = lb_emit_comp_against_nil(p, Token_NotEq, v); + dst_tag = lb_const_bool(p->module, t_bool, true); + } else { + src_tag = lb_emit_load(p, lb_emit_union_tag_ptr(p, v)); + dst_tag = lb_const_union_tag(p->module, src_type, dst_type); + } + lbValue ok = lb_emit_comp(p, Token_CmpEq, src_tag, dst_tag); + auto args = array_make(permanent_allocator(), 6); + args[0] = ok; + + args[1] = lb_find_or_add_entity_string(p->module, get_file_path_string(pos.file_id)); + args[2] = lb_const_int(p->module, t_i32, pos.line); + args[3] = lb_const_int(p->module, t_i32, pos.column); + + args[4] = lb_typeid(p->module, src_type); + args[5] = lb_typeid(p->module, dst_type); + lb_emit_runtime_call(p, "type_assertion_check", args); } - lbValue ok = lb_emit_comp(p, Token_CmpEq, src_tag, dst_tag); - auto args = array_make(permanent_allocator(), 6); - args[0] = ok; - - args[1] = lb_find_or_add_entity_string(p->module, get_file_path_string(pos.file_id)); - args[2] = lb_const_int(p->module, t_i32, pos.line); - args[3] = lb_const_int(p->module, t_i32, pos.column); - - args[4] = lb_typeid(p->module, src_type); - args[5] = lb_typeid(p->module, dst_type); - lb_emit_runtime_call(p, "type_assertion_check", args); - lbValue data_ptr = v; return lb_emit_conv(p, data_ptr, tv.type); } else if (is_type_any(t)) { @@ -2797,23 +2829,23 @@ lbValue lb_build_unary_and(lbProcedure *p, Ast *expr) { if (is_type_pointer(v.type)) { v = lb_emit_load(p, v); } - lbValue data_ptr = lb_emit_struct_ev(p, v, 0); - lbValue any_id = lb_emit_struct_ev(p, v, 1); - lbValue id = lb_typeid(p->module, type); + if ((p->state_flags & StateFlag_no_type_assert) == 0) { + lbValue any_id = lb_emit_struct_ev(p, v, 1); + lbValue id = lb_typeid(p->module, type); + lbValue ok = lb_emit_comp(p, Token_CmpEq, any_id, id); + auto args = array_make(permanent_allocator(), 6); + args[0] = ok; - lbValue ok = lb_emit_comp(p, Token_CmpEq, any_id, id); - auto args = array_make(permanent_allocator(), 6); - args[0] = ok; + args[1] = lb_find_or_add_entity_string(p->module, get_file_path_string(pos.file_id)); + args[2] = lb_const_int(p->module, t_i32, pos.line); + args[3] = lb_const_int(p->module, t_i32, pos.column); - args[1] = lb_find_or_add_entity_string(p->module, get_file_path_string(pos.file_id)); - args[2] = lb_const_int(p->module, t_i32, pos.line); - args[3] = lb_const_int(p->module, t_i32, pos.column); - - args[4] = any_id; - args[5] = id; - lb_emit_runtime_call(p, "type_assertion_check", args); + args[4] = any_id; + args[5] = id; + lb_emit_runtime_call(p, "type_assertion_check", args); + } return lb_emit_conv(p, data_ptr, tv.type); } else { @@ -2843,6 +2875,14 @@ lbValue lb_build_expr(lbProcedure *p, Ast *expr) { out &= ~StateFlag_bounds_check; } + if (in & StateFlag_type_assert) { + out |= StateFlag_type_assert; + out &= ~StateFlag_no_type_assert; + } else if (in & StateFlag_no_type_assert) { + out |= StateFlag_no_type_assert; + out &= ~StateFlag_type_assert; + } + p->state_flags = out; } @@ -3460,7 +3500,8 @@ lbAddr lb_build_addr(lbProcedure *p, Ast *expr) { GB_ASSERT_MSG(is_type_indexable(t), "%s %s", type_to_string(t), expr_to_string(expr)); if (is_type_map(t)) { - lbValue map_val = lb_build_addr_ptr(p, ie->expr); + lbAddr map_addr = lb_build_addr(p, ie->expr); + lbValue map_val = lb_addr_load(p, map_addr); if (deref) { map_val = lb_emit_load(p, map_val); } @@ -3469,7 +3510,8 @@ lbAddr lb_build_addr(lbProcedure *p, 
Ast *expr) { key = lb_emit_conv(p, key, t->Map.key); Type *result_type = type_of_expr(expr); - return lb_addr_map(map_val, key, t, result_type); + lbValue map_ptr = lb_address_from_load_or_generate_local(p, map_val); + return lb_addr_map(map_ptr, key, t, result_type); } switch (t->kind) { diff --git a/src/llvm_backend_general.cpp b/src/llvm_backend_general.cpp index 17eeb0bea..2fc21b534 100644 --- a/src/llvm_backend_general.cpp +++ b/src/llvm_backend_general.cpp @@ -271,6 +271,10 @@ lbAddr lb_addr(lbValue addr) { lbAddr lb_addr_map(lbValue addr, lbValue map_key, Type *map_type, Type *map_result) { + GB_ASSERT(is_type_pointer(addr.type)); + Type *mt = type_deref(addr.type); + GB_ASSERT(is_type_map(mt)); + lbAddr v = {lbAddr_Map, addr}; v.map.key = map_key; v.map.type = map_type; @@ -1598,8 +1602,9 @@ LLVMTypeRef lb_type_internal(lbModule *m, Type *type) { return llvm_type; } llvm_type = LLVMStructCreateNamed(ctx, name); + LLVMTypeRef found_val = *found; map_set(&m->types, type, llvm_type); - lb_clone_struct_type(llvm_type, *found); + lb_clone_struct_type(llvm_type, found_val); return llvm_type; } } diff --git a/src/llvm_backend_proc.cpp b/src/llvm_backend_proc.cpp index 2a6eb6bb3..7a6fac603 100644 --- a/src/llvm_backend_proc.cpp +++ b/src/llvm_backend_proc.cpp @@ -61,7 +61,7 @@ lbProcedure *lb_create_procedure(lbModule *m, Entity *entity, bool ignore_body) GB_ASSERT(entity != nullptr); GB_ASSERT(entity->kind == Entity_Procedure); if (!entity->Procedure.is_foreign) { - GB_ASSERT(entity->flags & EntityFlag_ProcBodyChecked); + GB_ASSERT_MSG(entity->flags & EntityFlag_ProcBodyChecked, "%.*s :: %s", LIT(entity->token.string), type_to_string(entity->type)); } String link_name = {}; @@ -473,6 +473,8 @@ void lb_begin_procedure_body(lbProcedure *p) { } lbArgType *arg_type = &ft->args[param_index]; + defer (param_index += 1); + if (arg_type->kind == lbArg_Ignore) { continue; } else if (arg_type->kind == lbArg_Direct) { @@ -487,18 +489,19 @@ void lb_begin_procedure_body(lbProcedure *p) { param.type = e->type; lbValue ptr = lb_address_from_load_or_generate_local(p, param); + GB_ASSERT(LLVMIsAAllocaInst(ptr.value)); lb_add_entity(p->module, e, ptr); + lb_add_debug_param_variable(p, ptr.value, e->type, e->token, param_index+1); } } else if (arg_type->kind == lbArg_Indirect) { if (e->token.string.len != 0 && !is_blank_ident(e->token.string)) { lbValue ptr = {}; ptr.value = LLVMGetParam(p->value, param_offset+param_index); ptr.type = alloc_type_pointer(e->type); - lb_add_entity(p->module, e, ptr); + lb_add_debug_param_variable(p, ptr.value, e->type, e->token, param_index+1); } } - param_index += 1; } } diff --git a/src/llvm_backend_stmt.cpp b/src/llvm_backend_stmt.cpp index 3375ceda9..916c0433e 100644 --- a/src/llvm_backend_stmt.cpp +++ b/src/llvm_backend_stmt.cpp @@ -1991,6 +1991,13 @@ void lb_build_stmt(lbProcedure *p, Ast *node) { out |= StateFlag_no_bounds_check; out &= ~StateFlag_bounds_check; } + if (in & StateFlag_no_type_assert) { + out |= StateFlag_no_type_assert; + out &= ~StateFlag_type_assert; + } else if (in & StateFlag_type_assert) { + out |= StateFlag_type_assert; + out &= ~StateFlag_no_type_assert; + } p->state_flags = out; } diff --git a/src/llvm_backend_type.cpp b/src/llvm_backend_type.cpp index decb57702..1d6297164 100644 --- a/src/llvm_backend_type.cpp +++ b/src/llvm_backend_type.cpp @@ -1,11 +1,10 @@ isize lb_type_info_index(CheckerInfo *info, Type *type, bool err_on_not_found=true) { - isize index = type_info_index(info, type, false); + auto *set = 
&info->minimum_dependency_type_info_set; + isize index = type_info_index(info, type, err_on_not_found); if (index >= 0) { - auto *set = &info->minimum_dependency_type_info_set; - for_array(i, set->entries) { - if (set->entries[i].ptr == index) { - return i+1; - } + isize i = ptr_entry_index(set, index); + if (i >= 0) { + return i+1; } } if (err_on_not_found) { @@ -455,7 +454,7 @@ void lb_setup_type_info_data(lbProcedure *p) { // NOTE(bill): Setup type_info da case Type_EnumeratedArray: { tag = lb_const_ptr_cast(m, variant_ptr, t_type_info_enumerated_array_ptr); - LLVMValueRef vals[6] = { + LLVMValueRef vals[7] = { lb_get_type_info_ptr(m, t->EnumeratedArray.elem).value, lb_get_type_info_ptr(m, t->EnumeratedArray.index).value, lb_const_int(m, t_int, type_size_of(t->EnumeratedArray.elem)).value, @@ -464,6 +463,8 @@ void lb_setup_type_info_data(lbProcedure *p) { // NOTE(bill): Setup type_info da // Unions LLVMConstNull(lb_type(m, t_type_info_enum_value)), LLVMConstNull(lb_type(m, t_type_info_enum_value)), + + lb_const_bool(m, t_bool, t->EnumeratedArray.is_sparse).value, }; lbValue res = {}; diff --git a/src/llvm_backend_utility.cpp b/src/llvm_backend_utility.cpp index 5b1b11b44..7e2bd7daa 100644 --- a/src/llvm_backend_utility.cpp +++ b/src/llvm_backend_utility.cpp @@ -626,6 +626,12 @@ lbValue lb_emit_union_cast(lbProcedure *p, lbValue value, Type *type, TokenPos p lbValue value_ = lb_address_from_load_or_generate_local(p, value); + if ((p->state_flags & StateFlag_no_type_assert) != 0 && !is_tuple) { + // just do a bit cast of the data at the front + lbValue ptr = lb_emit_conv(p, value_, alloc_type_pointer(type)); + return lb_emit_load(p, ptr); + } + lbValue tag = {}; lbValue dst_tag = {}; lbValue cond = {}; @@ -666,23 +672,22 @@ lbValue lb_emit_union_cast(lbProcedure *p, lbValue value, Type *type, TokenPos p lb_start_block(p, end_block); if (!is_tuple) { - { - // NOTE(bill): Panic on invalid conversion - Type *dst_type = tuple->Tuple.variables[0]->type; + GB_ASSERT((p->state_flags & StateFlag_no_type_assert) == 0); + // NOTE(bill): Panic on invalid conversion + Type *dst_type = tuple->Tuple.variables[0]->type; - lbValue ok = lb_emit_load(p, lb_emit_struct_ep(p, v.addr, 1)); - auto args = array_make(permanent_allocator(), 7); - args[0] = ok; + lbValue ok = lb_emit_load(p, lb_emit_struct_ep(p, v.addr, 1)); + auto args = array_make(permanent_allocator(), 7); + args[0] = ok; - args[1] = lb_const_string(m, get_file_path_string(pos.file_id)); - args[2] = lb_const_int(m, t_i32, pos.line); - args[3] = lb_const_int(m, t_i32, pos.column); + args[1] = lb_const_string(m, get_file_path_string(pos.file_id)); + args[2] = lb_const_int(m, t_i32, pos.line); + args[3] = lb_const_int(m, t_i32, pos.column); - args[4] = lb_typeid(m, src_type); - args[5] = lb_typeid(m, dst_type); - args[6] = lb_emit_conv(p, value_, t_rawptr); - lb_emit_runtime_call(p, "type_assertion_check2", args); - } + args[4] = lb_typeid(m, src_type); + args[5] = lb_typeid(m, dst_type); + args[6] = lb_emit_conv(p, value_, t_rawptr); + lb_emit_runtime_call(p, "type_assertion_check2", args); return lb_emit_load(p, lb_emit_struct_ep(p, v.addr, 0)); } @@ -706,6 +711,13 @@ lbAddr lb_emit_any_cast_addr(lbProcedure *p, lbValue value, Type *type, TokenPos } Type *dst_type = tuple->Tuple.variables[0]->type; + if ((p->state_flags & StateFlag_no_type_assert) != 0 && !is_tuple) { + // just do a bit cast of the data at the front + lbValue ptr = lb_emit_struct_ev(p, value, 0); + ptr = lb_emit_conv(p, ptr, alloc_type_pointer(type)); + return lb_addr(ptr); + } 
+ lbAddr v = lb_add_local_generated(p, tuple, true); lbValue dst_typeid = lb_typeid(m, dst_type); @@ -731,7 +743,6 @@ lbAddr lb_emit_any_cast_addr(lbProcedure *p, lbValue value, Type *type, TokenPos if (!is_tuple) { // NOTE(bill): Panic on invalid conversion - lbValue ok = lb_emit_load(p, lb_emit_struct_ep(p, v.addr, 1)); auto args = array_make(permanent_allocator(), 7); args[0] = ok; diff --git a/src/main.cpp b/src/main.cpp index fe56d451f..efb0f584e 100644 --- a/src/main.cpp +++ b/src/main.cpp @@ -485,7 +485,7 @@ i32 linker_stage(lbGenerator *gen) { // NOTE: If you change this (although this minimum is as low as you can go with Odin working) // make sure to also change the 'mtriple' param passed to 'opt' #if defined(GB_CPU_ARM) - " -mmacosx-version-min=11.0.0 " + " -mmacosx-version-min=12.0.0 " #else " -mmacosx-version-min=10.8.0 " #endif @@ -1851,6 +1851,7 @@ void print_show_help(String const arg0, String const &command) { print_usage_line(1, " one must contain the program's entry point, all must be in the same package."); } else if (command == "run") { print_usage_line(1, "run same as 'build', but also then runs the newly compiled executable."); + print_usage_line(1, " append an empty flag and then the args, '-- ', to specify args for the output."); } else if (command == "check") { print_usage_line(1, "check parse and type check .odin file(s)"); } else if (command == "test") { diff --git a/src/parser.cpp b/src/parser.cpp index 7e7146244..0914c77ca 100644 --- a/src/parser.cpp +++ b/src/parser.cpp @@ -183,6 +183,11 @@ Ast *clone_ast(Ast *node) { n->FieldValue.value = clone_ast(n->FieldValue.value); break; + case Ast_EnumFieldValue: + n->EnumFieldValue.name = clone_ast(n->EnumFieldValue.name); + n->EnumFieldValue.value = clone_ast(n->EnumFieldValue.value); + break; + case Ast_TernaryIfExpr: n->TernaryIfExpr.x = clone_ast(n->TernaryIfExpr.x); n->TernaryIfExpr.cond = clone_ast(n->TernaryIfExpr.cond); @@ -693,6 +698,16 @@ Ast *ast_field_value(AstFile *f, Ast *field, Ast *value, Token eq) { return result; } + +Ast *ast_enum_field_value(AstFile *f, Ast *name, Ast *value, CommentGroup *docs, CommentGroup *comment) { + Ast *result = alloc_ast_node(f, Ast_EnumFieldValue); + result->EnumFieldValue.name = name; + result->EnumFieldValue.value = value; + result->EnumFieldValue.docs = docs; + result->EnumFieldValue.comment = comment; + return result; +} + Ast *ast_compound_lit(AstFile *f, Ast *type, Array const &elems, Token open, Token close) { Ast *result = alloc_ast_node(f, Ast_CompoundLit); result->CompoundLit.type = type; @@ -944,7 +959,7 @@ Ast *ast_field(AstFile *f, Array const &names, Ast *type, Ast *default_va result->Field.default_value = default_value; result->Field.flags = flags; result->Field.tag = tag; - result->Field.docs = docs; + result->Field.docs = docs; result->Field.comment = comment; return result; } @@ -1234,7 +1249,7 @@ CommentGroup *consume_comment_group(AstFile *f, isize n, isize *end_line_) { return comments; } -void comsume_comment_groups(AstFile *f, Token prev) { +void consume_comment_groups(AstFile *f, Token prev) { if (f->curr_token.kind == Token_Comment) { CommentGroup *comment = nullptr; isize end_line = 0; @@ -1278,7 +1293,7 @@ Token advance_token(AstFile *f) { if (ok) { switch (f->curr_token.kind) { case Token_Comment: - comsume_comment_groups(f, prev); + consume_comment_groups(f, prev); break; case Token_Semicolon: if (ignore_newlines(f) && f->curr_token.string == "\n") { @@ -1689,6 +1704,46 @@ Array parse_element_list(AstFile *f) { return elems; } +CommentGroup 
*consume_line_comment(AstFile *f) { + CommentGroup *comment = f->line_comment; + if (f->line_comment == f->lead_comment) { + f->lead_comment = nullptr; + } + f->line_comment = nullptr; + return comment; + +} + +Array parse_enum_field_list(AstFile *f) { + auto elems = array_make(heap_allocator()); + + while (f->curr_token.kind != Token_CloseBrace && + f->curr_token.kind != Token_EOF) { + CommentGroup *docs = f->lead_comment; + CommentGroup *comment = nullptr; + Ast *name = parse_value(f); + Ast *value = nullptr; + if (f->curr_token.kind == Token_Eq) { + Token eq = expect_token(f, Token_Eq); + value = parse_value(f); + } + + comment = consume_line_comment(f); + + Ast *elem = ast_enum_field_value(f, name, value, docs, comment); + array_add(&elems, elem); + + if (!allow_token(f, Token_Comma)) { + break; + } + + if (!elem->EnumFieldValue.comment) { + elem->EnumFieldValue.comment = consume_line_comment(f); + } + } + + return elems; +} Ast *parse_literal_value(AstFile *f, Ast *type) { Array elems = {}; @@ -1793,6 +1848,8 @@ void parse_proc_tags(AstFile *f, u64 *tags) { ELSE_IF_ADD_TAG(require_results) ELSE_IF_ADD_TAG(bounds_check) ELSE_IF_ADD_TAG(no_bounds_check) + ELSE_IF_ADD_TAG(type_assert) + ELSE_IF_ADD_TAG(no_type_assert) else { syntax_error(tag_expr, "Unknown procedure type tag #%.*s", LIT(tag_name)); } @@ -1803,6 +1860,10 @@ void parse_proc_tags(AstFile *f, u64 *tags) { if ((*tags & ProcTag_bounds_check) && (*tags & ProcTag_no_bounds_check)) { syntax_error(f->curr_token, "You cannot apply both #bounds_check and #no_bounds_check to a procedure"); } + + if ((*tags & ProcTag_type_assert) && (*tags & ProcTag_no_type_assert)) { + syntax_error(f->curr_token, "You cannot apply both #type_assert and #no_type_assert to a procedure"); + } } @@ -1950,11 +2011,23 @@ Ast *parse_check_directive_for_statement(Ast *s, Token const &tag_token, u16 sta syntax_error(tag_token, "#bounds_check and #no_bounds_check cannot be applied together"); } break; + case StateFlag_type_assert: + if ((s->state_flags & StateFlag_no_type_assert) != 0) { + syntax_error(tag_token, "#type_assert and #no_type_assert cannot be applied together"); + } + break; + case StateFlag_no_type_assert: + if ((s->state_flags & StateFlag_type_assert) != 0) { + syntax_error(tag_token, "#type_assert and #no_type_assert cannot be applied together"); + } + break; } switch (state_flag) { case StateFlag_bounds_check: case StateFlag_no_bounds_check: + case StateFlag_type_assert: + case StateFlag_no_type_assert: switch (s->kind) { case Ast_BlockStmt: case Ast_IfStmt: @@ -2062,6 +2135,22 @@ Ast *parse_operand(AstFile *f, bool lhs) { } return original_type; } else if (name.string == "partial") { + Ast *tag = ast_basic_directive(f, token, name); + Ast *original_expr = parse_expr(f, lhs); + Ast *expr = unparen_expr(original_expr); + switch (expr->kind) { + case Ast_ArrayType: + syntax_error(expr, "#partial has been replaced with #sparse for non-contiguous enumerated array types"); + break; + case Ast_CompoundLit: + expr->CompoundLit.tag = tag; + break; + default: + syntax_error(expr, "Expected a compound literal after #%.*s, got %.*s", LIT(name.string), LIT(ast_strings[expr->kind])); + break; + } + return original_expr; + } else if (name.string == "sparse") { Ast *tag = ast_basic_directive(f, token, name); Ast *original_type = parse_type(f); Ast *type = unparen_expr(original_type); @@ -2078,6 +2167,12 @@ Ast *parse_operand(AstFile *f, bool lhs) { } else if (name.string == "no_bounds_check") { Ast *operand = parse_expr(f, lhs); return 
parse_check_directive_for_statement(operand, name, StateFlag_no_bounds_check); + } else if (name.string == "type_assert") { + Ast *operand = parse_expr(f, lhs); + return parse_check_directive_for_statement(operand, name, StateFlag_type_assert); + } else if (name.string == "no_type_assert") { + Ast *operand = parse_expr(f, lhs); + return parse_check_directive_for_statement(operand, name, StateFlag_no_type_assert); } else if (name.string == "relative") { Ast *tag = ast_basic_directive(f, token, name); tag = parse_call_expr(f, tag); @@ -2174,6 +2269,12 @@ Ast *parse_operand(AstFile *f, bool lhs) { if (tags & ProcTag_bounds_check) { body->state_flags |= StateFlag_bounds_check; } + if (tags & ProcTag_no_type_assert) { + body->state_flags |= StateFlag_no_type_assert; + } + if (tags & ProcTag_type_assert) { + body->state_flags |= StateFlag_type_assert; + } return ast_proc_lit(f, type, body, tags, where_token, where_clauses); } else if (allow_token(f, Token_do)) { @@ -2449,7 +2550,7 @@ Ast *parse_operand(AstFile *f, bool lhs) { skip_possible_newline_for_literal(f); Token open = expect_token(f, Token_OpenBrace); - Array values = parse_element_list(f); + Array values = parse_enum_field_list(f); Token close = expect_closing_brace_of_field_list(f); return ast_enum_type(f, token, base_type, values); @@ -4561,6 +4662,12 @@ Ast *parse_stmt(AstFile *f) { } else if (tag == "no_bounds_check") { s = parse_stmt(f); return parse_check_directive_for_statement(s, name, StateFlag_no_bounds_check); + } else if (tag == "type_assert") { + s = parse_stmt(f); + return parse_check_directive_for_statement(s, name, StateFlag_type_assert); + } else if (tag == "no_type_assert") { + s = parse_stmt(f); + return parse_check_directive_for_statement(s, name, StateFlag_no_type_assert); } else if (tag == "partial") { s = parse_stmt(f); switch (s->kind) { @@ -5398,7 +5505,7 @@ bool parse_file(Parser *p, AstFile *f) { String filepath = f->tokenizer.fullpath; String base_dir = dir_from_path(filepath); if (f->curr_token.kind == Token_Comment) { - comsume_comment_groups(f, f->prev_token); + consume_comment_groups(f, f->prev_token); } CommentGroup *docs = f->lead_comment; @@ -5444,8 +5551,17 @@ bool parse_file(Parser *p, AstFile *f) { if (!parse_build_tag(tok, lc)) { return false; } - } else if (lc == "+private") { - f->flags |= AstFile_IsPrivate; + } else if (string_starts_with(lc, str_lit("+private"))) { + f->flags |= AstFile_IsPrivatePkg; + String command = string_trim_starts_with(lc, str_lit("+private ")); + command = string_trim_whitespace(command); + if (lc == "+private") { + f->flags |= AstFile_IsPrivatePkg; + } else if (command == "package") { + f->flags |= AstFile_IsPrivatePkg; + } else if (command == "file") { + f->flags |= AstFile_IsPrivateFile; + } } else if (lc == "+lazy") { if (build_context.ignore_lazy) { // Ignore diff --git a/src/parser.hpp b/src/parser.hpp index b83822cbf..ff0df0382 100644 --- a/src/parser.hpp +++ b/src/parser.hpp @@ -78,9 +78,11 @@ struct ImportedFile { }; enum AstFileFlag : u32 { - AstFile_IsPrivate = 1<<0, - AstFile_IsTest = 1<<1, - AstFile_IsLazy = 1<<2, + AstFile_IsPrivatePkg = 1<<0, + AstFile_IsPrivateFile = 1<<1, + + AstFile_IsTest = 1<<3, + AstFile_IsLazy = 1<<4, }; enum AstDelayQueueKind { @@ -226,6 +228,8 @@ enum ProcInlining { enum ProcTag { ProcTag_bounds_check = 1<<0, ProcTag_no_bounds_check = 1<<1, + ProcTag_type_assert = 1<<2, + ProcTag_no_type_assert = 1<<3, ProcTag_require_results = 1<<4, ProcTag_optional_ok = 1<<5, @@ -258,6 +262,8 @@ ProcCallingConvention 
default_calling_convention(void) { enum StateFlag : u8 { StateFlag_bounds_check = 1<<0, StateFlag_no_bounds_check = 1<<1, + StateFlag_type_assert = 1<<2, + StateFlag_no_type_assert = 1<<3, StateFlag_BeenHandled = 1<<7, }; @@ -344,6 +350,7 @@ char const *inline_asm_dialect_strings[InlineAsmDialect_COUNT] = { Slice elems; \ Token open, close; \ i64 max_count; \ + Ast *tag; \ }) \ AST_KIND(_ExprBegin, "", bool) \ AST_KIND(BadExpr, "bad expression", struct { Token begin, end; }) \ @@ -383,6 +390,12 @@ AST_KIND(_ExprBegin, "", bool) \ void *sce_temp_data; \ }) \ AST_KIND(FieldValue, "field value", struct { Token eq; Ast *field, *value; }) \ + AST_KIND(EnumFieldValue, "enum field value", struct { \ + Ast *name; \ + Ast *value; \ + CommentGroup *docs; \ + CommentGroup *comment; \ + }) \ AST_KIND(TernaryIfExpr, "ternary if expression", struct { Ast *x, *cond, *y; }) \ AST_KIND(TernaryWhenExpr, "ternary when expression", struct { Ast *x, *cond, *y; }) \ AST_KIND(OrElseExpr, "or_else expression", struct { Ast *x; Token token; Ast *y; }) \ diff --git a/src/parser_pos.cpp b/src/parser_pos.cpp index 6ef0db215..54c3ec1f1 100644 --- a/src/parser_pos.cpp +++ b/src/parser_pos.cpp @@ -39,6 +39,7 @@ Token ast_token(Ast *node) { case Ast_SliceExpr: return node->SliceExpr.open; case Ast_Ellipsis: return node->Ellipsis.token; case Ast_FieldValue: return node->FieldValue.eq; + case Ast_EnumFieldValue: return ast_token(node->EnumFieldValue.name); case Ast_DerefExpr: return node->DerefExpr.op; case Ast_TernaryIfExpr: return ast_token(node->TernaryIfExpr.x); case Ast_TernaryWhenExpr: return ast_token(node->TernaryWhenExpr.x); @@ -178,6 +179,11 @@ Token ast_end_token(Ast *node) { } return node->Ellipsis.token; case Ast_FieldValue: return ast_end_token(node->FieldValue.value); + case Ast_EnumFieldValue: + if (node->EnumFieldValue.value) { + return ast_end_token(node->EnumFieldValue.value); + } + return ast_end_token(node->EnumFieldValue.name); case Ast_DerefExpr: return node->DerefExpr.op; case Ast_TernaryIfExpr: return ast_end_token(node->TernaryIfExpr.y); case Ast_TernaryWhenExpr: return ast_end_token(node->TernaryWhenExpr.y); diff --git a/src/ptr_set.cpp b/src/ptr_set.cpp index ca7df3b53..b45997916 100644 --- a/src/ptr_set.cpp +++ b/src/ptr_set.cpp @@ -138,6 +138,15 @@ gb_inline bool ptr_set_exists(PtrSet *s, T ptr) { return index != MAP_SENTINEL; } +template +gb_inline isize ptr_entry_index(PtrSet *s, T ptr) { + isize index = ptr_set__find(s, ptr).entry_index; + if (index != MAP_SENTINEL) { + return index; + } + return -1; +} + // Returns true if it already exists template T ptr_set_add(PtrSet *s, T ptr) { diff --git a/src/string.cpp b/src/string.cpp index 800378689..eb6058f78 100644 --- a/src/string.cpp +++ b/src/string.cpp @@ -195,8 +195,6 @@ template bool operator > (String const &a, char const (&b)[N]) { retu template bool operator <= (String const &a, char const (&b)[N]) { return str_le(a, make_string(cast(u8 *)b, N-1)); } template bool operator >= (String const &a, char const (&b)[N]) { return str_ge(a, make_string(cast(u8 *)b, N-1)); } - - gb_inline bool string_starts_with(String const &s, String const &prefix) { if (prefix.len > s.len) { return false; @@ -230,6 +228,16 @@ gb_inline bool string_ends_with(String const &s, u8 suffix) { return s[s.len-1] == suffix; } + + +gb_inline String string_trim_starts_with(String const &s, String const &prefix) { + if (string_starts_with(s, prefix)) { + return substring(s, prefix.len, s.len); + } + return s; +} + + gb_inline isize string_extension_position(String const 
&str) { isize dot_pos = -1; isize i = str.len; diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp index 20815fd16..40bc5c220 100644 --- a/src/tokenizer.cpp +++ b/src/tokenizer.cpp @@ -201,14 +201,6 @@ struct TokenPos { i32 column; // starting at 1 }; -// temporary -char *token_pos_to_string(TokenPos const &pos) { - gbString s = gb_string_make_reserve(temporary_allocator(), 128); - String file = get_file_path_string(pos.file_id); - s = gb_string_append_fmt(s, "%.*s(%d:%d)", LIT(file), pos.line, pos.column); - return s; -} - i32 token_pos_cmp(TokenPos const &a, TokenPos const &b) { if (a.offset != b.offset) { return (a.offset < b.offset) ? -1 : +1; diff --git a/src/types.cpp b/src/types.cpp index 07951196a..9ee6ba359 100644 --- a/src/types.cpp +++ b/src/types.cpp @@ -221,6 +221,7 @@ struct TypeProc { ExactValue *max_value; \ i64 count; \ TokenKind op; \ + bool is_sparse; \ }) \ TYPE_KIND(Slice, struct { Type *elem; }) \ TYPE_KIND(DynamicArray, struct { Type *elem; }) \ @@ -362,6 +363,7 @@ enum TypeInfoFlag : u32 { enum : int { MATRIX_ELEMENT_COUNT_MIN = 1, MATRIX_ELEMENT_COUNT_MAX = 16, + MATRIX_ELEMENT_MAX_SIZE = MATRIX_ELEMENT_COUNT_MAX * (2 * 8), // complex128 }; @@ -1582,6 +1584,24 @@ Type *core_array_type(Type *t) { } } +i32 type_math_rank(Type *t) { + i32 rank = 0; + for (;;) { + t = base_type(t); + switch (t->kind) { + case Type_Array: + rank += 1; + t = t->Array.elem; + break; + case Type_Matrix: + rank += 2; + t = t->Matrix.elem; + break; + default: + return rank; + } + } +} Type *base_complex_elem_type(Type *t) { @@ -3830,6 +3850,9 @@ gbString write_type_to_string(gbString str, Type *type) { break; case Type_EnumeratedArray: + if (type->EnumeratedArray.is_sparse) { + str = gb_string_appendc(str, "#sparse"); + } str = gb_string_append_rune(str, '['); str = write_type_to_string(str, type->EnumeratedArray.index); str = gb_string_append_rune(str, ']'); diff --git a/tests/core/Makefile b/tests/core/Makefile index 0f0ffe4d6..1c2cee6bd 100644 --- a/tests/core/Makefile +++ b/tests/core/Makefile @@ -1,7 +1,7 @@ ODIN=../../odin PYTHON=$(shell which python3) -all: download_test_assets image_test compress_test strings_test hash_test crypto_test +all: download_test_assets image_test compress_test strings_test hash_test crypto_test noise_test download_test_assets: $(PYTHON) download_assets.py @@ -19,4 +19,7 @@ hash_test: $(ODIN) run hash -out=test_hash -o:speed -no-bounds-check crypto_test: - $(ODIN) run crypto -out=crypto_hash -o:speed -no-bounds-check \ No newline at end of file + $(ODIN) run crypto -out=crypto_hash -o:speed -no-bounds-check + +noise_test: + $(ODIN) run math/noise -out=test_noise \ No newline at end of file diff --git a/tests/core/build.bat b/tests/core/build.bat index 176b7f175..6af39e688 100644 --- a/tests/core/build.bat +++ b/tests/core/build.bat @@ -35,4 +35,9 @@ echo --- echo --- echo Running core:encoding tests echo --- -%PATH_TO_ODIN% run encoding %COMMON% \ No newline at end of file +%PATH_TO_ODIN% run encoding %COMMON% + +echo --- +echo Running core:math/noise tests +echo --- +%PATH_TO_ODIN% run math/noise %COMMON% \ No newline at end of file diff --git a/tests/core/math/noise/test_core_math_noise.odin b/tests/core/math/noise/test_core_math_noise.odin new file mode 100644 index 000000000..c3a3e4228 --- /dev/null +++ b/tests/core/math/noise/test_core_math_noise.odin @@ -0,0 +1,149 @@ +package test_core_math_noise + +import "core:testing" +import "core:math/noise" +import "core:fmt" + +TEST_count := 0 +TEST_fail := 0 + +V2 :: noise.Vec2 +V3 :: noise.Vec3 +V4 :: 
noise.Vec4 + +when ODIN_TEST { + expect :: testing.expect + log :: testing.log +} else { + expect :: proc(t: ^testing.T, condition: bool, message: string, loc := #caller_location) { + fmt.printf("[%v] ", loc) + TEST_count += 1 + if !condition { + TEST_fail += 1 + fmt.println(message) + return + } + fmt.println(" PASS") + } + log :: proc(t: ^testing.T, v: any, loc := #caller_location) { + fmt.printf("[%v] ", loc) + fmt.printf("log: %v\n", v) + } +} + +main :: proc() { + t := testing.T{} + noise_test(&t) + fmt.printf("%v/%v tests successful.\n", TEST_count - TEST_fail, TEST_count) +} + +Test_Vector :: struct { + seed: i64, + coord: union {V2, V3, V4}, + expected: f32, + + test_proc: union { + proc(_: i64, _: V2) -> f32, + proc(_: i64, _: V3) -> f32, + proc(_: i64, _: V4) -> f32, + }, +} + +SEED_1 :: 2324223232 +SEED_2 :: 932466901 +SEED_3 :: 9321 + +COORD_1 :: V4{ 242.0, 3433.0, 920.0, 222312.0} +COORD_2 :: V4{ 590.0, 9411.0, 5201.0, 942124256.0} +COORD_3 :: V4{12090.0, 19411.0, 81950901.0, 4224219.0} + +Noise_Tests := []Test_Vector{ + /* + `noise_2d` tests. + */ + {SEED_1, COORD_1.xy, 0.25010583, noise.noise_2d}, + {SEED_2, COORD_2.xy, -0.92513955, noise.noise_2d}, + {SEED_3, COORD_3.xy, 0.67327416, noise.noise_2d}, + + /* + `noise_2d_improve_x` tests. + */ + {SEED_1, COORD_1.xy, 0.17074019, noise.noise_2d_improve_x}, + {SEED_2, COORD_2.xy, 0.72330487, noise.noise_2d_improve_x}, + {SEED_3, COORD_3.xy, -0.032076947, noise.noise_2d_improve_x}, + + /* + `noise_3d_improve_xy` tests. + */ + {SEED_1, COORD_1.xyz, 0.14819577, noise.noise_3d_improve_xy}, + {SEED_2, COORD_2.xyz, -0.065345764, noise.noise_3d_improve_xy}, + {SEED_3, COORD_3.xyz, -0.37761918, noise.noise_3d_improve_xy}, + + /* + `noise_3d_improve_xz` tests. + */ + {SEED_1, COORD_1.xyz, -0.50075006, noise.noise_3d_improve_xz}, + {SEED_2, COORD_2.xyz, -0.36039603, noise.noise_3d_improve_xz}, + {SEED_3, COORD_3.xyz, -0.3479203, noise.noise_3d_improve_xz}, + + /* + `noise_3d_fallback` tests. + */ + {SEED_1, COORD_1.xyz, 0.6557345, noise.noise_3d_fallback}, + {SEED_2, COORD_2.xyz, 0.55452216, noise.noise_3d_fallback}, + {SEED_3, COORD_3.xyz, -0.26408964, noise.noise_3d_fallback}, + + /* + `noise_3d_fallback` tests. + */ + {SEED_1, COORD_1.xyz, 0.6557345, noise.noise_3d_fallback}, + {SEED_2, COORD_2.xyz, 0.55452216, noise.noise_3d_fallback}, + {SEED_3, COORD_3.xyz, -0.26408964, noise.noise_3d_fallback}, + + /* + `noise_4d_improve_xyz_improve_xy` tests. + */ + {SEED_1, COORD_1, 0.44929826, noise.noise_4d_improve_xyz_improve_xy}, + {SEED_2, COORD_2, -0.13270882, noise.noise_4d_improve_xyz_improve_xy}, + {SEED_3, COORD_3, 0.10298563, noise.noise_4d_improve_xyz_improve_xy}, + + /* + `noise_4d_improve_xyz_improve_xz` tests. + */ + {SEED_1, COORD_1, -0.078514606, noise.noise_4d_improve_xyz_improve_xz}, + {SEED_2, COORD_2, -0.032157656, noise.noise_4d_improve_xyz_improve_xz}, + {SEED_3, COORD_3, -0.38607058, noise.noise_4d_improve_xyz_improve_xz}, + + /* + `noise_4d_improve_xyz` tests. + */ + {SEED_1, COORD_1, -0.4442258, noise.noise_4d_improve_xyz}, + {SEED_2, COORD_2, 0.36822623, noise.noise_4d_improve_xyz}, + {SEED_3, COORD_3, 0.22628775, noise.noise_4d_improve_xyz}, + + /* + `noise_4d_fallback` tests. 
+	*/
+	{SEED_1, COORD_1, -0.14233987, noise.noise_4d_fallback},
+	{SEED_2, COORD_2,  0.1354035,  noise.noise_4d_fallback},
+	{SEED_3, COORD_3,  0.14565045, noise.noise_4d_fallback},
+
+}
+
+noise_test :: proc(t: ^testing.T) {
+	for test in Noise_Tests {
+		output: f32
+
+		switch coord in test.coord {
+		case V2:
+			output = test.test_proc.(proc(_: i64, _: V2) -> f32)(test.seed, test.coord.(V2))
+		case V3:
+			output = test.test_proc.(proc(_: i64, _: V3) -> f32)(test.seed, test.coord.(V3))
+		case V4:
+			output = test.test_proc.(proc(_: i64, _: V4) -> f32)(test.seed, test.coord.(V4))
+		}
+
+		error := fmt.tprintf("Seed %v, Coord: %v, Expected: %3.8f. Got %3.8f", test.seed, test.coord, test.expected, output)
+		expect(t, test.expected == output, error)
+	}
+}
\ No newline at end of file
diff --git a/tools/odin-html-docs/footer.txt.html b/tools/odin-html-docs/footer.txt.html
deleted file mode 100644
index 548a00995..000000000
--- a/tools/odin-html-docs/footer.txt.html
+++ /dev/null
@@ -1,44 +0,0 @@
diff --git a/tools/odin-html-docs/header.txt.html b/tools/odin-html-docs/header.txt.html
deleted file mode 100644
index 4bb9036e2..000000000
--- a/tools/odin-html-docs/header.txt.html
+++ /dev/null
@@ -1,6 +0,0 @@
diff --git a/tools/odin-html-docs/odin_html_docs_main.odin b/tools/odin-html-docs/odin_html_docs_main.odin
deleted file mode 100644
index 7f2e7b59f..000000000
--- a/tools/odin-html-docs/odin_html_docs_main.odin
+++ /dev/null
@@ -1,1344 +0,0 @@
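For reference, the new tests/core/math/noise suite above drives core:math/noise through plain noise_*(seed, coord) calls returning f32. A minimal usage sketch, assuming only the procedures and vector types that already appear in test_core_math_noise.odin; the package name, printed format, and coordinate values are illustrative:

package noise_example

import "core:fmt"
import "core:math/noise"

main :: proc() {
	seed: i64 = 9321

	// Sample the same procedures the Noise_Tests table exercises.
	n2 := noise.noise_2d(seed, noise.Vec2{242.0, 3433.0})
	n3 := noise.noise_3d_improve_xy(seed, noise.Vec3{242.0, 3433.0, 920.0})
	n4 := noise.noise_4d_fallback(seed, noise.Vec4{242.0, 3433.0, 920.0, 222312.0})

	fmt.printf("2d: %v  3d: %v  4d: %v\n", n2, n3, n4)
}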
{ - fmt.eprintf("%s ", os.args[0]) - fmt.eprintf(format, ..args) - fmt.eprintln() - os.exit(1) -} - -base_type :: proc(t: doc.Type) -> doc.Type { - t := t - for { - if t.kind != .Named { - break - } - t = types[array(t.types)[0]] - } - return t -} - -is_type_untyped :: proc(type: doc.Type) -> bool { - if type.kind == .Basic { - flags := transmute(doc.Type_Flags_Basic)type.flags - return .Untyped in flags - } - return false -} - -common_prefix :: proc(strs: []string) -> string { - if len(strs) == 0 { - return "" - } - n := max(int) - for str in strs { - n = min(n, len(str)) - } - - prefix := strs[0][:n] - for str in strs[1:] { - for len(prefix) != 0 && str[:len(prefix)] != prefix { - prefix = prefix[:len(prefix)-1] - } - if len(prefix) == 0 { - break - } - } - return prefix -} - -recursive_make_directory :: proc(path: string, prefix := "") { - head, _, tail := strings.partition(path, "/") - path_to_make := head - if prefix != "" { - path_to_make = fmt.tprintf("%s/%s", prefix, head) - } - os.make_directory(path_to_make, 0) - if tail != "" { - recursive_make_directory(tail, path_to_make) - } -} - - -write_html_header :: proc(w: io.Writer, title: string) { - fmt.wprintf(w, string(#load("header.txt.html")), title) - io.write(w, #load("header-lower.txt.html")) -} - -write_html_footer :: proc(w: io.Writer, include_directory_js: bool) { - fmt.wprintf(w, "\n") - - io.write(w, #load("footer.txt.html")) - - if false && include_directory_js { - io.write_string(w, ` -`) - } - - fmt.wprintf(w, "\n\n") -} - -main :: proc() { - if len(os.args) != 2 { - errorf("expected 1 .odin-doc file") - } - data, ok := os.read_entire_file(os.args[1]) - if !ok { - errorf("unable to read file:", os.args[1]) - } - err: doc.Reader_Error - header, err = doc.read_from_bytes(data) - switch err { - case .None: - case .Header_Too_Small: - errorf("file is too small for the file format") - case .Invalid_Magic: - errorf("invalid magic for the file format") - case .Data_Too_Small: - errorf("data is too small for the file format") - case .Invalid_Version: - errorf("invalid file format version") - } - files = array(header.files) - pkgs = array(header.pkgs) - entities = array(header.entities) - types = array(header.types) - - { - fullpaths: [dynamic]string - defer delete(fullpaths) - - for pkg in pkgs[1:] { - append(&fullpaths, str(pkg.fullpath)) - } - path_prefix := common_prefix(fullpaths[:]) - - pkgs_to_use = make(map[string]^doc.Pkg) - fullpath_loop: for fullpath, i in fullpaths { - path := strings.trim_prefix(fullpath, path_prefix) - if !strings.has_prefix(path, "core/") { - continue fullpath_loop - } - pkg := &pkgs[i+1] - if len(array(pkg.entities)) == 0 { - continue fullpath_loop - } - trimmed_path := strings.trim_prefix(path, "core/") - if strings.has_prefix(trimmed_path, "sys") { - continue fullpath_loop - } - - pkgs_to_use[trimmed_path] = pkg - } - for path, pkg in pkgs_to_use { - pkg_to_path[pkg] = path - } - } - - b := strings.make_builder() - defer strings.destroy_builder(&b) - w := strings.to_writer(&b) - - { - strings.reset_builder(&b) - write_html_header(w, "Packages - pkg.odin-lang.org") - write_home_page(w) - write_html_footer(w, true) - os.write_entire_file("index.html", b.buf[:]) - } - - { - strings.reset_builder(&b) - write_html_header(w, "core library - pkg.odin-lang.org") - write_core_directory(w) - write_html_footer(w, true) - os.make_directory("core", 0) - os.write_entire_file("core/index.html", b.buf[:]) - } - - for path, pkg in pkgs_to_use { - strings.reset_builder(&b) - write_html_header(w, 
fmt.tprintf("package %s - pkg.odin-lang.org", path)) - write_pkg(w, path, pkg) - write_html_footer(w, false) - recursive_make_directory(path, "core") - os.write_entire_file(fmt.tprintf("core/%s/index.html", path), b.buf[:]) - } -} - -write_home_page :: proc(w: io.Writer) { - fmt.wprintln(w, `
`) - defer fmt.wprintln(w, `
`) - - { - fmt.wprintln(w, ``) - fmt.wprintln(w, `
`) - defer fmt.wprintln(w, `
`) - - fmt.wprintln(w, ``) - - } - - - fmt.wprintln(w, `
`) - defer fmt.wprintln(w, `
`) - - fmt.wprintln(w, "
") - fmt.wprintln(w, "

Odin Packages

") - fmt.wprintln(w, "
") - fmt.wprintln(w, "
") - defer fmt.wprintln(w, "
") - - fmt.wprintln(w, `
`) - fmt.wprintln(w, `

Core Library Collection

`) - fmt.wprintln(w, `

Documentation for all the packages part of the core library collection.

`) - fmt.wprintln(w, `
`) - - fmt.wprintln(w, `
`) - fmt.wprintln(w, `

Vendor Library Collection

`) - fmt.wprintln(w, `

Documentation for all the packages part of the vendor library collection.

`) - fmt.wprintln(w, `

Coming Soon.

`) - fmt.wprintln(w, `
`) - - - -} - - - -Dir_Node :: struct { - dir: string, - path: string, - name: string, - pkg: ^doc.Pkg, - children: [dynamic]^Dir_Node, -} - -generate_directory_tree :: proc() -> (root: ^Dir_Node) { - sort_tree :: proc(node: ^Dir_Node) { - slice.sort_by_key(node.children[:], proc(node: ^Dir_Node) -> string {return node.name}) - for child in node.children { - sort_tree(child) - } - } - root = new(Dir_Node) - root.children = make([dynamic]^Dir_Node) - children := make([dynamic]^Dir_Node) - for path, pkg in pkgs_to_use { - dir, _, inner := strings.partition(path, "/") - if inner == "" { - node := new_clone(Dir_Node{ - dir = dir, - name = dir, - path = path, - pkg = pkg, - }) - append(&root.children, node) - } else { - node := new_clone(Dir_Node{ - dir = dir, - name = inner, - path = path, - pkg = pkg, - }) - append(&children, node) - } - } - child_loop: for child in children { - dir, _, inner := strings.partition(child.path, "/") - for node in root.children { - if node.dir == dir { - append(&node.children, child) - continue child_loop - } - } - parent := new_clone(Dir_Node{ - dir = dir, - name = dir, - path = dir, - pkg = nil, - }) - append(&root.children, parent) - append(&parent.children, child) - } - - sort_tree(root) - - return -} - -write_core_directory :: proc(w: io.Writer) { - root := generate_directory_tree() - - fmt.wprintln(w, `
`) - defer fmt.wprintln(w, `
`) - { - fmt.wprintln(w, `
`) - fmt.wprintln(w, `
`) - fmt.wprintln(w, "

Core Library Collection

") - fmt.wprintln(w, "
    ") - fmt.wprintf(w, "
  • License: BSD-3-Clause
  • \n", GITHUB_LICENSE_URL) - fmt.wprintf(w, "
  • Repository: {0:s}
  • \n", GITHUB_CORE_URL) - fmt.wprintln(w, "
") - fmt.wprintln(w, "
") - fmt.wprintln(w, "
") - fmt.wprintln(w, `
`) - } - fmt.wprintln(w, `
`) - defer fmt.wprintln(w, `
`) - - fmt.wprintln(w, "
") - fmt.wprintln(w, `

Directories

`) - fmt.wprintln(w, "
") - - fmt.wprintln(w, "
") - fmt.wprintln(w, "\t") - fmt.wprintln(w, "\t\t") - - for dir in root.children { - if len(dir.children) != 0 { - fmt.wprint(w, ``) - io.write_string(w, ``) - fmt.wprintf(w, "\n") - - for child in dir.children { - assert(child.pkg != nil) - fmt.wprintf(w, ``) - - line_doc, _, _ := strings.partition(str(child.pkg.docs), "\n") - line_doc = strings.trim_space(line_doc) - io.write_string(w, ``) - - fmt.wprintf(w, "") - fmt.wprintf(w, "\n") - } - } - - fmt.wprintln(w, "\t\t") - fmt.wprintln(w, "\t
`, dir.dir) - } else { - fmt.wprintf(w, `
`, dir.dir) - } - - if dir.pkg != nil { - fmt.wprintf(w, `%s`, BASE_CORE_URL, dir.path, dir.name) - } else { - fmt.wprintf(w, "%s", dir.name) - } - io.write_string(w, ``) - if dir.pkg != nil { - line_doc, _, _ := strings.partition(str(dir.pkg.docs), "\n") - line_doc = strings.trim_space(line_doc) - if line_doc != "" { - write_doc_line(w, line_doc) - } - } - io.write_string(w, `
`, str(child.pkg.name)) - fmt.wprintf(w, `%s`, BASE_CORE_URL, child.path, child.name) - io.write_string(w, ``) - if line_doc != "" { - write_doc_line(w, line_doc) - } - io.write_string(w, `
") - fmt.wprintln(w, "
") -} - -is_entity_blank :: proc(e: doc.Entity_Index) -> bool { - name := str(entities[e].name) - return name == "" -} - -write_where_clauses :: proc(w: io.Writer, where_clauses: []doc.String) { - if len(where_clauses) != 0 { - io.write_string(w, " where ") - for clause, i in where_clauses { - if i > 0 { - io.write_string(w, ", ") - } - io.write_string(w, str(clause)) - } - } -} - - -Write_Type_Flag :: enum { - Is_Results, - Variadic, - Allow_Indent, - Poly_Names, -} -Write_Type_Flags :: distinct bit_set[Write_Type_Flag] -Type_Writer :: struct { - w: io.Writer, - pkg: doc.Pkg_Index, - indent: int, - generic_scope: map[string]bool, -} - -write_type :: proc(using writer: ^Type_Writer, type: doc.Type, flags: Write_Type_Flags) { - write_param_entity :: proc(using writer: ^Type_Writer, e, next_entity: ^doc.Entity, flags: Write_Type_Flags, name_width := 0) { - name := str(e.name) - - write_padding :: proc(w: io.Writer, name: string, name_width: int) { - for _ in 0..`, BASE_CORE_URL) - io.write_string(w, init_string) - io.write_string(w, ``) - case strings.has_prefix(init_string, "context."): - io.write_string(w, name) - io.write_string(w, " := ") - fmt.wprintf(w, ``, BASE_CORE_URL) - io.write_string(w, init_string) - io.write_string(w, ``) - case: - the_type := types[e.type] - type_flags := flags - {.Is_Results} - if .Param_Ellipsis in e.flags { - type_flags += {.Variadic} - } - - #partial switch e.kind { - case .Constant: - assert(name != "") - io.write_byte(w, '$') - io.write_string(w, name) - if name != "" && init_string == "" && next_entity != nil && e.field_group_index >= 0 { - if e.field_group_index == next_entity.field_group_index && e.type == next_entity.type { - return - } - } - - generic_scope[name] = true - if !is_type_untyped(the_type) { - io.write_string(w, ": ") - write_padding(w, name, name_width) - write_type(writer, the_type, type_flags) - io.write_string(w, " = ") - io.write_string(w, init_string) - } else { - io.write_string(w, " := ") - io.write_string(w, init_string) - } - return - - case .Variable: - if name != "" && init_string == "" && next_entity != nil && e.field_group_index >= 0 { - if e.field_group_index == next_entity.field_group_index && e.type == next_entity.type { - io.write_string(w, name) - return - } - } - - if name != "" { - io.write_string(w, name) - io.write_string(w, ": ") - write_padding(w, name, name_width) - } - write_type(writer, the_type, type_flags) - case .Type_Name: - io.write_byte(w, '$') - io.write_string(w, name) - generic_scope[name] = true - io.write_string(w, ": ") - write_padding(w, name, name_width) - if the_type.kind == .Generic { - io.write_string(w, "typeid") - if ts := array(the_type.types); len(ts) == 1 { - io.write_byte(w, '/') - write_type(writer, types[ts[0]], type_flags) - } - } else { - write_type(writer, the_type, type_flags) - } - } - - if init_string != "" { - io.write_string(w, " = ") - io.write_string(w, init_string) - } - } - } - write_poly_params :: proc(using writer: ^Type_Writer, type: doc.Type, flags: Write_Type_Flags) { - if type.polymorphic_params != 0 { - io.write_byte(w, '(') - write_type(writer, types[type.polymorphic_params], flags+{.Poly_Names}) - io.write_byte(w, ')') - } - - write_where_clauses(w, array(type.where_clauses)) - } - do_indent :: proc(using writer: ^Type_Writer, flags: Write_Type_Flags) { - if .Allow_Indent not_in flags { - return - } - for _ in 0.. 
(name_width: int) { - for entity_index in type_entities { - e := &entities[entity_index] - name := str(e.name) - name_width = max(len(name), name_width) - } - return - } - - - type_entities := array(type.entities) - type_types := array(type.types) - switch type.kind { - case .Invalid: - // ignore - case .Basic: - type_flags := transmute(doc.Type_Flags_Basic)type.flags - if is_type_untyped(type) { - io.write_string(w, str(type.name)) - } else { - fmt.wprintf(w, `%s`, str(type.name)) - } - case .Named: - e := entities[type_entities[0]] - name := str(type.name) - tn_pkg := files[e.pos.file].pkg - - if tn_pkg != pkg { - fmt.wprintf(w, `%s.`, str(pkgs[tn_pkg].name)) - } - if .Private in e.flags { - io.write_string(w, name) - } else if n := strings.contains_rune(name, '('); n >= 0 { - fmt.wprintf(w, `{1:s}`, pkg_to_path[&pkgs[tn_pkg]], name[:n], BASE_CORE_URL) - io.write_string(w, name[n:]) - } else { - fmt.wprintf(w, `{1:s}`, pkg_to_path[&pkgs[tn_pkg]], name, BASE_CORE_URL) - } - case .Generic: - name := str(type.name) - if name not_in generic_scope { - io.write_byte(w, '$') - } - io.write_string(w, name) - if name not_in generic_scope && len(array(type.types)) == 1 { - io.write_byte(w, '/') - write_type(writer, types[type_types[0]], flags) - } - case .Pointer: - io.write_byte(w, '^') - write_type(writer, types[type_types[0]], flags) - case .Array: - assert(type.elem_count_len == 1) - io.write_byte(w, '[') - io.write_uint(w, uint(type.elem_counts[0])) - io.write_byte(w, ']') - write_type(writer, types[type_types[0]], flags) - case .Enumerated_Array: - io.write_byte(w, '[') - write_type(writer, types[type_types[0]], flags) - io.write_byte(w, ']') - write_type(writer, types[type_types[1]], flags) - case .Slice: - if .Variadic in flags { - io.write_string(w, "..") - } else { - io.write_string(w, "[]") - } - write_type(writer, types[type_types[0]], flags - {.Variadic}) - case .Dynamic_Array: - io.write_string(w, "[dynamic]") - write_type(writer, types[type_types[0]], flags) - case .Map: - io.write_string(w, "map[") - write_type(writer, types[type_types[0]], flags) - io.write_byte(w, ']') - write_type(writer, types[type_types[1]], flags) - case .Struct: - type_flags := transmute(doc.Type_Flags_Struct)type.flags - io.write_string(w, "struct") - write_poly_params(writer, type, flags) - if .Packed in type_flags { io.write_string(w, " #packed") } - if .Raw_Union in type_flags { io.write_string(w, " #raw_union") } - if custom_align := str(type.custom_align); custom_align != "" { - io.write_string(w, " #align") - io.write_string(w, custom_align) - } - io.write_string(w, " {") - - tags := array(type.tags) - - if len(type_entities) != 0 { - do_newline(writer, flags) - indent += 1 - name_width := calc_name_width(type_entities) - - for entity_index, i in type_entities { - e := &entities[entity_index] - next_entity: ^doc.Entity = nil - if i+1 < len(type_entities) { - next_entity = &entities[type_entities[i+1]] - } - do_indent(writer, flags) - write_param_entity(writer, e, next_entity, flags, name_width) - - if tag := str(tags[i]); tag != "" { - io.write_byte(w, ' ') - io.write_quoted_string(w, tag) - } - - io.write_byte(w, ',') - do_newline(writer, flags) - } - indent -= 1 - do_indent(writer, flags) - } - io.write_string(w, "}") - case .Union: - type_flags := transmute(doc.Type_Flags_Union)type.flags - io.write_string(w, "union") - write_poly_params(writer, type, flags) - if .No_Nil in type_flags { io.write_string(w, " #no_nil") } - if .Maybe in type_flags { io.write_string(w, " #maybe") } - if custom_align := 
str(type.custom_align); custom_align != "" { - io.write_string(w, " #align") - io.write_string(w, custom_align) - } - io.write_string(w, " {") - if len(type_types) > 1 { - do_newline(writer, flags) - indent += 1 - for type_index in type_types { - do_indent(writer, flags) - write_type(writer, types[type_index], flags) - io.write_string(w, ", ") - do_newline(writer, flags) - } - indent -= 1 - do_indent(writer, flags) - } - io.write_string(w, "}") - case .Enum: - io.write_string(w, "enum") - if len(type_types) != 0 { - io.write_byte(w, ' ') - write_type(writer, types[type_types[0]], flags) - } - io.write_string(w, " {") - do_newline(writer, flags) - indent += 1 - - name_width := calc_name_width(type_entities) - - for entity_index in type_entities { - e := &entities[entity_index] - - name := str(e.name) - do_indent(writer, flags) - io.write_string(w, name) - - if init_string := str(e.init_string); init_string != "" { - for _ in 0.. 1 || !is_entity_blank(type_entities[0])) - if require_parens { io.write_byte(w, '(') } - for entity_index, i in type_entities { - if i > 0 { - io.write_string(w, ", ") - } - e := &entities[entity_index] - next_entity: ^doc.Entity = nil - if i+1 < len(type_entities) { - next_entity = &entities[type_entities[i+1]] - } - write_param_entity(writer, e, next_entity, flags) - } - if require_parens { io.write_byte(w, ')') } - - case .Proc: - type_flags := transmute(doc.Type_Flags_Proc)type.flags - io.write_string(w, "proc") - cc := str(type.calling_convention) - if cc != "" { - io.write_byte(w, ' ') - io.write_quoted_string(w, cc) - io.write_byte(w, ' ') - } - params := array(type.types)[0] - results := array(type.types)[1] - io.write_byte(w, '(') - write_type(writer, types[params], flags) - io.write_byte(w, ')') - if results != 0 { - assert(.Diverging not_in type_flags) - io.write_string(w, " -> ") - write_type(writer, types[results], flags+{.Is_Results}) - } - if .Diverging in type_flags { - io.write_string(w, " -> !") - } - if .Optional_Ok in type_flags { - io.write_string(w, " #optional_ok") - } - - case .Bit_Set: - type_flags := transmute(doc.Type_Flags_Bit_Set)type.flags - io.write_string(w, "bit_set[") - if .Op_Lt in type_flags { - io.write_uint(w, uint(type.elem_counts[0])) - io.write_string(w, "..<") - io.write_uint(w, uint(type.elem_counts[1])) - } else if .Op_Lt_Eq in type_flags { - io.write_uint(w, uint(type.elem_counts[0])) - io.write_string(w, "..=") - io.write_uint(w, uint(type.elem_counts[1])) - } else { - write_type(writer, types[type_types[0]], flags) - } - if .Underlying_Type in type_flags { - write_type(writer, types[type_types[1]], flags) - } - io.write_string(w, "]") - case .Simd_Vector: - io.write_string(w, "#simd[") - io.write_uint(w, uint(type.elem_counts[0])) - io.write_byte(w, ']') - case .SOA_Struct_Fixed: - io.write_string(w, "#soa[") - io.write_uint(w, uint(type.elem_counts[0])) - io.write_byte(w, ']') - case .SOA_Struct_Slice: - io.write_string(w, "#soa[]") - case .SOA_Struct_Dynamic: - io.write_string(w, "#soa[dynamic]") - case .Relative_Pointer: - io.write_string(w, "#relative(") - write_type(writer, types[type_types[1]], flags) - io.write_string(w, ") ") - write_type(writer, types[type_types[0]], flags) - case .Relative_Slice: - io.write_string(w, "#relative(") - write_type(writer, types[type_types[1]], flags) - io.write_string(w, ") ") - write_type(writer, types[type_types[0]], flags) - case .Multi_Pointer: - io.write_string(w, "[^]") - write_type(writer, types[type_types[0]], flags) - case .Matrix: - io.write_string(w, "matrix[") - 
io.write_uint(w, uint(type.elem_counts[0])) - io.write_string(w, ", ") - io.write_uint(w, uint(type.elem_counts[1])) - io.write_string(w, "]") - write_type(writer, types[type_types[0]], flags) - } -} - -write_doc_line :: proc(w: io.Writer, text: string) { - text := text - for len(text) != 0 { - if strings.count(text, "`") >= 2 { - n := strings.index_byte(text, '`') - io.write_string(w, text[:n]) - io.write_string(w, "") - remaining := text[n+1:] - m := strings.index_byte(remaining, '`') - io.write_string(w, remaining[:m]) - io.write_string(w, "") - text = remaining[m+1:] - } else { - io.write_string(w, text) - return - } - } -} - -write_docs :: proc(w: io.Writer, pkg: ^doc.Pkg, docs: string) { - if docs == "" { - return - } - Block_Kind :: enum { - Paragraph, - Code, - } - Block :: struct { - kind: Block_Kind, - lines: []string, - } - - lines: [dynamic]string - it := docs - for line_ in strings.split_iterator(&it, "\n") { - line := strings.trim_right_space(line_) - append(&lines, line) - } - - curr_block_kind := Block_Kind.Paragraph - start := 0 - blocks: [dynamic]Block - - for line, i in lines { - text := strings.trim_space(line) - switch curr_block_kind { - case .Paragraph: - if strings.has_prefix(line, "\t") { - if i-start > 0 { - append(&blocks, Block{curr_block_kind, lines[start:i]}) - } - curr_block_kind, start = .Code, i - } else if text == "" { - if i-start > 0 { - append(&blocks, Block{curr_block_kind, lines[start:i]}) - } - start = i - } - case .Code: - if text == "" || strings.has_prefix(line, "\t") { - continue - } - - if i-start > 0 { - append(&blocks, Block{curr_block_kind, lines[start:i]}) - } - curr_block_kind, start = .Paragraph, i - } - } - if start < len(lines) { - if len(lines)-start > 0 { - append(&blocks, Block{curr_block_kind, lines[start:]}) - } - } - - for block in &blocks { - trim_amount := 0 - for trim_amount = 0; trim_amount < len(block.lines); trim_amount += 1 { - line := block.lines[trim_amount] - if strings.trim_space(line) != "" { - break - } - } - block.lines = block.lines[trim_amount:] - } - - for block, i in blocks { - if len(block.lines) == 0 { - continue - } - prev_line := "" - if i > 0 { - prev_lines := blocks[i-1].lines - if len(prev_lines) > 0 { - prev_line = prev_lines[len(prev_lines)-1] - } - } - prev_line = strings.trim_space(prev_line) - - lines := block.lines[:] - - end_line := block.lines[len(lines)-1] - if block.kind == .Paragraph && i+1 < len(blocks) { - if strings.has_prefix(end_line, "Example:") && blocks[i+1].kind == .Code { - lines = lines[:len(lines)-1] - } - } - - switch block.kind { - case .Paragraph: - io.write_string(w, "

") - for line, line_idx in lines { - if line_idx > 0 { - io.write_string(w, "\n") - } - io.write_string(w, line) - } - io.write_string(w, "

\n") - case .Code: - all_blank := len(lines) > 0 - for line in lines { - if strings.trim_space(line) != "" { - all_blank = false - } - } - if all_blank { - continue - } - - if strings.has_prefix(prev_line, "Example:") { - io.write_string(w, "
\n") - defer io.write_string(w, "
\n") - io.write_string(w, "Example:\n") - io.write_string(w, `
`)
-				for line in lines {
-					io.write_string(w, strings.trim_prefix(line, "\t"))
-					io.write_string(w, "\n")
-				}
-				io.write_string(w, "
\n") - } else { - io.write_string(w, "
")
-				for line in lines {
-					io.write_string(w, strings.trim_prefix(line, "\t"))
-					io.write_string(w, "\n")
-				}
-				io.write_string(w, "
\n") - } - } - } -} - -write_pkg :: proc(w: io.Writer, path: string, pkg: ^doc.Pkg) { - fmt.wprintln(w, `
`) - defer fmt.wprintln(w, `
`) - - fmt.wprintln(w, `
`) - - { // breadcrumbs - fmt.wprintln(w, `
`) - defer fmt.wprintln(w, `
`) - - fmt.wprintln(w, ``) - io.write_string(w, "
    \n") - defer io.write_string(w, "
\n") - - fmt.wprintf(w, ``, BASE_CORE_URL) - - dirs := strings.split(path, "/") - for dir, i in dirs { - url := strings.join(dirs[:i+1], "/") - short_path := strings.join(dirs[1:i+1], "/") - - a_class := "breadcrumb-link" - is_curr := i+1 == len(dirs) - if is_curr { - io.write_string(w, `\n") - } - } - - - fmt.wprintf(w, "

package core:%s

\n", path) - overview_docs := strings.trim_space(str(pkg.docs)) - if overview_docs != "" { - fmt.wprintln(w, "

Overview

") - fmt.wprintln(w, "
") - defer fmt.wprintln(w, "
") - - write_docs(w, pkg, overview_docs) - } - - fmt.wprintln(w, `

Index

`) - fmt.wprintln(w, `
`) - pkg_procs: [dynamic]^doc.Entity - pkg_proc_groups: [dynamic]^doc.Entity - pkg_types: [dynamic]^doc.Entity - pkg_vars: [dynamic]^doc.Entity - pkg_consts: [dynamic]^doc.Entity - - for entity_index in array(pkg.entities) { - e := &entities[entity_index] - name := str(e.name) - if name == "" || name[0] == '_' { - continue - } - switch e.kind { - case .Invalid, .Import_Name, .Library_Name: - // ignore - case .Constant: append(&pkg_consts, e) - case .Variable: append(&pkg_vars, e) - case .Type_Name: append(&pkg_types, e) - case .Procedure: append(&pkg_procs, e) - case .Proc_Group: append(&pkg_proc_groups, e) - } - } - - entity_key :: proc(e: ^doc.Entity) -> string { - return str(e.name) - } - - slice.sort_by_key(pkg_procs[:], entity_key) - slice.sort_by_key(pkg_proc_groups[:], entity_key) - slice.sort_by_key(pkg_types[:], entity_key) - slice.sort_by_key(pkg_vars[:], entity_key) - slice.sort_by_key(pkg_consts[:], entity_key) - - write_index :: proc(w: io.Writer, name: string, entities: []^doc.Entity) { - fmt.wprintln(w, `
`) - defer fmt.wprintln(w, `
`) - - - fmt.wprintf(w, `
`+"\n", name) - fmt.wprintf(w, ``+"\n", name) - io.write_string(w, name) - fmt.wprintln(w, ``) - defer fmt.wprintln(w, `
`) - - if len(entities) == 0 { - io.write_string(w, "

This section is empty.

\n") - } else { - fmt.wprintln(w, "
    ") - for e in entities { - name := str(e.name) - fmt.wprintf(w, "
  • {0:s}
  • \n", name) - } - fmt.wprintln(w, "
") - } - } - - entity_ordering := [?]struct{name: string, entities: []^doc.Entity} { - {"Types", pkg_types[:]}, - {"Constants", pkg_consts[:]}, - {"Variables", pkg_vars[:]}, - {"Procedures", pkg_procs[:]}, - {"Procedure Groups", pkg_proc_groups[:]}, - } - - - for eo in entity_ordering { - write_index(w, eo.name, eo.entities) - } - - fmt.wprintln(w, "
") - - - write_entity :: proc(w: io.Writer, e: ^doc.Entity) { - write_attributes :: proc(w: io.Writer, e: ^doc.Entity) { - for attr in array(e.attributes) { - io.write_string(w, "@(") - name := str(attr.name) - value := str(attr.value) - io.write_string(w, name) - if value != "" { - io.write_string(w, "=") - io.write_string(w, value) - } - io.write_string(w, ")\n") - } - } - - pkg_index := files[e.pos.file].pkg - pkg := &pkgs[pkg_index] - writer := &Type_Writer{ - w = w, - pkg = pkg_index, - } - defer delete(writer.generic_scope) - - name := str(e.name) - path := pkg_to_path[pkg] - filename := slashpath.base(str(files[e.pos.file].name)) - fmt.wprintf(w, "

{0:s}", name) - fmt.wprintf(w, " ¶") - if e.pos.file != 0 && e.pos.line > 0 { - src_url := fmt.tprintf("%s/%s/%s#L%d", GITHUB_CORE_URL, path, filename, e.pos.line) - fmt.wprintf(w, "", src_url) - } - fmt.wprintf(w, "

\n") - fmt.wprintln(w, `
`) - - switch e.kind { - case .Invalid, .Import_Name, .Library_Name: - // ignore - case .Constant: - fmt.wprint(w, `
`)
-			the_type := types[e.type]
-
-			init_string := str(e.init_string)
-			assert(init_string != "")
-
-			ignore_type := true
-			if the_type.kind == .Basic && is_type_untyped(the_type) {
-			} else {
-				ignore_type = false
-				type_name := str(the_type.name)
-				if type_name != "" && strings.has_prefix(init_string, type_name) {
-					ignore_type = true
-				}
-			}
-
-			if ignore_type {
-				fmt.wprintf(w, "%s :: ", name)
-			} else {
-				fmt.wprintf(w, "%s: ", name)
-				write_type(writer, the_type, {.Allow_Indent})
-				fmt.wprintf(w, " : ")
-			}
-
-
-			io.write_string(w, init_string)
-			fmt.wprintln(w, "
") - case .Variable: - fmt.wprint(w, `
`)
-			write_attributes(w, e)
-			fmt.wprintf(w, "%s: ", name)
-			write_type(writer, types[e.type], {.Allow_Indent})
-			init_string := str(e.init_string)
-			if init_string != "" {
-				io.write_string(w, " = ")
-				io.write_string(w, "…")
-			}
-			fmt.wprintln(w, "
") - - case .Type_Name: - fmt.wprint(w, `
`)
-			fmt.wprintf(w, "%s :: ", name)
-			the_type := types[e.type]
-			type_to_print := the_type
-			if the_type.kind == .Named && .Type_Alias not_in e.flags {
-				if e.pos == entities[array(the_type.entities)[0]].pos {
-					bt := base_type(the_type)
-					#partial switch bt.kind {
-					case .Struct, .Union, .Proc, .Enum:
-						// Okay
-					case:
-						io.write_string(w, "distinct ")
-					}
-					type_to_print = bt
-				}
-			}
-			write_type(writer, type_to_print, {.Allow_Indent})
-			fmt.wprintln(w, "")
-		case .Procedure:
-			fmt.wprint(w, ``)
-			fmt.wprintf(w, "%s :: ", name)
-			write_type(writer, types[e.type], nil)
-			write_where_clauses(w, array(e.where_clauses))
-			fmt.wprint(w, " {…}")
-			fmt.wprintln(w, "")
-		case .Proc_Group:
-			fmt.wprint(w, ``)
-			fmt.wprintf(w, "%s :: proc{{\n", name)
-			for entity_index in array(e.grouped_entities) {
-				this_proc := &entities[entity_index]
-				this_pkg := files[this_proc.pos.file].pkg
-				io.write_byte(w, '\t')
-				if this_pkg != pkg_index {
-					fmt.wprintf(w, "%s.", str(pkgs[this_pkg].name))
-				}
-				name := str(this_proc.name)
-				fmt.wprintf(w, ``, pkg_to_path[&pkgs[this_pkg]], name, BASE_CORE_URL)
-				io.write_string(w, name)
-				io.write_string(w, ``)
-				io.write_byte(w, ',')
-				io.write_byte(w, '\n')
-			}
-			fmt.wprintln(w, "}")
-			fmt.wprintln(w, "")
-
-		}
-		fmt.wprintln(w, ``)
-
-		the_docs := strings.trim_space(str(e.docs))
-		if the_docs != "" {
-			fmt.wprintln(w, ``)
-			fmt.wprintln(w, ` `)
-			write_docs(w, pkg, the_docs)
-			fmt.wprintln(w, ``)
-		}
-	}
-	write_entities :: proc(w: io.Writer, title: string, entities: []^doc.Entity) {
-		fmt.wprintf(w, "{0:s}\n", title)
-		fmt.wprintln(w, ``)
-		if len(entities) == 0 {
-			io.write_string(w, "This section is empty.\n")
-		} else {
-			for e in entities {
-				fmt.wprintln(w, ``)
-				write_entity(w, e)
-				fmt.wprintln(w, ``)
-			}
-		}
-		fmt.wprintln(w, "")
-	}
-
-	for eo in entity_ordering {
-		write_entities(w, eo.name, eo.entities)
-	}
-
-	fmt.wprintln(w, `Source Files`)
-	fmt.wprintln(w, "")
-	any_hidden := false
-	source_file_loop: for file_index in array(pkg.files) {
-		file := files[file_index]
-		filename := slashpath.base(str(file.name))
-		switch {
-		case
-			strings.has_suffix(filename, "_windows.odin"),
-			strings.has_suffix(filename, "_darwin.odin"),
-			strings.has_suffix(filename, "_essence.odin"),
-			strings.has_suffix(filename, "_freebsd.odin"),
-			strings.has_suffix(filename, "_wasi.odin"),
-			strings.has_suffix(filename, "_js.odin"),
-			strings.has_suffix(filename, "_freestanding.odin"),
-
-			strings.has_suffix(filename, "_amd64.odin"),
-			strings.has_suffix(filename, "_i386.odin"),
-			strings.has_suffix(filename, "_arch64.odin"),
-			strings.has_suffix(filename, "_wasm32.odin"),
-			strings.has_suffix(filename, "_wasm64.odin"),
-			false:
-			any_hidden = true
-			continue source_file_loop
-		}
-		fmt.wprintf(w, `%s`, GITHUB_CORE_URL, path, filename, filename)
-		fmt.wprintln(w)
-	}
-	if any_hidden {
-		fmt.wprintln(w, "(hidden platform specific files)")
-	}
-	fmt.wprintln(w, "")
-
-
-	fmt.wprintln(w, ``)
-	{
-		write_link :: proc(w: io.Writer, id, text: string) {
-			fmt.wprintf(w, `%s`, id, text)
-		}
-		fmt.wprintln(w, ``)
-	}
-
-}
\ No newline at end of file
diff --git a/tools/odin-html-docs/style.css b/tools/odin-html-docs/style.css
deleted file mode 100644
index cb06a8603..000000000
--- a/tools/odin-html-docs/style.css
+++ /dev/null
@@ -1,140 +0,0 @@
-/* doc directories */
-
-table.directory {
-	/*border: 1px solid #ccc!important;*/
-	table-layout: fixed;
-	border-collapse: collapse;
-}
-
-header.collection-header ul {
-	margin-top: 1em;
-	margin-bottom: 0;
-	padding-left: 0.5em;
-	list-style-type: none;
-}
-
-hr.collection-hr {
-	margin: 0;
-	padding: 0;
-}
-
-
-.doc-directory tr {
-	padding-left: 1em!important;
-	border-top: 1px solid #ccc!important;
-	border-bottom: 1px solid #ccc!important;
-}
-
-.doc-directory td {
-	padding: 0.25em 0.5em;
-}
-.directory-child td {
-	padding-left: 2em!important;
-}
-
-.directory-child td+td {
-	position: relative;
-	left: -1.5em!important;
-	padding-right: 0;
-}
-
-.doc-directory tr[aria-expanded=true] td.pkg-name:before {
-	content: "\2193";
-}
-.doc-directory tr[aria-expanded=false] td.pkg-name:before {
-	content: "\2192"!important;
-}
-
-.doc-directory tr[aria-hidden=true] {
-	display: none;
-}
-
-
-/* doc page */
-
-pre.doc-code {
-	white-space: pre-wrap;
-	word-break: keep-all;
-	word-wrap: break-word;
-	tab-size: 8;
-	background-color: #f8f8f8;
-	color: #202224;
-	border: 1px solid #c6c8ca;
-	border-radius: 0.25rem;
-	padding: 0.625rem;
-}
-pre.doc-code a {
-	font-family: Consolas,Liberation Mono,Menlo,monospace!important;
-	text-decoration: none;
-	color: #2179d8;
-	font-weight: 800;
-}
-
-pre.doc-code a.code-procedure {
-	color: #047919;
-}
-
-.pkg-line-doc {
-	color: #444;
-}
-
-
-.doc-source {
-	display: inline;
-	float: right;
-}
-
-.doc-source a {
-	text-decoration: none;
-	color: #666666;
-	font-size: 0.75em;
-}
-.doc-source a:hover {
-	text-decoration: underline;
-}
-
-a > .a-hidden {
-	opacity: 0;
-}
-a:hover > .a-hidden {
-	opacity: 100;
-}
-section.documentation h3 {
-	font-size: calc(1.1rem + .2vw);
-}
-
-.pkg-index h3 {
-	margin-top: 0 !important;
-	padding-top: 0 !important;
-}
-
-
-.documentation .pkg-entity {
-	padding-bottom: 0.75rem;
-	border-bottom: 1px solid #d0d0d0;
-}
-
-details.doc-index > summary {
-	position: relative;
-	font-size: 1.75rem;
-	left: -1.75rem;
-}
-details.doc-index ul {
-	list-style-type: none;
-}
-
-details.odin-doc-toggle > summary.hideme span:before {
-	content: "Expand description";
-}
-details.odin-doc-toggle[open] > summary.hideme span:before {
-	content: "Close description";
-	opacity: 0.8;
-}
-
-details.odin-doc-toggle[open] > summary.hideme {
-	margin-bottom: 0.5em;
-}
-
-details.code-example > summary {
-	font-weight: 700;
-}
diff --git a/tools/odinfmt/main.odin b/tools/odinfmt/main.odin
index bc1b521ca..cebb20888 100644
--- a/tools/odinfmt/main.odin
+++ b/tools/odinfmt/main.odin
@@ -114,7 +114,6 @@ main :: proc() {
 		filepath.walk(path, walk_files);
 
 		for file in files {
-			fmt.println(file);
 			backup_path := strings.concatenate({file, "_bk"});
 			defer delete(backup_path);
 
diff --git a/vendor/sdl2/ttf/SDL2_ttf.dll b/vendor/sdl2/ttf/SDL2_ttf.dll
index 575636a91..2acc0e5bc 100644
Binary files a/vendor/sdl2/ttf/SDL2_ttf.dll and b/vendor/sdl2/ttf/SDL2_ttf.dll differ
diff --git a/vendor/sdl2/ttf/SDL2_ttf.lib b/vendor/sdl2/ttf/SDL2_ttf.lib
index 28810a7bc..bd53219e9 100644
Binary files a/vendor/sdl2/ttf/SDL2_ttf.lib and b/vendor/sdl2/ttf/SDL2_ttf.lib differ
diff --git a/vendor/sdl2/ttf/sdl_ttf.odin b/vendor/sdl2/ttf/sdl_ttf.odin
index 20db09729..8cd1df505 100644
--- a/vendor/sdl2/ttf/sdl_ttf.odin
+++ b/vendor/sdl2/ttf/sdl_ttf.odin
@@ -14,7 +14,7 @@ bool :: SDL.bool
 
 MAJOR_VERSION :: 2
 MINOR_VERSION :: 0
-PATCHLEVEL :: 15
+PATCHLEVEL :: 18
 
 UNICODE_BOM_NATIVE :: 0xFEFF
 UNICODE_BOM_SWAPPED :: 0xFFFE