mirror of
https://github.com/odin-lang/Odin.git
synced 2026-01-08 14:03:14 +00:00
@@ -294,6 +294,24 @@ peek_data_from_memory :: #force_inline proc(z: ^Context_Memory_Input, $T: typeid
	}
}

@(optimization_mode="speed")
peek_data_at_offset_from_memory :: #force_inline proc(z: ^Context_Memory_Input, $T: typeid, #any_int offset: int) -> (res: T, err: io.Error) {
	size :: size_of(T)

	#no_bounds_check {
		if len(z.input_data) >= size + offset {
			buf := z.input_data[offset:][:size]
			return (^T)(&buf[0])^, .None
		}
	}

	if len(z.input_data) == 0 {
		return T{}, .EOF
	} else {
		return T{}, .Short_Buffer
	}
}
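
// A minimal usage sketch (illustrative only; the helper name and byte values are
// made up): peek a little-endian u16 four bytes into a memory-backed context
// without advancing the read position.
_example_peek_at_offset :: proc() {
	ctx := &Context_Memory_Input{input_data = []u8{0xDE, 0xAD, 0xBE, 0xEF, 0x01, 0x02}}
	if v, verr := peek_data_at_offset_from_memory(ctx, u16le, 4); verr == .None {
		assert(v == 0x0201) // Bytes 0x01, 0x02 read little-endian.
	}
}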

@(optimization_mode="speed")
peek_data_from_stream :: #force_inline proc(z: ^Context_Stream_Input, $T: typeid) -> (res: T, err: io.Error) {
	size :: size_of(T)
@@ -321,7 +339,44 @@ peek_data_from_stream :: #force_inline proc(z: ^Context_Stream_Input, $T: typeid
	return res, .None
}

-peek_data :: proc{peek_data_from_memory, peek_data_from_stream}
@(optimization_mode="speed")
peek_data_at_offset_from_stream :: #force_inline proc(z: ^Context_Stream_Input, $T: typeid, #any_int offset: int) -> (res: T, err: io.Error) {
	size :: size_of(T)

	// Get current position to return to.
	cur_pos, e1 := z.input->impl_seek(0, .Current)
	if e1 != .None {
		return T{}, e1
	}

	// Seek to offset.
	pos, e2 := z.input->impl_seek(offset, .Start)
	if e2 != .None {
		return T{}, e2
	}

	r, e3 := io.to_reader_at(z.input)
	if !e3 {
		return T{}, .Empty
	}
	when size <= 128 {
		b: [size]u8
	} else {
		b := make([]u8, size, context.temp_allocator)
	}
	_, e4 := io.read_at(r, b[:], pos)
	if e4 != .None {
		return T{}, .Empty
	}

	// Return read head to original position.
	z.input->impl_seek(cur_pos, .Start)

	res = (^T)(&b[0])^
	return res, .None
}

+peek_data :: proc{peek_data_from_memory, peek_data_from_stream, peek_data_at_offset_from_memory, peek_data_at_offset_from_stream}
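
// Illustrative note: the expanded proc group resolves on the context type and on
// whether an offset argument is present, so (with placeholder names) either
//
//	first, _ := peek_data(ctx, u32le)      // peek at the current position
//	later, _ := peek_data(ctx, u32le, 128) // peek 128 bytes from the start of the input
//
// works against both a Context_Memory_Input and a Context_Stream_Input.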
@@ -46,7 +46,7 @@ Image :: struct {
	height:   int,
	channels: int,
	depth:    int, // Channel depth in bits, typically 8 or 16
-	pixels:   bytes.Buffer,
+	pixels:   bytes.Buffer `fmt:"-"`,
	/*
		Some image loaders/writers can return/take an optional background color.
		For convenience, we return them as u16 so we don't need to switch on the type
@@ -61,6 +61,7 @@ Image_Metadata :: union #shared_nil {
	^Netpbm_Info,
	^PNG_Info,
	^QOI_Info,
	^TGA_Info,
}

@@ -168,6 +169,7 @@ Error :: union #shared_nil {

General_Image_Error :: enum {
	None = 0,
	Unsupported_Option,
	// File I/O
	Unable_To_Read_File,
	Unable_To_Write_File,
@@ -376,10 +378,15 @@ QOI_Info :: struct {
	header: QOI_Header,
}

TGA_Data_Type :: enum u8 {
	Uncompressed_RGB = 2,
	Compressed_RGB   = 10,
}

TGA_Header :: struct #packed {
	id_length:        u8,
	color_map_type:   u8,
-	data_type_code:   u8,
+	data_type_code:   TGA_Data_Type,
	color_map_origin: u16le,
	color_map_length: u16le,
	color_map_depth:  u8,
@@ -390,6 +397,21 @@ TGA_Header :: struct #packed {
}
#assert(size_of(TGA_Header) == 18)

New_TGA_Signature :: "TRUEVISION-XFILE.\x00"

TGA_Footer :: struct #packed {
	extension_area_offset:      u32le,
	developer_directory_offset: u32le,
	signature:                  [18]u8 `fmt:"s"`, // Should match signature if New TGA.
}
#assert(size_of(TGA_Footer) == 26)

TGA_Info :: struct {
	header:   TGA_Header,
	image_id: string,
	footer:   Maybe(TGA_Footer),
}

// Function to help with image buffer calculations
compute_buffer_size :: proc(width, height, channels, depth: int, extra_row_bytes := int(0)) -> (size: int) {
	size = ((((channels * width * depth) + 7) >> 3) + extra_row_bytes) * height
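	// For example (illustrative numbers): an 8-bit RGB image of 100x50 pixels needs
	// ((((3 * 100 * 8) + 7) >> 3) + 0) * 50 = 300 * 50 = 15,000 bytes; the "+ 7 >> 3"
	// rounds each row up to whole bytes, so a 1-bit single-channel 10x10 image takes
	// ((10 + 7) >> 3) * 10 = 20 bytes.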
@@ -4,6 +4,7 @@

	List of contributors:
		Jeroen van Rijn: Initial implementation.
		Benoit Jacquier: tga loader
*/

@@ -14,6 +15,12 @@ import "core:mem"
import "core:image"
import "core:bytes"
import "core:os"
import "core:compress"
import "core:strings"
import "core:fmt"
_ :: fmt

// TODO: alpha_premultiply support

Error :: image.Error
Image :: image.Image
@@ -57,7 +64,7 @@ save_to_memory :: proc(output: ^bytes.Buffer, img: ^Image, options := Options{}
	}

	header := image.TGA_Header{
-		data_type_code   = 0x02, // Color, uncompressed.
+		data_type_code   = .Uncompressed_RGB,
		dimensions       = {u16le(img.width), u16le(img.height)},
		bits_per_pixel   = u8(img.depth * img.channels),
		image_descriptor = 1 << 5, // Origin is top left.
@@ -98,4 +105,214 @@ save_to_file :: proc(output: string, img: ^Image, options := Options{}, allocato
	return nil if write_ok else .Unable_To_Write_File
}

save :: proc{save_to_memory, save_to_file}
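
// A minimal usage sketch (illustrative only; the path and pixel setup are placeholders),
// assuming an 8-bit RGB image whose pixel buffer is filled elsewhere:
//
//	img := new(Image)
//	img.width, img.height, img.channels, img.depth = 2, 2, 3, 8
//	resize(&img.pixels.buf, image.compute_buffer_size(2, 2, 3, 8))
//	// ... write RGB bytes into img.pixels.buf ...
//	err := save("out.tga", img)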

load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
	context.allocator = allocator
	options := options

	if .alpha_premultiply in options {
		return nil, .Unsupported_Option
	}

	if .info in options {
		options |= {.return_metadata, .do_not_decompress_image}
		options -= {.info}
	}

	if .return_header in options && .return_metadata in options {
		options -= {.return_header}
	}

	// First check for a footer.
	filesize := compress.input_size(ctx) or_return

	footer: image.TGA_Footer
	have_valid_footer := false

	if filesize >= size_of(image.TGA_Header) + size_of(image.TGA_Footer) {
		if f, f_err := compress.peek_data(ctx, image.TGA_Footer, filesize - i64(size_of(image.TGA_Footer))); f_err == .None {
			if string(f.signature[:]) == image.New_TGA_Signature {
				have_valid_footer = true
				footer = f
			}
		}
	}

	header := image.read_data(ctx, image.TGA_Header) or_return

	// Header checks
	rle_encoding := false

	switch header.data_type_code {
	case .Compressed_RGB:   rle_encoding = true
	case .Uncompressed_RGB:
	case: return nil, .Unsupported_Format
	}

	if header.bits_per_pixel != 24 && header.bits_per_pixel != 32 {
		return nil, .Unsupported_Format
	}

	if header.image_descriptor & IMAGE_DESCRIPTOR_INTERLEAVING_MASK != 0 {
		return nil, .Unsupported_Format
	}

	if int(header.dimensions[0]) * int(header.dimensions[1]) > image.MAX_DIMENSIONS {
		return nil, .Image_Dimensions_Too_Large
	}

	if img == nil {
		img = new(Image)
	}

	defer if err != nil {
		destroy(img)
	}

	src_channels := int(header.bits_per_pixel) / 8
	img.which = .TGA
	img.channels = 4 if .alpha_add_if_missing in options else src_channels
	img.channels = 3 if .alpha_drop_if_present in options else img.channels

	img.depth  = 8
	img.width  = int(header.dimensions[0])
	img.height = int(header.dimensions[1])

	// Read Image ID if present
	image_id := ""
	if _id, e := compress.read_slice(ctx, int(header.id_length)); e != .None {
		return nil, .Corrupt
	} else {
		if .return_metadata in options {
			id := strings.trim_right_null(string(_id))
			image_id = strings.clone(id)
		}
	}

	if .return_metadata in options {
		info := new(image.TGA_Info)
		info.header   = header
		info.image_id = image_id
		if have_valid_footer {
			info.footer = footer
		}
		img.metadata = info
	}

	if .do_not_decompress_image in options {
		return img, nil
	}

	if !resize(&img.pixels.buf, img.channels * img.width * img.height) {
		return img, .Unable_To_Allocate_Or_Resize
	}

	origin_is_topleft := header.image_descriptor & IMAGE_DESCRIPTOR_TOPLEFT_MASK != 0
	rle_repetition_count := 0
	read_pixel := true
	is_packet_rle := false

	pixel: [4]u8

	stride := img.width * img.channels
	line := 0 if origin_is_topleft else img.height - 1

	for _ in 0..<img.height {
		offset := line * stride
		for _ in 0..<img.width {
			// handle RLE decoding
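			// Each packet starts with a count byte: if the high bit is set it is an RLE
			// packet and the next single pixel is repeated (count & 0x7F) + 1 times;
			// otherwise it is a raw packet of (count & 0x7F) + 1 literal pixels.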
			if rle_encoding {
				if rle_repetition_count == 0 {
					rle_cmd, err := compress.read_u8(ctx)
					if err != .None {
						return img, .Corrupt
					}
					is_packet_rle = (rle_cmd >> 7) != 0
					rle_repetition_count = 1 + int(rle_cmd & 0x7F)
					read_pixel = true
				} else if !is_packet_rle {
					read_pixel = rle_repetition_count > 0
				} else {
					read_pixel = false
				}
			}
			// Read pixel
			if read_pixel {
				src, src_err := compress.read_slice(ctx, src_channels)
				if src_err != .None {
					return img, .Corrupt
				}

				pixel[2] = src[0]
				pixel[1] = src[1]
				pixel[0] = src[2]

				pixel[3] = src_channels == 4 ? src[3] : 255
				if img.channels == 4 {
					if src_channels == 4 {
						img.pixels.buf[offset:][3] = src[3]
					} else {
						img.pixels.buf[offset:][3] = 255
					}
				}
			}

			// Write pixel
			copy(img.pixels.buf[offset:], pixel[:img.channels])
			offset += img.channels
			rle_repetition_count -= 1
		}
		line += 1 if origin_is_topleft else -1
	}
	return img, nil
}

load_from_bytes :: proc(data: []byte, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
	ctx := &compress.Context_Memory_Input{
		input_data = data,
	}

	img, err = load_from_context(ctx, options, allocator)
	return img, err
}

load_from_file :: proc(filename: string, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
	context.allocator = allocator

	data, ok := os.read_entire_file(filename)
	defer delete(data)

	if ok {
		return load_from_bytes(data, options)
	} else {
		return nil, .Unable_To_Read_File
	}
}

load :: proc{load_from_file, load_from_bytes, load_from_context}
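
// A minimal usage sketch (illustrative only; the filename is a placeholder):
//
//	img, err := load("input.tga", Options{.return_metadata})
//	if err == nil {
//		defer destroy(img)
//		if info, ok := img.metadata.(^image.TGA_Info); ok {
//			// header, image_id and (for new-style files) footer are available here.
//		}
//	}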

destroy :: proc(img: ^Image) {
	if img == nil || img.width == 0 || img.height == 0 {
		return
	}

	bytes.buffer_destroy(&img.pixels)
	if v, ok := img.metadata.(^image.TGA_Info); ok {
		delete(v.image_id)
		free(v)
	}

	// Make destroy idempotent
	img.width  = 0
	img.height = 0
	free(img)
}

IMAGE_DESCRIPTOR_INTERLEAVING_MASK :: (1<<6) | (1<<7)
IMAGE_DESCRIPTOR_TOPLEFT_MASK      :: 1<<5
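
// In the TGA image descriptor byte, bits 0-3 hold the number of attribute (alpha) bits
// per pixel, bit 4 selects right-to-left pixel order, bit 5 a top-left origin, and
// bits 6-7 the long-deprecated interleaving mode, which is why the loader above rejects
// files with a non-zero interleaving field.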

@(init, private)
_register :: proc() {
	image.register(.TGA, load_from_bytes, destroy)
}