From 562b518394ca358ac8aef1df9b480106fafe9375 Mon Sep 17 00:00:00 2001
From: gingerBill
Date: Sun, 6 Oct 2019 18:54:29 +0100
Subject: [PATCH] Change print*_err to eprint* in core library

---
 core/encoding/cel/cel.odin         |  6 +++---
 core/encoding/cel/token.odin       |  6 +++---
 core/odin/parser/parser.odin       | 12 ++++++------
 core/odin/tokenizer/tokenizer.odin |  6 +++---
 4 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/core/encoding/cel/cel.odin b/core/encoding/cel/cel.odin
index 793dba231..754d8cbfa 100644
--- a/core/encoding/cel/cel.odin
+++ b/core/encoding/cel/cel.odin
@@ -168,9 +168,9 @@ destroy :: proc(p: ^Parser) {
 }
 
 error :: proc(p: ^Parser, pos: Pos, msg: string, args: ..any) {
-	fmt.printf_err("%s(%d:%d) Error: ", pos.file, pos.line, pos.column);
-	fmt.printf_err(msg, ..args);
-	fmt.println_err();
+	fmt.eprintf("%s(%d:%d) Error: ", pos.file, pos.line, pos.column);
+	fmt.eprintf(msg, ..args);
+	fmt.eprintln();
 	p.error_count += 1;
 }
 
diff --git a/core/encoding/cel/token.odin b/core/encoding/cel/token.odin
index 1ad86b299..a2bcb963a 100644
--- a/core/encoding/cel/token.odin
+++ b/core/encoding/cel/token.odin
@@ -183,9 +183,9 @@ tokenizer_init :: proc(t: ^Tokenizer, src: []byte, file := "") {
 }
 
 token_error :: proc(t: ^Tokenizer, msg: string, args: ..any) {
-	fmt.printf_err("%s(%d:%d) Error: ", t.file, t.line_count, t.read_offset-t.line_offset+1);
-	fmt.printf_err(msg, ..args);
-	fmt.println_err();
+	fmt.eprintf("%s(%d:%d) Error: ", t.file, t.line_count, t.read_offset-t.line_offset+1);
+	fmt.eprintf(msg, ..args);
+	fmt.eprintln();
 	t.error_count += 1;
 }
 
diff --git a/core/odin/parser/parser.odin b/core/odin/parser/parser.odin
index 7c5c096ba..cc3768030 100644
--- a/core/odin/parser/parser.odin
+++ b/core/odin/parser/parser.odin
@@ -51,14 +51,14 @@ Import_Decl_Kind :: enum {
 }
 
 default_warning_handler :: proc(pos: token.Pos, msg: string, args: ..any) {
-	fmt.printf_err("%s(%d:%d): Warning: ", pos.file, pos.line, pos.column);
-	fmt.printf_err(msg, ..args);
-	fmt.printf_err("\n");
+	fmt.eprintf("%s(%d:%d): Warning: ", pos.file, pos.line, pos.column);
+	fmt.eprintf(msg, ..args);
+	fmt.eprintf("\n");
 }
 default_error_handler :: proc(pos: token.Pos, msg: string, args: ..any) {
-	fmt.printf_err("%s(%d:%d): ", pos.file, pos.line, pos.column);
-	fmt.printf_err(msg, ..args);
-	fmt.printf_err("\n");
+	fmt.eprintf("%s(%d:%d): ", pos.file, pos.line, pos.column);
+	fmt.eprintf(msg, ..args);
+	fmt.eprintf("\n");
 }
 
 warn :: proc(p: ^Parser, pos: token.Pos, msg: string, args: ..any) {
diff --git a/core/odin/tokenizer/tokenizer.odin b/core/odin/tokenizer/tokenizer.odin
index b29ac700d..98aef62b0 100644
--- a/core/odin/tokenizer/tokenizer.odin
+++ b/core/odin/tokenizer/tokenizer.odin
@@ -54,9 +54,9 @@ offset_to_pos :: proc(t: ^Tokenizer, offset: int) -> token.Pos {
 }
 
 default_error_handler :: proc(pos: token.Pos, msg: string, args: ..any) {
-	fmt.printf_err("%s(%d:%d) ", pos.file, pos.line, pos.column);
-	fmt.printf_err(msg, ..args);
-	fmt.printf_err("\n");
+	fmt.eprintf("%s(%d:%d) ", pos.file, pos.line, pos.column);
+	fmt.eprintf(msg, ..args);
+	fmt.eprintf("\n");
 }
 
 error :: proc(t: ^Tokenizer, offset: int, msg: string, args: ..any) {
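
Usage note (not part of the patch): after this rename, error output goes through fmt.eprintf / fmt.eprintln, which write to stderr. The sketch below shows the call pattern the patched handlers use; report_error and its message are hypothetical, only fmt.eprintf and fmt.eprintln come from the patch itself.

package example

import "core:fmt"

// Hypothetical helper mirroring the patched handlers:
// print a "file(line:column) Error: " prefix, then the formatted
// message, then a newline, all to stderr.
report_error :: proc(file: string, line, column: int, msg: string, args: ..any) {
	fmt.eprintf("%s(%d:%d) Error: ", file, line, column);
	fmt.eprintf(msg, ..args);
	fmt.eprintln();
}

main :: proc() {
	report_error("demo.odin", 3, 14, "unexpected token %v", ";");
}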