mirror of
https://github.com/nim-lang/Nim.git
synced 2026-04-22 23:35:22 +00:00
renderer: use the biggest integer type for masking literals (#15482)
On 32-bit systems the mask would have a size of 32 bits, which is smaller than the BiggestInt (usually 64-bit) it was masked against. For some reason this only affects 32-bit Windows but not 32-bit Linux. It might just be a difference in how gcc handles out-of-bound shifts on Windows versus Linux.
This commit is contained in:
@@ -336,7 +336,7 @@ proc litAux(g: TSrcGen; n: PNode, x: BiggestInt, size: int): string =
|
||||
|
||||
if nfBase2 in n.flags: result = "0b" & toBin(x, size * 8)
|
||||
elif nfBase8 in n.flags:
|
||||
var y = if size < sizeof(BiggestInt): x and ((1 shl (size*8)) - 1)
|
||||
var y = if size < sizeof(BiggestInt): x and ((1.BiggestInt shl (size*8)) - 1)
|
||||
else: x
|
||||
result = "0o" & toOct(y, size * 3)
|
||||
elif nfBase16 in n.flags: result = "0x" & toHex(x, size * 2)
|
||||
|
||||
Reference in New Issue
Block a user