Big compiler Cleanup (#14777)

This commit is contained in:
Clyybber
2020-08-28 22:18:09 +02:00
committed by GitHub
parent f8c48fc186
commit 13e659cfec
53 changed files with 481 additions and 783 deletions

View File

@@ -17,22 +17,21 @@ export int128
type
TCallingConvention* = enum
ccNimCall, # nimcall, also the default
ccStdCall, # procedure is stdcall
ccCDecl, # cdecl
ccSafeCall, # safecall
ccSysCall, # system call
ccInline, # proc should be inlined
ccNoInline, # proc should not be inlined
ccFastCall, # fastcall (pass parameters in registers)
ccThisCall, # thiscall (parameters are pushed right-to-left)
ccClosure, # proc has a closure
ccNimCall # nimcall, also the default
ccStdCall # procedure is stdcall
ccCDecl # cdecl
ccSafeCall # safecall
ccSysCall # system call
ccInline # proc should be inlined
ccNoInline # proc should not be inlined
ccFastCall # fastcall (pass parameters in registers)
ccThisCall # thiscall (parameters are pushed right-to-left)
ccClosure # proc has a closure
ccNoConvention # needed for generating proper C procs sometimes
const
CallingConvToStr*: array[TCallingConvention, string] = ["nimcall", "stdcall",
"cdecl", "safecall", "syscall", "inline", "noinline", "fastcall", "thiscall",
"closure", "noconv"]
const CallingConvToStr*: array[TCallingConvention, string] = ["nimcall", "stdcall",
"cdecl", "safecall", "syscall", "inline", "noinline", "fastcall", "thiscall",
"closure", "noconv"]
type
TNodeKind* = enum # order is extremely important, because ranges are used
@@ -1363,7 +1362,7 @@ proc newType*(kind: TTypeKind, owner: PSym): PType =
proc mergeLoc(a: var TLoc, b: TLoc) =
if a.k == low(a.k): a.k = b.k
if a.storage == low(a.storage): a.storage = b.storage
a.flags = a.flags + b.flags
a.flags.incl b.flags
if a.lode == nil: a.lode = b.lode
if a.r == nil: a.r = b.r
@@ -1388,7 +1387,7 @@ proc assignType*(dest, src: PType) =
# this fixes 'type TLock = TSysLock':
if src.sym != nil:
if dest.sym != nil:
dest.sym.flags = dest.sym.flags + (src.sym.flags-{sfExported})
dest.sym.flags.incl src.sym.flags-{sfExported}
if dest.sym.annex == nil: dest.sym.annex = src.sym.annex
mergeLoc(dest.sym.loc, src.sym.loc)
else:
@@ -1495,8 +1494,7 @@ proc isGCedMem*(t: PType): bool {.inline.} =
t.kind == tyProc and t.callConv == ccClosure
proc propagateToOwner*(owner, elem: PType; propagateHasAsgn = true) =
const HaveTheirOwnEmpty = {tySequence, tySet, tyPtr, tyRef, tyProc}
owner.flags = owner.flags + (elem.flags * {tfHasMeta, tfTriggersCompileTime})
owner.flags.incl elem.flags * {tfHasMeta, tfTriggersCompileTime}
if tfNotNil in elem.flags:
if owner.kind in {tyGenericInst, tyGenericBody, tyGenericInvocation}:
owner.flags.incl tfNotNil

View File

@@ -135,8 +135,7 @@ proc `$`[Key, Val](b: BTree[Key, Val]): string =
result = ""
toString(b.root, "", result)
proc hasNext*[Key, Val](b: BTree[Key, Val]; index: int): bool =
result = index < b.entries
proc hasNext*[Key, Val](b: BTree[Key, Val]; index: int): bool = index < b.entries
proc countSubTree[Key, Val](it: Node[Key, Val]): int =
if it.isInternal:

View File

@@ -1653,7 +1653,7 @@ template genDollar(p: BProc, n: PNode, d: var TLoc, frmt: string) =
var a: TLoc
initLocExpr(p, n[1], a)
a.r = ropecg(p.module, frmt, [rdLoc(a)])
a.flags = a.flags - {lfIndirect} # this flag should not be propagated here (not just for HCR)
a.flags.excl lfIndirect # this flag should not be propagated here (not just for HCR)
if d.k == locNone: getTemp(p, n.typ, d)
genAssignment(p, d, a, {})
gcUsage(p.config, n)

View File

@@ -282,7 +282,7 @@ proc mergeRequired*(m: BModule): bool =
if m.s[i] != nil:
#echo "not empty: ", i, " ", m.s[i]
return true
for i in low(TCProcSection)..high(TCProcSection):
for i in TCProcSection:
if m.initProc.s(i) != nil:
#echo "not empty: ", i, " ", m.initProc.s[i]
return true
@@ -292,7 +292,7 @@ proc mergeFiles*(cfilename: AbsoluteFile, m: BModule) =
var old: TMergeSections
readMergeSections(cfilename, old)
# do the merge; old section before new section:
for i in low(TCFileSection)..high(TCFileSection):
for i in TCFileSection:
m.s[i] = old.f[i] & m.s[i]
for i in low(TCProcSection)..high(TCProcSection):
for i in TCProcSection:
m.initProc.s(i) = old.p[i] & m.initProc.s(i)

View File

@@ -1498,7 +1498,7 @@ proc genPragma(p: BProc, n: PNode) =
of wEmit: genEmit(p, it)
of wInjectStmt:
var p = newProc(nil, p.module)
p.options = p.options - {optLineTrace, optStackTrace}
p.options.excl {optLineTrace, optStackTrace}
genStmts(p, it[1])
p.module.injectStmt = p.s(cpsStmts)
else: discard

View File

@@ -681,7 +681,7 @@ proc loadDynamicLib(m: BModule, lib: PLib) =
[loadlib, genStringLiteral(m, lib.path)])
else:
var p = newProc(nil, m)
p.options = p.options - {optStackTrace}
p.options.excl optStackTrace
p.flags.incl nimErrorFlagDisabled
var dest: TLoc
initLoc(dest, locTemp, lib.path, OnStack)

View File

@@ -485,7 +485,6 @@ proc lowerStmtListExprs(ctx: var Ctx, n: PNode, needsSplit: var bool): PNode =
if ns:
needsSplit = true
var tmp: PSym
var s: PNode
let isExpr = not isEmptyType(n.typ)
if isExpr:
tmp = ctx.newTempVar(n.typ)
@@ -742,8 +741,6 @@ proc lowerStmtListExprs(ctx: var Ctx, n: PNode, needsSplit: var bool): PNode =
result.add(n)
of nkWhileStmt:
var ns = false
var condNeedsSplit = false
n[0] = ctx.lowerStmtListExprs(n[0], condNeedsSplit)
var bodyNeedsSplit = false

View File

@@ -53,11 +53,10 @@ const
"Copyright (c) 2006-" & copyrightYear & " by Andreas Rumpf\n"
proc genFeatureDesc[T: enum](t: typedesc[T]): string {.compileTime.} =
var x = ""
for f in low(T)..high(T):
if x.len > 0: x.add "|"
x.add $f
x
result = ""
for f in T:
if result.len > 0: result.add "|"
result.add $f
const
Usage = slurp"../doc/basicopt.txt".replace(" //", " ")
@@ -146,24 +145,24 @@ proc splitSwitch(conf: ConfigRef; switch: string, cmd, arg: var string, pass: TC
proc processOnOffSwitch(conf: ConfigRef; op: TOptions, arg: string, pass: TCmdLinePass,
info: TLineInfo) =
case arg.normalize
of "","on": conf.options = conf.options + op
of "off": conf.options = conf.options - op
of "","on": conf.options.incl op
of "off": conf.options.excl op
else: localError(conf, info, errOnOrOffExpectedButXFound % arg)
proc processOnOffSwitchOrList(conf: ConfigRef; op: TOptions, arg: string, pass: TCmdLinePass,
info: TLineInfo): bool =
result = false
case arg.normalize
of "on": conf.options = conf.options + op
of "off": conf.options = conf.options - op
of "on": conf.options.incl op
of "off": conf.options.excl op
of "list": result = true
else: localError(conf, info, errOnOffOrListExpectedButXFound % arg)
proc processOnOffSwitchG(conf: ConfigRef; op: TGlobalOptions, arg: string, pass: TCmdLinePass,
info: TLineInfo) =
case arg.normalize
of "", "on": conf.globalOptions = conf.globalOptions + op
of "off": conf.globalOptions = conf.globalOptions - op
of "", "on": conf.globalOptions.incl op
of "off": conf.globalOptions.excl op
else: localError(conf, info, errOnOrOffExpectedButXFound % arg)
proc expectArg(conf: ConfigRef; switch, arg: string, pass: TCmdLinePass, info: TLineInfo) =

View File

@@ -86,7 +86,7 @@ proc initDefines*(symbols: StringTableRef) =
defineSymbol("nimMacrosSizealignof")
defineSymbol("nimNoZeroExtendMagic")
defineSymbol("nimMacrosGetNodeId")
for f in low(Feature)..high(Feature):
for f in Feature:
defineSymbol("nimHas" & $f)
for s in WarningsToStr:

View File

@@ -302,15 +302,13 @@ proc ropeFormatNamedVars(conf: ConfigRef; frmt: FormatStr,
proc genComment(d: PDoc, n: PNode): string =
result = ""
var dummyHasToc: bool
if n.comment.len > 0:
var comment2 = n.comment
let comment = n.comment
when false:
# RFC: to preseve newlines in comments, this would work:
comment2 = comment2.replace("\n", "\n\n")
renderRstToOut(d[], parseRst(comment2, toFullPath(d.conf, n.info),
toLinenumber(n.info), toColumn(n.info),
dummyHasToc, d.options, d.conf), result)
comment = comment.replace("\n", "\n\n")
renderRstToOut(d[], parseRst(comment, toFullPath(d.conf, n.info), toLinenumber(n.info),
toColumn(n.info), (var dummy: bool; dummy), d.options, d.conf), result)
proc genRecCommentAux(d: PDoc, n: PNode): Rope =
if n == nil: return nil
@@ -342,11 +340,10 @@ proc getPlainDocstring(n: PNode): string =
## You need to call this before genRecComment, whose side effects are removal
## of comments from the tree. The proc will recursively scan and return all
## the concatenated ``##`` comments of the node.
result = ""
if n == nil: return
if startsWith(n.comment, "##"):
if n == nil: result = ""
elif startsWith(n.comment, "##"):
result = n.comment
if result.len < 1:
else:
for i in 0..<n.safeLen:
result = getPlainDocstring(n[i])
if result.len > 0: return
@@ -458,7 +455,6 @@ proc writeExample(d: PDoc; ex: PNode, rdoccmd: string) =
d.exampleGroups[rdoccmd].code.add "import r\"$1\"\n" % outp.string
proc runAllExamples(d: PDoc) =
let backend = d.conf.backend
# This used to be: `let backend = if isDefined(d.conf, "js"): "js"` (etc), however
# using `-d:js` (etc) cannot work properly, eg would fail with `importjs`
# since semantics are affected by `config.backend`, not by isDefined(d.conf, "js")
@@ -522,20 +518,6 @@ proc prepareExample(d: PDoc; n: PNode): tuple[rdoccmd: string, code: string] =
for imp in imports: runnableExamples.add imp
runnableExamples.add newTree(nkBlockStmt, newNode(nkEmpty), copyTree savedLastSon)
proc renderNimCodeOld(d: PDoc, n: PNode, dest: var Rope) =
## this is a rather hacky way to get rid of the initial indentation
## that the renderer currently produces:
# deadcode
var i = 0
var body = n.lastSon
if body.len == 1 and body.kind == nkStmtList and
body.lastSon.kind == nkStmtList:
body = body.lastSon
for b in body:
if i > 0: dest.add "\n"
inc i
nodeToHighlightedHtml(d, b, dest, {renderRunnableExamples}, nil)
type RunnableState = enum
rsStart
rsComment
@@ -575,12 +557,9 @@ proc getAllRunnableExamplesImpl(d: PDoc; n: PNode, dest: var Rope, state: Runnab
inc d.listingCounter
let id = $d.listingCounter
dest.add(d.config.getOrDefault"doc.listing_start" % [id, "langNim"])
when true:
var dest2 = ""
renderNimCode(dest2, code, isLatex = d.conf.cmd == cmdRst2tex)
dest.add dest2
else:
renderNimCodeOld(d, n, dest)
var dest2 = ""
renderNimCode(dest2, code, isLatex = d.conf.cmd == cmdRst2tex)
dest.add dest2
dest.add(d.config.getOrDefault"doc.listing_end" % id)
return rsRunnable
else: discard
@@ -1213,12 +1192,12 @@ proc genOutFile(d: PDoc): Rope =
var tmp = ""
renderTocEntries(d[], j, 1, tmp)
var toc = tmp.rope
for i in low(TSymKind)..high(TSymKind):
for i in TSymKind:
genSection(d, i)
toc.add(d.toc[i])
if toc != nil:
toc = ropeFormatNamedVars(d.conf, getConfigVar(d.conf, "doc.toc"), ["content"], [toc])
for i in low(TSymKind)..high(TSymKind): code.add(d.section[i])
for i in TSymKind: code.add(d.section[i])
# Extract the title. Non API modules generate an entry in the index table.
if d.meta[metaTitle].len != 0:

View File

@@ -331,7 +331,7 @@ proc setCC*(conf: ConfigRef; ccname: string; info: TLineInfo) =
conf.compileOptions = getConfigVar(conf, conf.cCompiler, ".options.always")
conf.linkOptions = ""
conf.cCompilerPath = getConfigVar(conf, conf.cCompiler, ".path")
for i in low(CC)..high(CC): undefSymbol(conf.symbols, CC[i].name)
for c in CC: undefSymbol(conf.symbols, c.name)
defineSymbol(conf.symbols, CC[conf.cCompiler].name)
proc addOpt(dest: var string, src: string) =
@@ -353,7 +353,7 @@ proc addCompileOptionCmd*(conf: ConfigRef; option: string) =
proc initVars*(conf: ConfigRef) =
# we need to define the symbol here, because ``CC`` may have never been set!
for i in low(CC)..high(CC): undefSymbol(conf.symbols, CC[i].name)
for c in CC: undefSymbol(conf.symbols, c.name)
defineSymbol(conf.symbols, CC[conf.cCompiler].name)
addCompileOption(conf, getConfigVar(conf, conf.cCompiler, ".options.always"))
#addLinkOption(getConfigVar(cCompiler, ".options.linker"))

View File

@@ -199,8 +199,8 @@ proc parseLine(p: var TTmplParser) =
inc(j)
llStreamWrite(p.outp, "\\n\"")
proc filterTmpl*(stdin: PLLStream, filename: AbsoluteFile,
call: PNode; conf: ConfigRef): PLLStream =
proc filterTmpl*(conf: ConfigRef, stdin: PLLStream, filename: AbsoluteFile,
call: PNode): PLLStream =
var p: TTmplParser
p.config = conf
p.info = newLineInfo(conf, filename, 0, 0)
@@ -214,9 +214,9 @@ proc filterTmpl*(stdin: PLLStream, filename: AbsoluteFile,
p.x = newStringOfCap(120)
# do not process the first line which contains the directive:
if llStreamReadLine(p.inp, p.x):
p.info.line = p.info.line + 1'u16
inc p.info.line
while llStreamReadLine(p.inp, p.x):
p.info.line = p.info.line + 1'u16
inc p.info.line
parseLine(p)
newLine(p)
result = p.outp

View File

@@ -1,89 +0,0 @@
#
#
# The Nim Compiler
# (c) Copyright 2015 Andreas Rumpf
#
# See the file "copying.txt", included in this
# distribution, for details about the copyright.
#
## This module implements for loop detection for better C code generation.
import ast, astalgo
const
# The comparison magics that a for-loop condition may use: equality and
# ordered comparisons over ints, floats, enums, chars, bools, pointers etc.
someCmp = {mEqI, mEqF64, mEqEnum, mEqCh, mEqB, mEqRef, mEqProc,
mLeI, mLeF64, mLeU, mLeEnum,
mLeCh, mLeB, mLePtr, mLtI, mLtF64, mLtU, mLtEnum,
mLtCh, mLtB, mLtPtr}
proc isCounter(s: PSym): bool {.inline.} =
  ## A symbol qualifies as a loop counter when it is a result/var/let/temp
  ## symbol that is neither global nor has had its address taken.
  result = s.kind in {skResult, skVar, skLet, skTemp}
  if result:
    result = {sfGlobal, sfAddrTaken} * s.flags == {}
proc isCall(n: PNode): bool {.inline.} =
  ## A "call" here means a call-kind node whose callee is a bare symbol.
  if n.kind in nkCallKinds:
    result = n[0].kind == nkSym
proc fromSystem(op: PSym): bool = sfSystemModule in getModule(op).flags
proc getCounter(lastStmt: PNode): PSym =
  ## Returns the loop counter symbol if `lastStmt` is a recognized
  ## increment/decrement — an `inc`/`dec` magic or a `+=`/`-=` from the
  ## system module — applied to a plain local counter symbol.
  ## Otherwise `nil` (the zero-initialized `result`).
  if lastStmt.isCall:
    # `isCall` guarantees lastStmt[0] is an nkSym node, so .sym is safe.
    # (The scraped version read `lastStmt.sym` and indexed the PSym as
    # `op[1]`; the callee symbol lives at lastStmt[0] and the candidate
    # counter is the call's first argument, lastStmt[1].)
    let op = lastStmt[0].sym
    if op.magic in {mDec, mInc} or
        ((op.name.s == "+=" or op.name.s == "-=") and op.fromSystem):
      if lastStmt[1].kind == nkSym and isCounter(lastStmt[1].sym):
        result = lastStmt[1].sym
proc counterInTree(n, loop: PNode; counter: PSym): bool =
  ## True if `counter` is referenced anywhere inside `n` outside of the
  ## subtree `loop` (used to prove the counter is dead after the loop).
  # prune the search tree: within the loop the counter may be used:
  if n == loop: return
  case n.kind
  of nkSym:
    if n.sym == counter: return true
  of nkVarSection, nkLetSection:
    # definitions are fine; only scan the initializer expressions.
    # Bug fix: the recursive call was missing the `loop, counter`
    # arguments, which cannot compile.
    for it in n:
      if counterInTree(it.lastSon, loop, counter): return true
  else:
    for i in 0..<n.safeLen:
      if counterInTree(n[i], loop, counter): return true
proc copyExcept(n: PNode, x, dest: PNode) =
  ## Appends to `dest` every statement reachable from `n`, flattening
  ## nested statement lists, but skipping the node `x` entirely.
  if n == x:
    return
  case n.kind
  of nkStmtList, nkStmtListExpr:
    for child in n:
      copyExcept(child, x, dest)
  else:
    dest.add n
type
# Decomposition of a while loop that was recognized as a counting for
# loop. `counter` is nil when recognition failed.
ForLoop* = object
# counter: the loop variable; init: its defining assignment (counter.ast)
counter*: PSym
# cond: the comparison guarding the loop; increment: the inc/dec statement;
# body: the loop body with the increment statement removed
init*, cond*, increment*, body*: PNode
proc extractForLoop*(loop, fullTree: PNode): ForLoop =
  ## Tries to recognize the while statement `loop` as a counting for loop.
  ## On failure `result.counter` stays nil; on success all fields of
  ## `ForLoop` are filled in. `fullTree` is used to verify the counter is
  ## not used after the loop.
  assert loop.kind == nkWhileStmt
  # Bug fix: this line was `let cond == loop[0]`, a syntax error.
  let cond = loop[0]
  if not cond.isCall: return
  if cond[0].sym.magic notin someCmp: return
  # find the innermost last statement: the counter increment candidate
  var lastStmt = loop[1]
  while lastStmt.kind in {nkStmtList, nkStmtListExpr}:
    lastStmt = lastStmt.lastSon
  let counter = getCounter(lastStmt)
  if counter.isNil or counter.ast.isNil: return
  # Bug fix: `: expr` is the long-removed metatype; the template yields bool.
  template `=~`(a, b): bool = a.kind == nkSym and a.sym == b
  if cond[1] =~ counter or cond[2] =~ counter:
    # ok, now check 'counter' is not used *after* the loop
    if counterInTree(fullTree, loop, counter): return
    # ok, success, fill in the fields:
    result.counter = counter
    result.init = counter.ast
    result.cond = cond
    result.increment = lastStmt
    result.body = newNodeI(nkStmtList, loop[1].info)
    copyExcept(loop[1], lastStmt, result.body)

View File

@@ -91,7 +91,7 @@ proc isLastRead(location: PNode; cfg: ControlFlowGraph; otherRead: var PNode; pc
return -1
inc pc
of goto:
pc = pc + cfg[pc].dest
pc += cfg[pc].dest
of fork:
# every branch must lead to the last read of the location:
var variantA = pc + 1
@@ -156,7 +156,7 @@ proc isFirstWrite(location: PNode; cfg: ControlFlowGraph; pc, until: int): int =
return -1
inc pc
of goto:
pc = pc + cfg[pc].dest
pc += cfg[pc].dest
of fork:
# every branch must not contain a def/use of our location:
var variantA = pc + 1
@@ -198,7 +198,7 @@ proc initialized(code: ControlFlowGraph; pc: int,
while pc < code.len:
case code[pc].kind
of goto:
pc = pc + code[pc].dest
pc += code[pc].dest
of fork:
var initA = initIntSet()
var initB = initIntSet()

View File

@@ -373,8 +373,8 @@ proc `*`*(lhs,rhs: Int128): Int128 =
let b00 = cast[uint64](rhs.udata[0])
result = makeInt128(high64(lhs) * low64(rhs) + low64(lhs) * high64(rhs) + a32 * b32, a00 * b00)
result = result + toInt128(a32 * b00) shl 32
result = result + toInt128(a00 * b32) shl 32
result += toInt128(a32 * b00) shl 32
result += toInt128(a00 * b32) shl 32
proc `*=`*(a: var Int128, b: Int128) =
a = a * b
@@ -427,7 +427,7 @@ proc divMod*(dividend, divisor: Int128): tuple[quotient, remainder: Int128] =
for i in 0..shift:
quotient = quotient shl 1
if dividend >= denominator:
dividend = dividend - denominator
dividend -= denominator
quotient = bitor(quotient, One)
denominator = denominator shr 1

View File

@@ -35,7 +35,7 @@ type
Emitter* = object
config: ConfigRef
fid: FileIndex
lastTok: TTokType
lastTok: TokType
inquote, lastTokWasTerse: bool
semicolons: SemicolonKind
col, lastLineNumber, lineSpan, indentLevel, indWidth*, inSection: int
@@ -402,7 +402,7 @@ proc endsInAlpha(em: Emitter): bool =
while i >= 0 and em.kinds[i] in {ltBeginSection, ltEndSection}: dec(i)
result = if i >= 0: em.tokens[i].lastChar in SymChars+{'_'} else: false
proc emitComment(em: var Emitter; tok: TToken; dontIndent: bool) =
proc emitComment(em: var Emitter; tok: Token; dontIndent: bool) =
var col = em.col
let lit = strip fileSection(em.config, em.fid, tok.commentOffsetA, tok.commentOffsetB)
em.lineSpan = countNewlines(lit)
@@ -417,7 +417,7 @@ proc emitComment(em: var Emitter; tok: TToken; dontIndent: bool) =
inc col
emitMultilineComment(em, lit, col, dontIndent)
proc emitTok*(em: var Emitter; L: TLexer; tok: TToken) =
proc emitTok*(em: var Emitter; L: Lexer; tok: Token) =
template wasExportMarker(em): bool =
em.kinds.len > 0 and em.kinds[^1] == ltExportMarker

View File

@@ -30,7 +30,7 @@ const
# don't forget to update the 'highlite' module if these charsets should change
type
TTokType* = enum
TokType* = enum
tkInvalid, tkEof, # order is important here!
tkSymbol, # keywords:
tkAddr, tkAnd, tkAs, tkAsm,
@@ -64,7 +64,7 @@ type
tkOpr, tkComment, tkAccent,
tkSpaces, tkInfixOpr, tkPrefixOpr, tkPostfixOpr
TTokTypes* = set[TTokType]
TokTypes* = set[TokType]
const
weakTokens = {tkComma, tkSemiColon, tkColon,
@@ -73,7 +73,7 @@ const
# tokens that should not be considered for previousToken
tokKeywordLow* = succ(tkSymbol)
tokKeywordHigh* = pred(tkIntLit)
TokTypeToStr*: array[TTokType, string] = ["tkInvalid", "[EOF]",
TokTypeToStr*: array[TokType, string] = ["tkInvalid", "[EOF]",
"tkSymbol",
"addr", "and", "as", "asm",
"bind", "block", "break", "case", "cast",
@@ -104,22 +104,19 @@ const
"tkPrefixOpr", "tkPostfixOpr"]
type
TNumericalBase* = enum
NumericalBase* = enum
base10, # base10 is listed as the first element,
# so that it is the correct default value
base2, base8, base16
CursorPosition* {.pure.} = enum ## XXX remove this again
None, InToken, BeforeToken, AfterToken
TToken* = object # a Nim token
tokType*: TTokType # the type of the token
Token* = object # a Nim token
tokType*: TokType # the type of the token
indent*: int # the indentation; != -1 if the token has been
# preceded with indentation
ident*: PIdent # the parsed identifier
iNumber*: BiggestInt # the parsed integer literal
fNumber*: BiggestFloat # the parsed floating point literal
base*: TNumericalBase # the numerical base; only valid for int
base*: NumericalBase # the numerical base; only valid for int
# or float literals
strongSpaceA*: int8 # leading spaces of an operator
strongSpaceB*: int8 # trailing spaces of an operator
@@ -127,26 +124,25 @@ type
# documentation comments are here too
line*, col*: int
when defined(nimpretty):
offsetA*, offsetB*: int # used for pretty printing so that literals
# like 0b01 or r"\L" are unaffected
offsetA*, offsetB*: int # used for pretty printing so that literals
# like 0b01 or r"\L" are unaffected
commentOffsetA*, commentOffsetB*: int
TErrorHandler* = proc (conf: ConfigRef; info: TLineInfo; msg: TMsgKind; arg: string)
TLexer* = object of TBaseLexer
ErrorHandler* = proc (conf: ConfigRef; info: TLineInfo; msg: TMsgKind; arg: string)
Lexer* = object of TBaseLexer
fileIdx*: FileIndex
indentAhead*: int # if > 0 an indentation has already been read
# this is needed because scanning comments
# needs so much look-ahead
currLineIndent*: int
strongSpaces*, allowTabs*: bool
cursor*: CursorPosition
errorHandler*: TErrorHandler
errorHandler*: ErrorHandler
cache*: IdentCache
when defined(nimsuggest):
previousToken: TLineInfo
config*: ConfigRef
proc getLineInfo*(L: TLexer, tok: TToken): TLineInfo {.inline.} =
proc getLineInfo*(L: Lexer, tok: Token): TLineInfo {.inline.} =
result = newLineInfo(L.fileIdx, tok.line, tok.col)
when defined(nimpretty):
result.offsetA = tok.offsetA
@@ -154,8 +150,8 @@ proc getLineInfo*(L: TLexer, tok: TToken): TLineInfo {.inline.} =
result.commentOffsetA = tok.commentOffsetA
result.commentOffsetB = tok.commentOffsetB
proc isKeyword*(kind: TTokType): bool =
result = (kind >= tokKeywordLow) and (kind <= tokKeywordHigh)
proc isKeyword*(kind: TokType): bool =
(kind >= tokKeywordLow) and (kind <= tokKeywordHigh)
template ones(n): untyped = ((1 shl n)-1) # for utf-8 conversion
@@ -169,28 +165,27 @@ proc isNimIdentifier*(s: string): bool =
inc(i)
result = true
proc `$`*(tok: TToken): string =
proc `$`*(tok: Token): string =
case tok.tokType
of tkIntLit..tkInt64Lit: result = $tok.iNumber
of tkFloatLit..tkFloat64Lit: result = $tok.fNumber
of tkInvalid, tkStrLit..tkCharLit, tkComment: result = tok.literal
of tkParLe..tkColon, tkEof, tkAccent:
result = TokTypeToStr[tok.tokType]
of tkIntLit..tkInt64Lit: $tok.iNumber
of tkFloatLit..tkFloat64Lit: $tok.fNumber
of tkInvalid, tkStrLit..tkCharLit, tkComment: tok.literal
of tkParLe..tkColon, tkEof, tkAccent: TokTypeToStr[tok.tokType]
else:
if tok.ident != nil:
result = tok.ident.s
tok.ident.s
else:
result = ""
""
proc prettyTok*(tok: TToken): string =
if isKeyword(tok.tokType): result = "keyword " & tok.ident.s
else: result = $tok
proc prettyTok*(tok: Token): string =
if isKeyword(tok.tokType): "keyword " & tok.ident.s
else: $tok
proc printTok*(conf: ConfigRef; tok: TToken) =
proc printTok*(conf: ConfigRef; tok: Token) =
msgWriteln(conf, $tok.line & ":" & $tok.col & "\t" &
TokTypeToStr[tok.tokType] & " " & $tok)
proc initToken*(L: var TToken) =
proc initToken*(L: var Token) =
L.tokType = tkInvalid
L.iNumber = 0
L.indent = 0
@@ -203,7 +198,7 @@ proc initToken*(L: var TToken) =
L.commentOffsetA = 0
L.commentOffsetB = 0
proc fillToken(L: var TToken) =
proc fillToken(L: var Token) =
L.tokType = tkInvalid
L.iNumber = 0
L.indent = 0
@@ -216,7 +211,7 @@ proc fillToken(L: var TToken) =
L.commentOffsetA = 0
L.commentOffsetB = 0
proc openLexer*(lex: var TLexer, fileIdx: FileIndex, inputstream: PLLStream;
proc openLexer*(lex: var Lexer, fileIdx: FileIndex, inputstream: PLLStream;
cache: IdentCache; config: ConfigRef) =
openBaseLexer(lex, inputstream)
lex.fileIdx = fileIdx
@@ -228,36 +223,36 @@ proc openLexer*(lex: var TLexer, fileIdx: FileIndex, inputstream: PLLStream;
lex.previousToken.fileIndex = fileIdx
lex.config = config
proc openLexer*(lex: var TLexer, filename: AbsoluteFile, inputstream: PLLStream;
proc openLexer*(lex: var Lexer, filename: AbsoluteFile, inputstream: PLLStream;
cache: IdentCache; config: ConfigRef) =
openLexer(lex, fileInfoIdx(config, filename), inputstream, cache, config)
proc closeLexer*(lex: var TLexer) =
proc closeLexer*(lex: var Lexer) =
if lex.config != nil:
inc(lex.config.linesCompiled, lex.lineNumber)
closeBaseLexer(lex)
proc getLineInfo(L: TLexer): TLineInfo =
proc getLineInfo(L: Lexer): TLineInfo =
result = newLineInfo(L.fileIdx, L.lineNumber, getColNumber(L, L.bufpos))
proc dispMessage(L: TLexer; info: TLineInfo; msg: TMsgKind; arg: string) =
proc dispMessage(L: Lexer; info: TLineInfo; msg: TMsgKind; arg: string) =
if L.errorHandler.isNil:
msgs.message(L.config, info, msg, arg)
else:
L.errorHandler(L.config, info, msg, arg)
proc lexMessage*(L: TLexer, msg: TMsgKind, arg = "") =
proc lexMessage*(L: Lexer, msg: TMsgKind, arg = "") =
L.dispMessage(getLineInfo(L), msg, arg)
proc lexMessageTok*(L: TLexer, msg: TMsgKind, tok: TToken, arg = "") =
proc lexMessageTok*(L: Lexer, msg: TMsgKind, tok: Token, arg = "") =
var info = newLineInfo(L.fileIdx, tok.line, tok.col)
L.dispMessage(info, msg, arg)
proc lexMessagePos(L: var TLexer, msg: TMsgKind, pos: int, arg = "") =
proc lexMessagePos(L: var Lexer, msg: TMsgKind, pos: int, arg = "") =
var info = newLineInfo(L.fileIdx, L.lineNumber, pos - L.lineStart)
L.dispMessage(info, msg, arg)
proc matchTwoChars(L: TLexer, first: char, second: set[char]): bool =
proc matchTwoChars(L: Lexer, first: char, second: set[char]): bool =
result = (L.buf[L.bufpos] == first) and (L.buf[L.bufpos + 1] in second)
template tokenBegin(tok, pos) {.dirty.} =
@@ -271,7 +266,6 @@ template tokenEnd(tok, pos) {.dirty.} =
let colB = getColNumber(L, pos)+1
if L.fileIdx == L.config.m.trackPos.fileIndex and L.config.m.trackPos.col in colA..colB and
L.lineNumber == L.config.m.trackPos.line.int and L.config.ideCmd in {ideSug, ideCon}:
L.cursor = CursorPosition.InToken
L.config.m.trackPos.col = colA.int16
colA = 0
when defined(nimpretty):
@@ -296,23 +290,22 @@ template tokenEndPrevious(tok, pos) =
let colB = getColNumber(L, pos)
if L.fileIdx == L.config.m.trackPos.fileIndex and L.config.m.trackPos.col in colA..colB and
L.lineNumber == L.config.m.trackPos.line.int and L.config.ideCmd in {ideSug, ideCon}:
L.cursor = CursorPosition.BeforeToken
L.config.m.trackPos = L.previousToken
L.config.m.trackPosAttached = true
colA = 0
when defined(nimpretty):
tok.offsetB = L.offsetBase + pos
template eatChar(L: var TLexer, t: var TToken, replacementChar: char) =
template eatChar(L: var Lexer, t: var Token, replacementChar: char) =
t.literal.add(replacementChar)
inc(L.bufpos)
template eatChar(L: var TLexer, t: var TToken) =
template eatChar(L: var Lexer, t: var Token) =
t.literal.add(L.buf[L.bufpos])
inc(L.bufpos)
proc getNumber(L: var TLexer, result: var TToken) =
proc matchUnderscoreChars(L: var TLexer, tok: var TToken, chars: set[char]): Natural =
proc getNumber(L: var Lexer, result: var Token) =
proc matchUnderscoreChars(L: var Lexer, tok: var Token, chars: set[char]): Natural =
var pos = L.bufpos # use registers for pos, buf
result = 0
while true:
@@ -332,19 +325,19 @@ proc getNumber(L: var TLexer, result: var TToken) =
inc(pos)
L.bufpos = pos
proc matchChars(L: var TLexer, tok: var TToken, chars: set[char]) =
proc matchChars(L: var Lexer, tok: var Token, chars: set[char]) =
var pos = L.bufpos # use registers for pos, buf
while L.buf[pos] in chars:
tok.literal.add(L.buf[pos])
inc(pos)
L.bufpos = pos
proc lexMessageLitNum(L: var TLexer, msg: string, startpos: int, msgKind = errGenerated) =
proc lexMessageLitNum(L: var Lexer, msg: string, startpos: int, msgKind = errGenerated) =
# Used to get slightly human friendlier err messages.
const literalishChars = {'A'..'F', 'a'..'f', '0'..'9', 'X', 'x', 'o', 'O',
'c', 'C', 'b', 'B', '_', '.', '\'', 'd', 'i', 'u'}
var msgPos = L.bufpos
var t: TToken
var t: Token
t.literal = ""
L.bufpos = startpos # Use L.bufpos as pos because of matchChars
matchChars(L, t, literalishChars)
@@ -612,7 +605,7 @@ proc getNumber(L: var TLexer, result: var TToken) =
tokenEnd(result, postPos-1)
L.bufpos = postPos
proc handleHexChar(L: var TLexer, xi: var int; position: range[0..4]) =
proc handleHexChar(L: var Lexer, xi: var int; position: range[0..4]) =
template invalid() =
lexMessage(L, errGenerated,
"expected a hex digit, but found: " & L.buf[L.bufpos] &
@@ -637,7 +630,7 @@ proc handleHexChar(L: var TLexer, xi: var int; position: range[0..4]) =
# Need to progress for `nim check`
inc(L.bufpos)
proc handleDecChars(L: var TLexer, xi: var int) =
proc handleDecChars(L: var Lexer, xi: var int) =
while L.buf[L.bufpos] in {'0'..'9'}:
xi = (xi * 10) + (ord(L.buf[L.bufpos]) - ord('0'))
inc(L.bufpos)
@@ -680,7 +673,7 @@ proc addUnicodeCodePoint(s: var string, i: int) =
s[pos+4] = chr(i shr 6 and ones(6) or 0b10_0000_00)
s[pos+5] = chr(i and ones(6) or 0b10_0000_00)
proc getEscapedChar(L: var TLexer, tok: var TToken) =
proc getEscapedChar(L: var Lexer, tok: var Token) =
inc(L.bufpos) # skip '\'
case L.buf[L.bufpos]
of 'n', 'N':
@@ -760,13 +753,7 @@ proc getEscapedChar(L: var TLexer, tok: var TToken) =
else: lexMessage(L, errGenerated, "invalid character constant")
else: lexMessage(L, errGenerated, "invalid character constant")
proc newString(s: cstring, len: int): string =
## XXX, how come there is no support for this?
result = newString(len)
for i in 0..<len:
result[i] = s[i]
proc handleCRLF(L: var TLexer, pos: int): int =
proc handleCRLF(L: var Lexer, pos: int): int =
template registerLine =
let col = L.getColNumber(pos)
@@ -788,7 +775,7 @@ type
raw,
generalized
proc getString(L: var TLexer, tok: var TToken, mode: StringMode) =
proc getString(L: var Lexer, tok: var Token, mode: StringMode) =
var pos = L.bufpos
var line = L.lineNumber # save linenumber for better error message
tokenBegin(tok, pos - ord(mode == raw))
@@ -854,7 +841,7 @@ proc getString(L: var TLexer, tok: var TToken, mode: StringMode) =
inc(pos)
L.bufpos = pos
proc getCharacter(L: var TLexer, tok: var TToken) =
proc getCharacter(L: var Lexer, tok: var Token) =
tokenBegin(tok, L.bufpos)
inc(L.bufpos) # skip '
var c = L.buf[L.bufpos]
@@ -871,7 +858,7 @@ proc getCharacter(L: var TLexer, tok: var TToken) =
tokenEndIgnore(tok, L.bufpos)
inc(L.bufpos) # skip '
proc getSymbol(L: var TLexer, tok: var TToken) =
proc getSymbol(L: var Lexer, tok: var Token) =
var h: Hash = 0
var pos = L.bufpos
tokenBegin(tok, pos)
@@ -901,21 +888,21 @@ proc getSymbol(L: var TLexer, tok: var TToken) =
(tok.ident.id > ord(tokKeywordHigh) - ord(tkSymbol)):
tok.tokType = tkSymbol
else:
tok.tokType = TTokType(tok.ident.id + ord(tkSymbol))
tok.tokType = TokType(tok.ident.id + ord(tkSymbol))
if suspicious and {optStyleHint, optStyleError} * L.config.globalOptions != {}:
lintReport(L.config, getLineInfo(L), tok.ident.s.normalize, tok.ident.s)
L.bufpos = pos
proc endOperator(L: var TLexer, tok: var TToken, pos: int,
proc endOperator(L: var Lexer, tok: var Token, pos: int,
hash: Hash) {.inline.} =
var h = !$hash
tok.ident = L.cache.getIdent(addr(L.buf[L.bufpos]), pos - L.bufpos, h)
if (tok.ident.id < oprLow) or (tok.ident.id > oprHigh): tok.tokType = tkOpr
else: tok.tokType = TTokType(tok.ident.id - oprLow + ord(tkColon))
else: tok.tokType = TokType(tok.ident.id - oprLow + ord(tkColon))
L.bufpos = pos
proc getOperator(L: var TLexer, tok: var TToken) =
proc getOperator(L: var Lexer, tok: var Token) =
var pos = L.bufpos
tokenBegin(tok, pos)
var h: Hash = 0
@@ -935,18 +922,15 @@ proc getOperator(L: var TLexer, tok: var TToken) =
if L.buf[pos] in {CR, LF, nimlexbase.EndOfFile}:
tok.strongSpaceB = -1
proc getPrecedence*(tok: TToken, strongSpaces: bool): int =
proc getPrecedence*(tok: Token): int =
## Calculates the precedence of the given token.
template considerStrongSpaces(x): untyped =
x + (if strongSpaces: 100 - tok.strongSpaceA.int*10 else: 0)
case tok.tokType
of tkOpr:
let relevantChar = tok.ident.s[0]
# arrow like?
if tok.ident.s.len > 1 and tok.ident.s[^1] == '>' and
tok.ident.s[^2] in {'-', '~', '='}: return considerStrongSpaces(1)
tok.ident.s[^2] in {'-', '~', '='}: return 1
template considerAsgn(value: untyped) =
result = if tok.ident.s[^1] == '=': 1 else: value
@@ -962,15 +946,13 @@ proc getPrecedence*(tok: TToken, strongSpaces: bool): int =
of '?': result = 2
else: considerAsgn(2)
of tkDiv, tkMod, tkShl, tkShr: result = 9
of tkIn, tkNotin, tkIs, tkIsnot, tkOf, tkAs, tkFrom: result = 5
of tkDotDot: result = 6
of tkIn, tkNotin, tkIs, tkIsnot, tkOf, tkAs, tkFrom: result = 5
of tkAnd: result = 4
of tkOr, tkXor, tkPtr, tkRef: result = 3
else: return -10
result = considerStrongSpaces(result)
proc newlineFollows*(L: TLexer): bool =
proc newlineFollows*(L: Lexer): bool =
var pos = L.bufpos
while true:
case L.buf[pos]
@@ -986,7 +968,7 @@ proc newlineFollows*(L: TLexer): bool =
else:
break
proc skipMultiLineComment(L: var TLexer; tok: var TToken; start: int;
proc skipMultiLineComment(L: var Lexer; tok: var Token; start: int;
isDoc: bool) =
var pos = start
var toStrip = 0
@@ -1051,7 +1033,7 @@ proc skipMultiLineComment(L: var TLexer; tok: var TToken; start: int;
when defined(nimpretty):
tok.commentOffsetB = L.offsetBase + pos - 1
proc scanComment(L: var TLexer, tok: var TToken) =
proc scanComment(L: var Lexer, tok: var Token) =
var pos = L.bufpos
tok.tokType = tkComment
# iNumber contains the number of '\n' in the token
@@ -1101,7 +1083,7 @@ proc scanComment(L: var TLexer, tok: var TToken) =
when defined(nimpretty):
tok.commentOffsetB = L.offsetBase + pos - 1
proc skip(L: var TLexer, tok: var TToken) =
proc skip(L: var Lexer, tok: var Token) =
var pos = L.bufpos
tokenBegin(tok, pos)
tok.strongSpaceA = 0
@@ -1173,7 +1155,7 @@ proc skip(L: var TLexer, tok: var TToken) =
tok.tokType = tkComment
tok.indent = commentIndent
proc rawGetTok*(L: var TLexer, tok: var TToken) =
proc rawGetTok*(L: var Lexer, tok: var Token) =
template atTokenEnd() {.dirty.} =
when defined(nimsuggest):
# we attach the cursor to the last *strong* token
@@ -1181,8 +1163,6 @@ proc rawGetTok*(L: var TLexer, tok: var TToken) =
L.previousToken.line = tok.line.uint16
L.previousToken.col = tok.col.int16
when defined(nimsuggest):
L.cursor = CursorPosition.None
fillToken(tok)
if L.indentAhead >= 0:
tok.indent = L.indentAhead
@@ -1253,7 +1233,6 @@ proc rawGetTok*(L: var TLexer, tok: var TToken) =
if L.fileIdx == L.config.m.trackPos.fileIndex and tok.col+1 == L.config.m.trackPos.col and
tok.line == L.config.m.trackPos.line.int and L.config.ideCmd == ideSug:
tok.tokType = tkDot
L.cursor = CursorPosition.InToken
L.config.m.trackPos.col = tok.col.int16
inc(L.bufpos)
atTokenEnd()
@@ -1326,8 +1305,8 @@ proc rawGetTok*(L: var TLexer, tok: var TToken) =
proc getIndentWidth*(fileIdx: FileIndex, inputstream: PLLStream;
cache: IdentCache; config: ConfigRef): int =
var lex: TLexer
var tok: TToken
var lex: Lexer
var tok: Token
initToken(tok)
openLexer(lex, fileIdx, inputstream, cache, config)
var prevToken = tkEof
@@ -1341,11 +1320,11 @@ proc getIndentWidth*(fileIdx: FileIndex, inputstream: PLLStream;
proc getPrecedence*(ident: PIdent): int =
## assumes ident is binary operator already
var tok: TToken
var tok: Token
initToken(tok)
tok.ident = ident
tok.tokType =
if tok.ident.id in ord(tokKeywordLow) - ord(tkSymbol)..ord(tokKeywordHigh) - ord(tkSymbol):
TTokType(tok.ident.id + ord(tkSymbol))
TokType(tok.ident.id + ord(tkSymbol))
else: tkOpr
getPrecedence(tok, false)
getPrecedence(tok)

View File

@@ -919,11 +919,6 @@ template inst(field, t) =
proc isTrival(s: PSym): bool {.inline.} =
s == nil or (s.ast != nil and s.ast[bodyPos].len == 0)
proc isEmptyContainer(g: ModuleGraph, t: PType): bool =
(t.kind == tyArray and lengthOrd(g.config, t[0]) == 0) or
(t.kind == tySequence and t[0].kind == tyError)
proc createTypeBoundOps(g: ModuleGraph; c: PContext; orig: PType; info: TLineInfo) =
## In the semantic pass this is called in strategic places
## to ensure we lift assignment, destructors and moves properly.

View File

@@ -71,12 +71,9 @@ proc beautifyName(s: string, k: TSymKind): string =
proc differ*(line: string, a, b: int, x: string): string =
proc substrEq(s: string, pos, last: int, substr: string): bool =
var i = 0
while i < substr.len and pos+i <= last and s[pos+i] == substr[i]:
inc i
return i == substr.len
let last = min(b, line.len)
result = true
for i in 0..<substr.len:
if pos+i > last or s[pos+i] != substr[i]: return false
result = ""
if not substrEq(line, a, b, x):
@@ -84,11 +81,6 @@ proc differ*(line: string, a, b: int, x: string): string =
if cmpIgnoreStyle(y, x) == 0:
result = y
proc checkStyle(conf: ConfigRef; cache: IdentCache; info: TLineInfo, s: string, k: TSymKind; sym: PSym) =
let beau = beautifyName(s, k)
if s != beau:
lintReport(conf, info, beau, s)
proc nep1CheckDefImpl(conf: ConfigRef; info: TLineInfo; s: PSym; k: TSymKind) =
# operators stay as they are:
if k in {skResult, skTemp} or s.name.s[0] notin Letters: return
@@ -136,6 +128,6 @@ proc styleCheckUse*(conf: ConfigRef; info: TLineInfo; s: PSym) =
lintReport(conf, info, newName, oldName)
proc checkPragmaUse*(conf: ConfigRef; info: TLineInfo; w: TSpecialWord; pragmaName: string) =
let wanted = canonPragmaSpelling(w)
let wanted = specialWords[w]
if pragmaName != wanted:
lintReport(conf, info, wanted, pragmaName)

View File

@@ -163,8 +163,8 @@ proc commandScan(cache: IdentCache, config: ConfigRef) =
var stream = llStreamOpen(f, fmRead)
if stream != nil:
var
L: TLexer
tok: TToken
L: Lexer
tok: Token
initToken(tok)
openLexer(L, f, stream, cache, config)
while true:

View File

@@ -91,8 +91,7 @@ when false:
if result.len > 0: return result
proc scriptableImport(pkg, sub: string; info: TLineInfo): string =
result = resolveDollar(gProjectFull, info.toFullPath(), pkg, sub, info)
if result.isNil: result = ""
resolveDollar(gProjectFull, info.toFullPath(), pkg, sub, info)
proc lookupPackage(pkg, subdir: PNode): string =
let sub = if subdir != nil: renderTree(subdir, {renderNoComments}).replace(" ") else: ""
@@ -112,8 +111,7 @@ proc getModuleName*(conf: ConfigRef; n: PNode): string =
case n.kind
of nkStrLit, nkRStrLit, nkTripleStrLit:
try:
result =
pathSubs(conf, n.strVal, toFullPath(conf, n.info).splitFile().dir)
result = pathSubs(conf, n.strVal, toFullPath(conf, n.info).splitFile().dir)
except ValueError:
localError(conf, n.info, "invalid path: " & n.strVal)
result = n.strVal

View File

@@ -86,7 +86,7 @@ proc compileModule*(graph: ModuleGraph; fileIdx: FileIndex; flags: TSymFlags): P
result = r
if result == nil:
result = newModule(graph, fileIdx)
result.flags = result.flags + flags
result.flags.incl flags
result.id = id
registerModule(graph, result)
else:

View File

@@ -377,15 +377,12 @@ template styledMsgWriteln*(args: varargs[typed]) =
when defined(windows):
flushFile(stderr)
proc msgKindToString*(kind: TMsgKind): string =
proc msgKindToString*(kind: TMsgKind): string = MsgKindToStr[kind]
# later versions may provide translated error messages
result = MsgKindToStr[kind]
proc getMessageStr(msg: TMsgKind, arg: string): string =
result = msgKindToString(msg) % [arg]
proc getMessageStr(msg: TMsgKind, arg: string): string = msgKindToString(msg) % [arg]
type
TErrorHandling* = enum doNothing, doAbort, doRaise
type TErrorHandling* = enum doNothing, doAbort, doRaise
proc log*(s: string) =
var f: File

View File

@@ -16,13 +16,13 @@ import
# ---------------- configuration file parser -----------------------------
# we use Nim's scanner here to save space and work
proc ppGetTok(L: var TLexer, tok: var TToken) =
proc ppGetTok(L: var Lexer, tok: var Token) =
# simple filter
rawGetTok(L, tok)
while tok.tokType in {tkComment}: rawGetTok(L, tok)
proc parseExpr(L: var TLexer, tok: var TToken; config: ConfigRef): bool
proc parseAtom(L: var TLexer, tok: var TToken; config: ConfigRef): bool =
proc parseExpr(L: var Lexer, tok: var Token; config: ConfigRef): bool
proc parseAtom(L: var Lexer, tok: var Token; config: ConfigRef): bool =
if tok.tokType == tkParLe:
ppGetTok(L, tok)
result = parseExpr(L, tok, config)
@@ -35,21 +35,21 @@ proc parseAtom(L: var TLexer, tok: var TToken; config: ConfigRef): bool =
result = isDefined(config, tok.ident.s)
ppGetTok(L, tok)
proc parseAndExpr(L: var TLexer, tok: var TToken; config: ConfigRef): bool =
proc parseAndExpr(L: var Lexer, tok: var Token; config: ConfigRef): bool =
result = parseAtom(L, tok, config)
while tok.tokType == tkAnd:
ppGetTok(L, tok) # skip "and"
var b = parseAtom(L, tok, config)
result = result and b
proc parseExpr(L: var TLexer, tok: var TToken; config: ConfigRef): bool =
proc parseExpr(L: var Lexer, tok: var Token; config: ConfigRef): bool =
result = parseAndExpr(L, tok, config)
while tok.tokType == tkOr:
ppGetTok(L, tok) # skip "or"
var b = parseAndExpr(L, tok, config)
result = result or b
proc evalppIf(L: var TLexer, tok: var TToken; config: ConfigRef): bool =
proc evalppIf(L: var Lexer, tok: var Token; config: ConfigRef): bool =
ppGetTok(L, tok) # skip 'if' or 'elif'
result = parseExpr(L, tok, config)
if tok.tokType == tkColon: ppGetTok(L, tok)
@@ -57,7 +57,7 @@ proc evalppIf(L: var TLexer, tok: var TToken; config: ConfigRef): bool =
#var condStack: seq[bool] = @[]
proc doEnd(L: var TLexer, tok: var TToken; condStack: var seq[bool]) =
proc doEnd(L: var Lexer, tok: var Token; condStack: var seq[bool]) =
if high(condStack) < 0: lexMessage(L, errGenerated, "expected @if")
ppGetTok(L, tok) # skip 'end'
setLen(condStack, high(condStack))
@@ -66,21 +66,21 @@ type
TJumpDest = enum
jdEndif, jdElseEndif
proc jumpToDirective(L: var TLexer, tok: var TToken, dest: TJumpDest; config: ConfigRef;
proc jumpToDirective(L: var Lexer, tok: var Token, dest: TJumpDest; config: ConfigRef;
condStack: var seq[bool])
proc doElse(L: var TLexer, tok: var TToken; config: ConfigRef; condStack: var seq[bool]) =
proc doElse(L: var Lexer, tok: var Token; config: ConfigRef; condStack: var seq[bool]) =
if high(condStack) < 0: lexMessage(L, errGenerated, "expected @if")
ppGetTok(L, tok)
if tok.tokType == tkColon: ppGetTok(L, tok)
if condStack[high(condStack)]: jumpToDirective(L, tok, jdEndif, config, condStack)
proc doElif(L: var TLexer, tok: var TToken; config: ConfigRef; condStack: var seq[bool]) =
proc doElif(L: var Lexer, tok: var Token; config: ConfigRef; condStack: var seq[bool]) =
if high(condStack) < 0: lexMessage(L, errGenerated, "expected @if")
var res = evalppIf(L, tok, config)
if condStack[high(condStack)] or not res: jumpToDirective(L, tok, jdElseEndif, config, condStack)
else: condStack[high(condStack)] = true
proc jumpToDirective(L: var TLexer, tok: var TToken, dest: TJumpDest; config: ConfigRef;
proc jumpToDirective(L: var Lexer, tok: var Token, dest: TJumpDest; config: ConfigRef;
condStack: var seq[bool]) =
var nestedIfs = 0
while true:
@@ -110,7 +110,7 @@ proc jumpToDirective(L: var TLexer, tok: var TToken, dest: TJumpDest; config: Co
else:
ppGetTok(L, tok)
proc parseDirective(L: var TLexer, tok: var TToken; config: ConfigRef; condStack: var seq[bool]) =
proc parseDirective(L: var Lexer, tok: var Token; config: ConfigRef; condStack: var seq[bool]) =
ppGetTok(L, tok) # skip @
case whichKeyword(tok.ident)
of wIf:
@@ -149,16 +149,16 @@ proc parseDirective(L: var TLexer, tok: var TToken; config: ConfigRef; condStack
else:
lexMessage(L, errGenerated, "invalid directive: '$1'" % $tok)
proc confTok(L: var TLexer, tok: var TToken; config: ConfigRef; condStack: var seq[bool]) =
proc confTok(L: var Lexer, tok: var Token; config: ConfigRef; condStack: var seq[bool]) =
ppGetTok(L, tok)
while tok.ident != nil and tok.ident.s == "@":
parseDirective(L, tok, config, condStack) # else: give the token to the parser
proc checkSymbol(L: TLexer, tok: TToken) =
proc checkSymbol(L: Lexer, tok: Token) =
if tok.tokType notin {tkSymbol..tkInt64Lit, tkStrLit..tkTripleStrLit}:
lexMessage(L, errGenerated, "expected identifier, but got: " & $tok)
proc parseAssignment(L: var TLexer, tok: var TToken;
proc parseAssignment(L: var Lexer, tok: var Token;
config: ConfigRef; condStack: var seq[bool]) =
if tok.ident != nil:
if tok.ident.s == "-" or tok.ident.s == "--":
@@ -211,8 +211,8 @@ proc parseAssignment(L: var TLexer, tok: var TToken;
proc readConfigFile*(filename: AbsoluteFile; cache: IdentCache;
config: ConfigRef): bool =
var
L: TLexer
tok: TToken
L: Lexer
tok: Token
stream: PLLStream
stream = llStreamOpen(filename, fmRead)
if stream != nil:

View File

@@ -162,9 +162,9 @@ proc getColNumber(L: TBaseLexer, pos: int): int =
proc getCurrentLine(L: TBaseLexer, marker: bool = true): string =
result = ""
var i = L.lineStart
while not (L.buf[i] in {CR, LF, EndOfFile}):
result.add(L.buf[i])
inc(i)
result.add("\n")
while L.buf[i] notin {CR, LF, EndOfFile}:
result.add L.buf[i]
inc i
result.add "\n"
if marker:
result.add(spaces(getColNumber(L, L.bufpos)) & '^' & "\n")
result.add spaces(getColNumber(L, L.bufpos)) & '^' & "\n"

View File

@@ -691,12 +691,10 @@ iterator nimbleSubs*(conf: ConfigRef; p: string): string =
proc toGeneratedFile*(conf: ConfigRef; path: AbsoluteFile,
ext: string): AbsoluteFile =
## converts "/home/a/mymodule.nim", "rod" to "/home/a/nimcache/mymodule.rod"
let (head, tail) = splitPath(path.string)
result = getNimcacheDir(conf) / RelativeFile changeFileExt(tail, ext)
result = getNimcacheDir(conf) / RelativeFile path.string.splitPath.tail.changeFileExt(ext)
proc completeGeneratedFilePath*(conf: ConfigRef; f: AbsoluteFile,
createSubDir: bool = true): AbsoluteFile =
let (head, tail) = splitPath(f.string)
let subdir = getNimcacheDir(conf)
if createSubDir:
try:
@@ -704,7 +702,7 @@ proc completeGeneratedFilePath*(conf: ConfigRef; f: AbsoluteFile,
except OSError:
writeLine(stdout, "cannot create directory: " & subdir.string)
quit(1)
result = subdir / RelativeFile tail
result = subdir / RelativeFile f.string.splitPath.tail
#echo "completeGeneratedFilePath(", f, ") = ", result
proc rawFindFile(conf: ConfigRef; f: RelativeFile; suppressStdlib: bool): AbsoluteFile =

View File

@@ -28,8 +28,6 @@ proc getNimbleFile*(conf: ConfigRef; path: string): string =
result = file
break packageSearch
# we also store if we didn't find anything:
when not defined(nimNoNilSeqs):
if result.isNil: result = ""
for d in myParentDirs(path):
#echo "set cache ", d, " |", result, "|", parents
conf.packageCache[d] = result

View File

@@ -99,14 +99,14 @@ proc compileConstraints(p: PNode, result: var TPatternCode; conf: ConfigRef) =
else:
# check all symkinds:
internalAssert conf, int(high(TSymKind)) < 255
for i in low(TSymKind)..high(TSymKind):
for i in TSymKind:
if cmpIgnoreStyle(($i).substr(2), spec) == 0:
result.add(ppSymKind)
result.add(chr(i.ord))
return
# check all nodekinds:
internalAssert conf, int(high(TNodeKind)) < 255
for i in low(TNodeKind)..high(TNodeKind):
for i in TNodeKind:
if cmpIgnoreStyle($i, spec) == 0:
result.add(ppNodeKind)
result.add(chr(i.ord))

File diff suppressed because it is too large Load Diff

View File

@@ -126,7 +126,7 @@ proc moduleHasChanged*(graph: ModuleGraph; module: PSym): bool {.inline.} =
proc processModule*(graph: ModuleGraph; module: PSym, stream: PLLStream): bool {.discardable.} =
if graph.stopCompile(): return true
var
p: TParsers
p: Parser
a: TPassContextArray
s: PLLStream
fileIdx = module.fileIdx
@@ -164,7 +164,7 @@ proc processModule*(graph: ModuleGraph; module: PSym, stream: PLLStream): bool {
else:
s = stream
while true:
openParsers(p, fileIdx, s, graph.cache, graph.config)
openParser(p, fileIdx, s, graph.cache, graph.config)
if module.owner == nil or module.owner.name.s != "stdlib" or module.name.s == "distros":
# XXX what about caching? no processing then? what if I change the
@@ -212,7 +212,7 @@ proc processModule*(graph: ModuleGraph; module: PSym, stream: PLLStream): bool {
else:
#echo "----- single\n", n
if not processTopLevelStmt(graph, n, a): break
closeParsers(p)
closeParser(p)
if s.kind != llsStdIn: break
closePasses(graph, a)
# id synchronization point for more consistent code generation:

View File

@@ -238,7 +238,7 @@ proc processMagic(c: PContext, n: PNode, s: PSym) =
var v: string
if n[1].kind == nkIdent: v = n[1].ident.s
else: v = expectStrLit(c, n)
for m in low(TMagic)..high(TMagic):
for m in TMagic:
if substr($m, 1) == v:
s.magic = m
break
@@ -257,8 +257,8 @@ proc isTurnedOn(c: PContext, n: PNode): bool =
localError(c.config, n.info, "'on' or 'off' expected")
proc onOff(c: PContext, n: PNode, op: TOptions, resOptions: var TOptions) =
if isTurnedOn(c, n): resOptions = resOptions + op
else: resOptions = resOptions - op
if isTurnedOn(c, n): resOptions.incl op
else: resOptions.excl op
proc pragmaNoForward(c: PContext, n: PNode; flag=sfNoForward) =
if isTurnedOn(c, n):

View File

@@ -24,7 +24,7 @@ type
renderIr
TRenderFlags* = set[TRenderFlag]
TRenderTok* = object
kind*: TTokType
kind*: TokType
length*: int16
sym*: PSym
@@ -118,7 +118,7 @@ proc initSrcGen(g: var TSrcGen, renderFlags: TRenderFlags; config: ConfigRef) =
g.inGenericParams = false
g.config = config
proc addTok(g: var TSrcGen, kind: TTokType, s: string; sym: PSym = nil) =
proc addTok(g: var TSrcGen, kind: TokType, s: string; sym: PSym = nil) =
setLen(g.tokens, g.tokens.len + 1)
g.tokens[^1].kind = kind
g.tokens[^1].length = int16(s.len)
@@ -178,7 +178,7 @@ proc dedent(g: var TSrcGen) =
dec(g.pendingNL, IndentWidth)
dec(g.lineLen, IndentWidth)
proc put(g: var TSrcGen, kind: TTokType, s: string; sym: PSym = nil) =
proc put(g: var TSrcGen, kind: TokType, s: string; sym: PSym = nil) =
if kind != tkSpaces:
addPendingNL(g)
if s.len > 0:
@@ -250,7 +250,7 @@ proc maxLineLength(s: string): int =
inc(lineLen)
inc(i)
proc putRawStr(g: var TSrcGen, kind: TTokType, s: string) =
proc putRawStr(g: var TSrcGen, kind: TokType, s: string) =
var i = 0
let hi = s.len - 1
var str = ""
@@ -459,8 +459,8 @@ proc lsub(g: TSrcGen; n: PNode): int =
of nkDo: result = lsons(g, n) + len("do__:_")
of nkConstDef, nkIdentDefs:
result = lcomma(g, n, 0, - 3)
if n[^2].kind != nkEmpty: result = result + lsub(g, n[^2]) + 2
if n[^1].kind != nkEmpty: result = result + lsub(g, n[^1]) + 3
if n[^2].kind != nkEmpty: result += lsub(g, n[^2]) + 2
if n[^1].kind != nkEmpty: result += lsub(g, n[^1]) + 3
of nkVarTuple:
if n[^1].kind == nkEmpty:
result = lcomma(g, n, 0, - 2) + len("()")
@@ -471,8 +471,8 @@ proc lsub(g: TSrcGen; n: PNode): int =
of nkChckRange: result = len("chckRange") + 2 + lcomma(g, n)
of nkObjDownConv, nkObjUpConv:
result = 2
if n.len >= 1: result = result + lsub(g, n[0])
result = result + lcomma(g, n, 1)
if n.len >= 1: result += lsub(g, n[0])
result += lcomma(g, n, 1)
of nkExprColonExpr: result = lsons(g, n) + 2
of nkInfix: result = lsons(g, n) + 2
of nkPrefix:
@@ -536,7 +536,7 @@ proc lsub(g: TSrcGen; n: PNode): int =
of nkGenericParams: result = lcomma(g, n) + 2
of nkFormalParams:
result = lcomma(g, n, 1) + 2
if n[0].kind != nkEmpty: result = result + lsub(g, n[0]) + 2
if n[0].kind != nkEmpty: result += lsub(g, n[0]) + 2
of nkExceptBranch:
result = lcomma(g, n, 0, -2) + lsub(g, lastSon(n)) + len("except_:_")
of nkObjectTy:
@@ -575,7 +575,7 @@ proc hasCom(n: PNode): bool =
for i in 0..<n.len:
if hasCom(n[i]): return true
proc putWithSpace(g: var TSrcGen, kind: TTokType, s: string) =
proc putWithSpace(g: var TSrcGen, kind: TokType, s: string) =
put(g, kind, s)
put(g, tkSpaces, Space)
@@ -618,7 +618,7 @@ proc gsons(g: var TSrcGen, n: PNode, c: TContext, start: int = 0,
theEnd: int = - 1) =
for i in start..n.len + theEnd: gsub(g, n[i], c)
proc gsection(g: var TSrcGen, n: PNode, c: TContext, kind: TTokType,
proc gsection(g: var TSrcGen, n: PNode, c: TContext, kind: TokType,
k: string) =
if n.len == 0: return # empty var sections are possible
putWithSpace(g, kind, k)
@@ -847,7 +847,7 @@ proc gident(g: var TSrcGen, n: PNode) =
if sfAnon in n.sym.flags or
(n.typ != nil and tfImplicitTypeParam in n.typ.flags): return
var t: TTokType
var t: TokType
var s = atom(g, n)
if s.len > 0 and s[0] in lexer.SymChars:
if n.kind == nkIdent:
@@ -855,7 +855,7 @@ proc gident(g: var TSrcGen, n: PNode) =
(n.ident.id > ord(tokKeywordHigh) - ord(tkSymbol)):
t = tkSymbol
else:
t = TTokType(n.ident.id + ord(tkSymbol))
t = TokType(n.ident.id + ord(tkSymbol))
else:
t = tkSymbol
else:
@@ -1644,7 +1644,7 @@ proc initTokRender*(r: var TSrcGen, n: PNode, renderFlags: TRenderFlags = {}) =
initSrcGen(r, renderFlags, newPartialConfigRef())
gsub(r, n)
proc getNextTok*(r: var TSrcGen, kind: var TTokType, literal: var string) =
proc getNextTok*(r: var TSrcGen, kind: var TokType, literal: var string) =
if r.idx < r.tokens.len:
kind = r.tokens[r.idx].kind
let length = r.tokens[r.idx].length.int

View File

@@ -107,7 +107,6 @@ proc extractRunnableExamplesSource*(conf: ConfigRef; n: PNode): string =
var indent = info.col
let numLines = numLines(conf, info.fileIndex).uint16
var lastNonemptyPos = 0
result = ""
var ldata = LineData(lineFirst: first.line.int, conf: conf)
visitMultilineStrings(ldata, n[^1])
@@ -116,6 +115,7 @@ proc extractRunnableExamplesSource*(conf: ConfigRef; n: PNode): string =
for i in 0..<ldata.lines.len:
echo (i+ldata.lineFirst, ldata.lines[i])
result = ""
for line in first.line..numLines: # bugfix, see `testNimDocTrailingExample`
info.line = line
let src = sourceLine(conf, info)

View File

@@ -1,6 +1,6 @@
import
intsets, ast, idents, algorithm, renderer, os, strutils,
intsets, ast, idents, algorithm, renderer, strutils,
msgs, modulegraphs, syntaxes, options, modulepaths,
lineinfos
@@ -108,28 +108,6 @@ proc computeDeps(cache: IdentCache; n: PNode, declares, uses: var IntSet; topLev
else:
for i in 0..<n.safeLen: deps(n[i])
proc cleanPath(s: string): string =
# Here paths may have the form A / B or "A/B"
result = ""
for c in s:
if c != ' ' and c != '\"':
result.add c
proc joinPath(parts: seq[string]): string =
let nb = parts.len
assert nb > 0
if nb == 1:
return parts[0]
result = parts[0] / parts[1]
for i in 2..<parts.len:
result = result / parts[i]
proc getIncludePath(n: PNode, modulePath: string): string =
let istr = n.renderTree.cleanPath
let (pdir, _) = modulePath.splitPath
let p = istr.split('/').joinPath.addFileExt("nim")
result = pdir / p
proc hasIncludes(n:PNode): bool =
for a in n:
if a.kind == nkIncludeStmt:
@@ -409,8 +387,7 @@ proc strongConnect(v: var DepN, idx: var int, s: var seq[DepN],
proc getStrongComponents(g: var DepG): seq[seq[DepN]] =
## Tarjan's algorithm. Performs a topological sort
## and detects strongly connected components.
result = newSeq[seq[DepN]]()
var s = newSeq[DepN]()
var s: seq[DepN]
var idx = 0
for v in g.mitems:
if v.idx < 0:

View File

@@ -80,16 +80,14 @@ proc decodeStr*(s: cstring, pos: var int): string =
else: break
pos = i
const
chars = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
const chars = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
{.push overflowChecks: off.}
# since negative numbers require a leading '-' they use up 1 byte. Thus we
# subtract/add `vintDelta` here to save space for small negative numbers
# which are common in ROD files:
const
vintDelta = 5
const vintDelta = 5
template encodeIntImpl(self) =
var d: char

View File

@@ -2154,7 +2154,7 @@ proc instantiateCreateFlowVarCall(c: PContext; t: PType;
# since it's an instantiation, we unmark it as a compilerproc. Otherwise
# codegen would fail:
if sfCompilerProc in result.flags:
result.flags = result.flags - {sfCompilerProc, sfExportc, sfImportc}
result.flags.excl {sfCompilerProc, sfExportc, sfImportc}
result.loc.r = nil
proc setMs(n: PNode, s: PSym): PNode =

View File

@@ -475,7 +475,7 @@ proc foldArrayAccess(m: PSym, n: PNode; g: ModuleGraph): PNode =
else:
localError(g.config, n.info, formatErrorIndexBound(idx, x.len-1) & $n)
of nkBracket:
idx = idx - toInt64(firstOrd(g.config, x.typ))
idx -= toInt64(firstOrd(g.config, x.typ))
if idx >= 0 and idx < x.len: result = x[int(idx)]
else: localError(g.config, n.info, formatErrorIndexBound(idx, x.len-1) & $n)
of nkStrLit..nkTripleStrLit:

View File

@@ -66,7 +66,7 @@ iterator instantiateGenericParamList(c: PContext, n: PNode, pt: TIdTable): PSym
continue
let symKind = if q.typ.kind == tyStatic: skConst else: skType
var s = newSym(symKind, q.name, getCurrOwner(c), q.info)
s.flags = s.flags + {sfUsed, sfFromGeneric}
s.flags.incl {sfUsed, sfFromGeneric}
var t = PType(idTableGet(pt, q.typ))
if t == nil:
if tfRetType in q.typ.flags:

View File

@@ -139,8 +139,7 @@ template quoteStr(s: string): string = "'" & s & "'"
proc fieldsPresentInInitExpr(c: PContext, fieldsRecList, initExpr: PNode): string =
result = ""
for field in directFieldsInRecList(fieldsRecList):
let assignment = locateFieldInInitExpr(c, field.sym, initExpr)
if assignment != nil:
if locateFieldInInitExpr(c, field.sym, initExpr) != nil:
if result.len != 0: result.add ", "
result.add field.sym.name.s.quoteStr

View File

@@ -336,7 +336,6 @@ proc semTemplBody(c: var TemplCtx, n: PNode): PNode =
let s = qualifiedLookUp(c.c, n, {})
if s != nil:
if s.owner == c.owner and s.kind == skParam and sfTemplateParam in s.flags:
# oldCheck(c, sfGenSym notin s.flags or c.noGenSym == 0):
incl(s.flags, sfUsed)
result = newSymNode(s, n.info)
onUse(n.info, s)

View File

@@ -786,7 +786,7 @@ proc semRecordNodeAux(c: PContext, n: PNode, check: var IntSet, pos: var int,
{sfImportc, sfExportc} * fieldOwner.flags != {} and
not hasCaseFields and f.loc.r == nil:
f.loc.r = rope(f.name.s)
f.flags = f.flags + ({sfImportc, sfExportc} * fieldOwner.flags)
f.flags.incl {sfImportc, sfExportc} * fieldOwner.flags
inc(pos)
if containsOrIncl(check, f.name.id):
localError(c.config, info, "attempt to redefine: '" & f.name.s & "'")
@@ -1211,11 +1211,9 @@ proc semProcTypeNode(c: PContext, n, genericParams: PNode,
if hasType:
typ = semParamType(c, a[^2], constraint)
let sym = getCurrOwner(c)
var owner = sym.owner
# TODO: Disallow typed/untyped in procs in the compiler/stdlib
if kind == skProc and (typ.kind == tyTyped or typ.kind == tyUntyped):
if not isMagic(sym):
if not isMagic(getCurrOwner(c)):
localError(c.config, a[^2].info, "'" & typ.sym.name.s & "' is only allowed in templates and macros or magic procs")
if hasDefault:

View File

@@ -12,8 +12,7 @@
import ast, astalgo, msgs, types, magicsys, semdata, renderer, options,
lineinfos
const
tfInstClearedFlags = {tfHasMeta, tfUnresolved}
const tfInstClearedFlags = {tfHasMeta, tfUnresolved}
proc checkPartialConstructedType(conf: ConfigRef; info: TLineInfo, t: PType) =
if t.kind in {tyVar, tyLent} and t[0].kind in {tyVar, tyLent}:

View File

@@ -157,7 +157,7 @@ proc hashType(c: var MD5Context, t: PType; flags: set[ConsiderFlag]) =
if t.n.len > 0:
let oldFlags = t.sym.flags
# Mild hack to prevent endless recursion.
t.sym.flags = t.sym.flags - {sfAnon, sfGenSym}
t.sym.flags.excl {sfAnon, sfGenSym}
hashTree(c, t.n, flags + {CoHashTypeInsideNode})
t.sym.flags = oldFlags
else:

View File

@@ -265,17 +265,12 @@ proc sumGeneric(t: PType): int =
else:
return 0
#var ggDebug: bool
proc complexDisambiguation(a, b: PType): int =
# 'a' matches better if *every* argument matches better or equal than 'b'.
var winner = 0
for i in 1..<min(a.len, b.len):
let x = a[i].sumGeneric
let y = b[i].sumGeneric
#if ggDebug:
#echo "came herA ", typeToString(a[i]), " ", x
#echo "came herB ", typeToString(b[i]), " ", y
if x != y:
if winner == 0:
if x > y: winner = 1
@@ -337,14 +332,13 @@ proc argTypeToString(arg: PNode; prefer: TPreferedDesc): string =
else:
result = arg.typ.typeToString(prefer)
proc describeArgs*(c: PContext, n: PNode, startIdx = 1;
prefer: TPreferedDesc = preferName): string =
proc describeArgs*(c: PContext, n: PNode, startIdx = 1; prefer = preferName): string =
result = ""
for i in startIdx..<n.len:
var arg = n[i]
if n[i].kind == nkExprEqExpr:
result.add(renderTree(n[i][0]))
result.add(": ")
result.add renderTree(n[i][0])
result.add ": "
if arg.typ.isNil and arg.kind notin {nkStmtList, nkDo}:
# XXX we really need to 'tryExpr' here!
arg = c.semOperand(c, n[i][1])
@@ -357,8 +351,8 @@ proc describeArgs*(c: PContext, n: PNode, startIdx = 1;
arg = c.semOperand(c, n[i])
n[i] = arg
if arg.typ != nil and arg.typ.kind == tyError: return
result.add(argTypeToString(arg, prefer))
if i != n.len - 1: result.add(", ")
result.add argTypeToString(arg, prefer)
if i != n.len - 1: result.add ", "
proc concreteType(c: TCandidate, t: PType; f: PType = nil): PType =
case t.kind
@@ -2211,17 +2205,11 @@ proc paramTypesMatch*(m: var TCandidate, f, a: PType,
z.callee = arg[i].typ
if tfUnresolved in z.callee.flags: continue
z.calleeSym = arg[i].sym
#if arg[i].sym.name.s == "cmp":
# ggDebug = true
# echo "CALLLEEEEEEEE A ", typeToString(z.callee)
# XXX this is still all wrong: (T, T) should be 2 generic matches
# and (int, int) 2 exact matches, etc. Essentially you cannot call
# typeRel here and expect things to work!
let r = typeRel(z, f, arg[i].typ)
incMatches(z, r, 2)
#if arg[i].sym.name.s == "cmp": # and arg.info.line == 606:
# echo "M ", r, " ", arg.info, " ", typeToString(arg[i].sym.typ)
# writeMatches(z)
if r != isNone:
z.state = csMatch
case x.state
@@ -2314,14 +2302,7 @@ proc incrIndexType(t: PType) =
template isVarargsUntyped(x): untyped =
x.kind == tyVarargs and x[0].kind == tyUntyped
proc matchesAux(c: PContext, n, nOrig: PNode,
m: var TCandidate, marker: var IntSet) =
var
a = 1 # iterates over the actual given arguments
f = if m.callee.kind != tyGenericBody: 1
else: 0 # iterates over formal parameters
arg: PNode # current prepared argument
formal: PSym # current routine parameter
proc matchesAux(c: PContext, n, nOrig: PNode, m: var TCandidate, marker: var IntSet) =
template noMatch() =
c.mergeShadowScope #merge so that we don't have to resem for later overloads
@@ -2350,15 +2331,19 @@ proc matchesAux(c: PContext, n, nOrig: PNode,
m.state = csMatch # until proven otherwise
m.firstMismatch = MismatchInfo()
m.call = newNodeI(n.kind, n.info)
m.call.typ = base(m.callee) # may be nil
var formalLen = m.callee.n.len
m.call = newNodeIT(n.kind, n.info, m.callee.base)
m.call.add n[0]
var container: PNode = nil # constructed container
formal = if formalLen > 1: m.callee.n[1].sym else: nil
var
a = 1 # iterates over the actual given arguments
f = if m.callee.kind != tyGenericBody: 1
else: 0 # iterates over formal parameters
arg: PNode # current prepared argument
formalLen = m.callee.n.len
formal = if formalLen > 1: m.callee.n[1].sym else: nil # current routine parameter
container: PNode = nil # constructed container
while a < n.len:
c.openShadowScope
if a >= formalLen-1 and f < formalLen and m.callee.n[f].typ.isVarargsUntyped:
@@ -2493,7 +2478,7 @@ proc matchesAux(c: PContext, n, nOrig: PNode,
f = max(f, formalLen - n.len + a + 1)
elif formal.typ.kind != tyVarargs or container == nil:
setSon(m.call, formal.position + 1, arg)
inc(f)
inc f
container = nil
else:
# we end up here if the argument can be converted into the varargs
@@ -2511,7 +2496,7 @@ proc matchesAux(c: PContext, n, nOrig: PNode,
else:
c.closeShadowScope
inc(a)
inc a
# for some edge cases (see tdont_return_unowned_from_owned test case)
m.firstMismatch.arg = a
m.firstMismatch.formal = formal

View File

@@ -78,8 +78,7 @@ proc text*(sourceNode: SourceNode, depth: int): string =
result.add(child.node.text(depth + 1))
proc `$`*(sourceNode: SourceNode): string =
text(sourceNode, 0)
proc `$`*(sourceNode: SourceNode): string = text(sourceNode, 0)
# base64_VLQ
@@ -107,8 +106,7 @@ proc encode*(i: int): string =
z += 1
type
TokenState = enum Normal, String, Ident, Mangled
type TokenState = enum Normal, String, Ident, Mangled
iterator tokenize*(line: string): (bool, string) =
# result = @[]
@@ -244,10 +242,10 @@ proc serializeMappings(map: SourceMapGenerator, mappings: seq[Mapping]): string
var previous = Mapping(generated: (line: 1, column: 0), original: (line: 0, column: 0), name: "", source: "")
var previousSourceId = 0
var previousNameId = 0
result = ""
var next = ""
var nameId = 0
var sourceId = 0
result = ""
for z, mapping in mappings:
next = ""

View File

@@ -33,7 +33,7 @@
# included from sigmatch.nim
import algorithm, sets, prefixmatches, lineinfos, parseutils, linter
from wordrecg import wDeprecated, wError, wAddr, wYield, specialWords
from wordrecg import wDeprecated, wError, wAddr, wYield
when defined(nimsuggest):
import passes, tables, pathutils # importer

View File

@@ -13,44 +13,20 @@ import
strutils, llstream, ast, idents, lexer, options, msgs, parser,
filters, filter_tmpl, renderer, lineinfos, pathutils
type
TFilterKind* = enum
filtNone, filtTemplate, filtReplace, filtStrip
TParserKind* = enum
skinStandard, skinEndX
const
parserNames*: array[TParserKind, string] = ["standard",
"endx"]
filterNames*: array[TFilterKind, string] = ["none", "stdtmpl", "replace",
"strip"]
export Parser, parseAll, parseTopLevelStmt, closeParser
type
TParsers* = object
skin*: TParserKind
parser*: TParser
template config(p: TParsers): ConfigRef = p.parser.lex.config
proc parseAll*(p: var TParsers): PNode =
case p.skin
of skinStandard:
result = parser.parseAll(p.parser)
of skinEndX:
internalError(p.config, "parser to implement")
proc parseTopLevelStmt*(p: var TParsers): PNode =
case p.skin
of skinStandard:
result = parser.parseTopLevelStmt(p.parser)
of skinEndX:
internalError(p.config, "parser to implement")
FilterKind = enum
filtNone = "none"
filtTemplate = "stdtmpl"
filtReplace = "replace"
filtStrip = "strip"
proc utf8Bom(s: string): int =
if s.len >= 3 and s[0] == '\xEF' and s[1] == '\xBB' and s[2] == '\xBF':
result = 3
3
else:
result = 0
0
proc containsShebang(s: string, i: int): bool =
if i+1 < s.len and s[i] == '#' and s[i+1] == '!':
@@ -78,23 +54,16 @@ proc parsePipe(filename: AbsoluteFile, inputStream: PLLStream; cache: IdentCache
else:
inc(i, 2)
while i < line.len and line[i] in Whitespace: inc(i)
var q: TParser
parser.openParser(q, filename, llStreamOpen(substr(line, i)), cache, config)
result = parser.parseAll(q)
parser.closeParser(q)
var p: Parser
openParser(p, filename, llStreamOpen(substr(line, i)), cache, config)
result = parseAll(p)
closeParser(p)
llStreamClose(s)
proc getFilter(ident: PIdent): TFilterKind =
for i in low(TFilterKind)..high(TFilterKind):
if cmpIgnoreStyle(ident.s, filterNames[i]) == 0:
proc getFilter(ident: PIdent): FilterKind =
for i in FilterKind:
if cmpIgnoreStyle(ident.s, $i) == 0:
return i
result = filtNone
proc getParser(conf: ConfigRef; n: PNode; ident: PIdent): TParserKind =
for i in low(TParserKind)..high(TParserKind):
if cmpIgnoreStyle(ident.s, parserNames[i]) == 0:
return i
localError(conf, n.info, "unknown parser: " & ident.s)
proc getCallee(conf: ConfigRef; n: PNode): PIdent =
if n.kind in nkCallKinds and n[0].kind == nkIdent:
@@ -104,30 +73,28 @@ proc getCallee(conf: ConfigRef; n: PNode): PIdent =
else:
localError(conf, n.info, "invalid filter: " & renderTree(n))
proc applyFilter(p: var TParsers, n: PNode, filename: AbsoluteFile,
proc applyFilter(p: var Parser, n: PNode, filename: AbsoluteFile,
stdin: PLLStream): PLLStream =
var ident = getCallee(p.config, n)
var f = getFilter(ident)
case f
of filtNone:
p.skin = getParser(p.config, n, ident)
result = stdin
of filtTemplate:
result = filterTmpl(stdin, filename, n, p.config)
of filtStrip:
result = filterStrip(p.config, stdin, filename, n)
of filtReplace:
result = filterReplace(p.config, stdin, filename, n)
var f = getFilter(getCallee(p.lex.config, n))
result = case f
of filtNone:
stdin
of filtTemplate:
filterTmpl(p.lex.config, stdin, filename, n)
of filtStrip:
filterStrip(p.lex.config, stdin, filename, n)
of filtReplace:
filterReplace(p.lex.config, stdin, filename, n)
if f != filtNone:
assert p.config != nil
if p.config.hasHint(hintCodeBegin):
rawMessage(p.config, hintCodeBegin, "")
msgWriteln(p.config, result.s)
rawMessage(p.config, hintCodeEnd, "")
assert p.lex.config != nil
if p.lex.config.hasHint(hintCodeBegin):
rawMessage(p.lex.config, hintCodeBegin, "")
msgWriteln(p.lex.config, result.s)
rawMessage(p.lex.config, hintCodeEnd, "")
proc evalPipe(p: var TParsers, n: PNode, filename: AbsoluteFile,
proc evalPipe(p: var Parser, n: PNode, filename: AbsoluteFile,
start: PLLStream): PLLStream =
assert p.config != nil
assert p.lex.config != nil
result = start
if n.kind == nkEmpty: return
if n.kind == nkInfix and n[0].kind == nkIdent and n[0].ident.s == "|":
@@ -141,35 +108,28 @@ proc evalPipe(p: var TParsers, n: PNode, filename: AbsoluteFile,
else:
result = applyFilter(p, n, filename, result)
proc openParsers*(p: var TParsers, fileIdx: FileIndex, inputstream: PLLStream;
proc openParser*(p: var Parser, fileIdx: FileIndex, inputstream: PLLStream;
cache: IdentCache; config: ConfigRef) =
assert config != nil
var s: PLLStream
p.skin = skinStandard
let filename = toFullPathConsiderDirty(config, fileIdx)
var pipe = parsePipe(filename, inputstream, cache, config)
p.config() = config
if pipe != nil: s = evalPipe(p, pipe, filename, inputstream)
else: s = inputstream
case p.skin
of skinStandard, skinEndX:
parser.openParser(p.parser, fileIdx, s, cache, config)
p.lex.config = config
let s = if pipe != nil: evalPipe(p, pipe, filename, inputstream)
else: inputstream
parser.openParser(p, fileIdx, s, cache, config)
proc closeParsers*(p: var TParsers) =
parser.closeParser(p.parser)
proc setupParsers*(p: var TParsers; fileIdx: FileIndex; cache: IdentCache;
proc setupParser*(p: var Parser; fileIdx: FileIndex; cache: IdentCache;
config: ConfigRef): bool =
var f: File
let filename = toFullPathConsiderDirty(config, fileIdx)
var f: File
if not open(f, filename.string):
rawMessage(config, errGenerated, "cannot open file: " & filename.string)
return false
openParsers(p, fileIdx, llStreamOpen(f), cache, config)
openParser(p, fileIdx, llStreamOpen(f), cache, config)
result = true
proc parseFile*(fileIdx: FileIndex; cache: IdentCache; config: ConfigRef): PNode =
var p: TParsers
if setupParsers(p, fileIdx, cache, config):
var p: Parser
if setupParser(p, fileIdx, cache, config):
result = parseAll(p)
closeParsers(p)
closeParser(p)

View File

@@ -469,9 +469,9 @@ proc typeToString(typ: PType, prefer: TPreferedDesc = preferName): string =
prefer
proc typeToString(typ: PType, prefer: TPreferedDesc = preferName): string =
result = ""
let prefer = getPrefer(prefer)
let t = typ
result = ""
if t == nil: return
if prefer in preferToResolveSymbols and t.sym != nil and
sfAnon notin t.sym.flags and t.kind != tySequence:

View File

@@ -1360,19 +1360,6 @@ proc genMarshalStore(c: PCtx, n: PNode, dest: var TDest) =
c.gABx(n, opcMarshalStore, 0, c.genType(n[1].typ))
c.freeTemp(tmp)
const
atomicTypes = {tyBool, tyChar,
tyUntyped, tyTyped, tyTypeDesc, tyStatic,
tyEnum,
tyOrdinal,
tyRange,
tyProc,
tyPointer, tyOpenArray,
tyString, tyCString,
tyInt, tyInt8, tyInt16, tyInt32, tyInt64,
tyFloat, tyFloat32, tyFloat64, tyFloat128,
tyUInt, tyUInt8, tyUInt16, tyUInt32, tyUInt64}
proc unneededIndirection(n: PNode): bool =
n.typ.skipTypes(abstractInstOwned-{tyTypeDesc}).kind == tyRef
@@ -2200,7 +2187,7 @@ proc optimizeJumps(c: PCtx; start: int) =
for iters in countdown(maxIterations, 0):
case c.code[d].opcode
of opcJmp:
d = d + c.code[d].jmpDiff
d += c.code[d].jmpDiff
of opcTJmp, opcFJmp:
if c.code[d].regA != reg: break
# tjmp x, 23
@@ -2208,12 +2195,12 @@ proc optimizeJumps(c: PCtx; start: int) =
# tjmp x, 12
# -- we know 'x' is true, and so can jump to 12+13:
if c.code[d].opcode == opc:
d = d + c.code[d].jmpDiff
d += c.code[d].jmpDiff
else:
# tjmp x, 23
# fjmp x, 22
# We know 'x' is true so skip to the next instruction:
d = d + 1
d += 1
else: break
if d != i + c.code[i].jmpDiff:
c.finalJumpTarget(i, d - i)
@@ -2221,7 +2208,7 @@ proc optimizeJumps(c: PCtx; start: int) =
var d = i + c.code[i].jmpDiff
var iters = maxIterations
while c.code[d].opcode == opcJmp and iters > 0:
d = d + c.code[d].jmpDiff
d += c.code[d].jmpDiff
dec iters
if c.code[d].opcode == opcRet:
# optimize 'jmp to ret' to 'ret' here

View File

@@ -104,8 +104,7 @@ const
wAsm, wBreak, wCase, wConst, wContinue, wDo, wElse, wEnum, wExport,
wFor, wIf, wReturn, wStatic, wTemplate, wTry, wWhile, wUsing}
specialWords*: array[low(TSpecialWord)..high(TSpecialWord), string] = ["",
specialWords*: array[TSpecialWord, string] = ["",
"addr", "and", "as", "asm",
"bind", "block", "break", "case", "cast",
"concept", "const", "continue", "converter",
@@ -129,35 +128,35 @@ const
"immediate", "constructor", "destructor", "delegator", "override",
"importcpp", "importobjc",
"importcompilerproc", "importc", "importjs", "exportc", "exportcpp", "exportnims",
"incompletestruct",
"completestruct",
"requiresinit", "align", "nodecl", "pure", "sideeffect",
"header", "nosideeffect", "gcsafe", "noreturn", "nosinks", "merge", "lib", "dynlib",
"importCompilerProc", "importc", "importjs", "exportc", "exportcpp", "exportnims",
"incompleteStruct",
"completeStruct",
"requiresInit", "align", "nodecl", "pure", "sideEffect",
"header", "noSideEffect", "gcsafe", "noreturn", "nosinks", "merge", "lib", "dynlib",
"compilerproc", "core", "procvar", "base", "used",
"fatal", "error", "warning", "hint", "warningaserror", "line",
"push", "pop", "define", "undef", "linedir", "stacktrace", "linetrace",
"fatal", "error", "warning", "hint", "warningAsError", "line",
"push", "pop", "define", "undef", "lineDir", "stackTrace", "lineTrace",
"link", "compile", "linksys", "deprecated", "varargs",
"callconv", "debugger", "nimcall", "stdcall",
"cdecl", "safecall", "syscall", "inline", "noinline", "fastcall", "thiscall", "closure",
"noconv", "on", "off", "checks", "rangechecks", "boundchecks",
"overflowchecks", "nilchecks",
"floatchecks", "nanchecks", "infchecks", "stylechecks", "staticboundchecks",
"nonreloadable", "executeonreload",
"noconv", "on", "off", "checks", "rangeChecks", "boundChecks",
"overflowChecks", "nilChecks",
"floatChecks", "nanChecks", "infChecks", "styleChecks", "staticBoundChecks",
"nonReloadable", "executeOnReload",
"assertions", "patterns", "trmacros", "sinkinference", "warnings", "hints",
"optimization", "raises", "writes", "reads", "size", "effects", "tags",
"requires", "ensures", "invariant", "assume", "assert",
"deadcodeelim", # deprecated, dead code elim always happens
"deadCodeElim", # deprecated, dead code elim always happens
"safecode", "package", "noforward", "reorder", "norewrite", "nodestroy",
"pragma",
"compiletime", "noinit",
"passc", "passl", "localpassc", "borrow", "discardable", "fieldchecks",
"subschar", "acyclic", "shallow", "unroll", "linearscanend",
"computedgoto", "injectstmt", "experimental",
"compileTime", "noinit",
"passc", "passl", "localPassC", "borrow", "discardable", "fieldChecks",
"subschar", "acyclic", "shallow", "unroll", "linearScanEnd",
"computedGoto", "injectStmt", "experimental",
"write", "gensym", "inject", "dirty", "inheritable", "threadvar", "emit",
"asmnostackframe", "implicitstatic", "global", "codegendecl", "unchecked",
"guard", "locks", "partial", "explain", "liftlocals",
"asmNoStackFrame", "implicitStatic", "global", "codegenDecl", "unchecked",
"guard", "locks", "partial", "explain", "liftLocals",
"auto", "bool", "catch", "char", "class", "compl",
"const_cast", "default", "delete", "double",
@@ -184,38 +183,3 @@ proc findStr*(a: openArray[string], s: string): int =
if cmpIgnoreStyle(a[i], s) == 0:
return i
result = - 1
proc canonPragmaSpelling*(w: TSpecialWord): string =
case w
of wNoSideEffect: "noSideEffect"
of wImportCompilerProc: "importCompilerProc"
of wIncompleteStruct: "incompleteStruct"
of wCompleteStruct: "completeStruct"
of wRequiresInit: "requiresInit"
of wSideEffect: "sideEffect"
of wLineDir: "lineDir"
of wStackTrace: "stackTrace"
of wLineTrace: "lineTrace"
of wRangeChecks: "rangeChecks"
of wBoundChecks: "boundChecks"
of wOverflowChecks: "overflowChecks"
of wNilChecks: "nilChecks"
of wFloatChecks: "floatChecks"
of wNanChecks: "nanChecks"
of wInfChecks: "infChecks"
of wStyleChecks: "styleChecks"
of wNonReloadable: "nonReloadable"
of wExecuteOnReload: "executeOnReload"
of wDeadCodeElimUnused: "deadCodeElim"
of wCompileTime: "compileTime"
of wFieldChecks: "fieldChecks"
of wLinearScanEnd: "linearScanEnd"
of wComputedGoto: "computedGoto"
of wInjectStmt: "injectStmt"
of wAsmNoStackFrame: "asmNoStackFrame"
of wImplicitStatic: "implicitStatic"
of wCodegenDecl: "codegenDecl"
of wLiftLocals: "liftLocals"
of wLocalPassc: "localPassc"
of wWarningAsError: "warningAsError"
else: specialWords[w]

View File

@@ -54,12 +54,12 @@ proc prettyPrint(infile, outfile: string, opt: PrettyOptions) =
let f = splitFile(outfile.expandTilde)
conf.outFile = RelativeFile f.name & f.ext
conf.outDir = toAbsoluteDir f.dir
var p: TParsers
p.parser.em.indWidth = opt.indWidth
if setupParsers(p, fileIdx, newIdentCache(), conf):
p.parser.em.maxLineLen = opt.maxLineLen
discard parseAll(p)
closeParsers(p)
var parser: Parser
parser.em.indWidth = opt.indWidth
if setupParser(parser, fileIdx, newIdentCache(), conf):
parser.em.maxLineLen = opt.maxLineLen
discard parseAll(parser)
closeParser(parser)
proc main =
var outfile, outdir: string

View File

@@ -16,8 +16,8 @@ proc checkGrammarFileImpl(cache: IdentCache, config: ConfigRef) =
if stream != nil:
declaredSyms.incl "section" # special case for 'section(RULE)' in the grammar
var
L: TLexer
tok: TToken
L: Lexer
tok: Token
initToken(tok)
openLexer(L, f, stream, cache, config)
# load the first token:

View File

@@ -195,8 +195,7 @@ proc processCmdLine*(pass: TCmdLinePass, cmd: string; conf: ConfigRef) =
of cmdArgument:
let info = p.key.split(':')
if info.len == 3:
let (dir, file, ext) = info[0].splitFile()
conf.projectName = findProjectNimFile(conf, dir)
conf.projectName = findProjectNimFile(conf, info[0].splitFile.dir)
if conf.projectName.len == 0: conf.projectName = info[0]
try:
conf.m.trackPos = newLineInfo(conf, AbsoluteFile info[0],