mirror of
https://github.com/nim-lang/Nim.git
synced 2025-12-29 01:14:41 +00:00
big refactoring: step 1
This commit is contained in:
@@ -1050,8 +1050,6 @@ proc newSym*(symKind: TSymKind, name: PIdent, owner: PSym,
|
||||
var emptyNode* = newNode(nkEmpty)
|
||||
# There is a single empty node that is shared! Do not overwrite it!
|
||||
|
||||
var anyGlobal* = newSym(skVar, getIdent("*"), nil, unknownLineInfo())
|
||||
|
||||
proc isMetaType*(t: PType): bool =
|
||||
return t.kind in tyMetaTypes or
|
||||
(t.kind == tyStatic and t.n == nil) or
|
||||
@@ -1583,14 +1581,6 @@ proc skipStmtList*(n: PNode): PNode =
|
||||
else:
|
||||
result = n
|
||||
|
||||
proc createMagic*(name: string, m: TMagic): PSym =
|
||||
result = newSym(skProc, getIdent(name), nil, unknownLineInfo())
|
||||
result.magic = m
|
||||
|
||||
let
|
||||
opNot* = createMagic("not", mNot)
|
||||
opContains* = createMagic("contains", mInSet)
|
||||
|
||||
when false:
|
||||
proc containsNil*(n: PNode): bool =
|
||||
# only for debugging
|
||||
|
||||
@@ -124,17 +124,17 @@ proc splitSwitch(switch: string, cmd, arg: var string, pass: TCmdLinePass,
|
||||
|
||||
proc processOnOffSwitch(op: TOptions, arg: string, pass: TCmdLinePass,
|
||||
info: TLineInfo) =
|
||||
case whichKeyword(arg)
|
||||
of wOn: gOptions = gOptions + op
|
||||
of wOff: gOptions = gOptions - op
|
||||
case arg.normalize
|
||||
of "on": gOptions = gOptions + op
|
||||
of "off": gOptions = gOptions - op
|
||||
else: localError(info, errOnOrOffExpectedButXFound, arg)
|
||||
|
||||
proc processOnOffSwitchOrList(op: TOptions, arg: string, pass: TCmdLinePass,
|
||||
info: TLineInfo): bool =
|
||||
result = false
|
||||
case whichKeyword(arg)
|
||||
of wOn: gOptions = gOptions + op
|
||||
of wOff: gOptions = gOptions - op
|
||||
case arg.normalize
|
||||
of "on": gOptions = gOptions + op
|
||||
of "off": gOptions = gOptions - op
|
||||
else:
|
||||
if arg == "list":
|
||||
result = true
|
||||
@@ -143,9 +143,9 @@ proc processOnOffSwitchOrList(op: TOptions, arg: string, pass: TCmdLinePass,
|
||||
|
||||
proc processOnOffSwitchG(op: TGlobalOptions, arg: string, pass: TCmdLinePass,
|
||||
info: TLineInfo) =
|
||||
case whichKeyword(arg)
|
||||
of wOn: gGlobalOptions = gGlobalOptions + op
|
||||
of wOff: gGlobalOptions = gGlobalOptions - op
|
||||
case arg.normalize
|
||||
of "on": gGlobalOptions = gGlobalOptions + op
|
||||
of "off": gGlobalOptions = gGlobalOptions - op
|
||||
else: localError(info, errOnOrOffExpectedButXFound, arg)
|
||||
|
||||
proc expectArg(switch, arg: string, pass: TCmdLinePass, info: TLineInfo) =
|
||||
@@ -178,12 +178,12 @@ proc processSpecificNote*(arg: string, state: TSpecialWord, pass: TCmdLinePass,
|
||||
var x = findStr(msgs.WarningsToStr, id)
|
||||
if x >= 0: n = TNoteKind(x + ord(warnMin))
|
||||
else: localError(info, "unknown warning: " & id)
|
||||
case whichKeyword(substr(arg, i))
|
||||
of wOn:
|
||||
case substr(arg, i).normalize
|
||||
of "on":
|
||||
incl(gNotes, n)
|
||||
incl(gMainPackageNotes, n)
|
||||
incl(enableNotes, n)
|
||||
of wOff:
|
||||
of "off":
|
||||
excl(gNotes, n)
|
||||
excl(gMainPackageNotes, n)
|
||||
incl(disableNotes, n)
|
||||
@@ -630,12 +630,8 @@ proc processSwitch(switch, arg: string, pass: TCmdLinePass, info: TLineInfo) =
|
||||
of "dynliboverride":
|
||||
dynlibOverride(switch, arg, pass, info)
|
||||
of "cs":
|
||||
# only supported for compatibility. Does nothing.
|
||||
expectArg(switch, arg, pass, info)
|
||||
case arg
|
||||
of "partial": idents.firstCharIsCS = true
|
||||
of "none": idents.firstCharIsCS = false
|
||||
else: localError(info, errGenerated,
|
||||
"'partial' or 'none' expected, but found " & arg)
|
||||
of "experimental":
|
||||
expectNoArg(switch, arg, pass, info)
|
||||
gExperimentalMode = true
|
||||
|
||||
@@ -514,7 +514,7 @@ proc genJsonItem(d: PDoc, n, nameNode: PNode, k: TSymKind): JsonNode =
|
||||
result["code"] = %r.buf
|
||||
|
||||
proc checkForFalse(n: PNode): bool =
|
||||
result = n.kind == nkIdent and identEq(n.ident, "false")
|
||||
result = n.kind == nkIdent and cmpIgnoreStyle(n.ident.s, "false") == 0
|
||||
|
||||
proc traceDeps(d: PDoc, n: PNode) =
|
||||
const k = skModule
|
||||
@@ -691,7 +691,7 @@ proc writeOutputJson*(d: PDoc, filename, outExt: string,
|
||||
discard "fixme: error report"
|
||||
|
||||
proc commandDoc*() =
|
||||
var ast = parseFile(gProjectMainIdx)
|
||||
var ast = parseFile(gProjectMainIdx, newIdentCache())
|
||||
if ast == nil: return
|
||||
var d = newDocumentor(gProjectFull, options.gConfigVars)
|
||||
d.hasToc = true
|
||||
@@ -721,7 +721,7 @@ proc commandRst2TeX*() =
|
||||
commandRstAux(gProjectFull, TexExt)
|
||||
|
||||
proc commandJson*() =
|
||||
var ast = parseFile(gProjectMainIdx)
|
||||
var ast = parseFile(gProjectMainIdx, newIdentCache())
|
||||
if ast == nil: return
|
||||
var d = newDocumentor(gProjectFull, options.gConfigVars)
|
||||
d.hasToc = true
|
||||
|
||||
@@ -62,10 +62,7 @@ proc withInExpr(p: TTmplParser): bool {.inline.} =
|
||||
result = p.par > 0 or p.bracket > 0 or p.curly > 0
|
||||
|
||||
proc parseLine(p: var TTmplParser) =
|
||||
var
|
||||
d, j, curly: int
|
||||
keyw: string
|
||||
j = 0
|
||||
var j = 0
|
||||
while p.x[j] == ' ': inc(j)
|
||||
if p.x[0] == p.nimDirective and p.x[1] == '?':
|
||||
newLine(p)
|
||||
@@ -73,16 +70,16 @@ proc parseLine(p: var TTmplParser) =
|
||||
newLine(p)
|
||||
inc(j)
|
||||
while p.x[j] == ' ': inc(j)
|
||||
d = j
|
||||
keyw = ""
|
||||
let d = j
|
||||
var keyw = ""
|
||||
while p.x[j] in PatternChars:
|
||||
add(keyw, p.x[j])
|
||||
inc(j)
|
||||
|
||||
scanPar(p, j)
|
||||
p.pendingExprLine = withInExpr(p) or llstream.endsWithOpr(p.x)
|
||||
case whichKeyword(keyw)
|
||||
of wEnd:
|
||||
case keyw
|
||||
of "end":
|
||||
if p.indent >= 2:
|
||||
dec(p.indent, 2)
|
||||
else:
|
||||
@@ -90,15 +87,15 @@ proc parseLine(p: var TTmplParser) =
|
||||
localError(p.info, errXNotAllowedHere, "end")
|
||||
llStreamWrite(p.outp, spaces(p.indent))
|
||||
llStreamWrite(p.outp, "#end")
|
||||
of wIf, wWhen, wTry, wWhile, wFor, wBlock, wCase, wProc, wIterator,
|
||||
wConverter, wMacro, wTemplate, wMethod:
|
||||
of "if", "when", "try", "while", "for", "block", "case", "proc", "iterator",
|
||||
"converter", "macro", "template", "method":
|
||||
llStreamWrite(p.outp, spaces(p.indent))
|
||||
llStreamWrite(p.outp, substr(p.x, d))
|
||||
inc(p.indent, 2)
|
||||
of wElif, wOf, wElse, wExcept, wFinally:
|
||||
of "elif", "of", "else", "except", "finally":
|
||||
llStreamWrite(p.outp, spaces(p.indent - 2))
|
||||
llStreamWrite(p.outp, substr(p.x, d))
|
||||
of wLet, wVar, wConst, wType:
|
||||
of "wLet", "wVar", "wConst", "wType":
|
||||
llStreamWrite(p.outp, spaces(p.indent))
|
||||
llStreamWrite(p.outp, substr(p.x, d))
|
||||
if not p.x.contains({':', '='}):
|
||||
@@ -158,7 +155,7 @@ proc parseLine(p: var TTmplParser) =
|
||||
llStreamWrite(p.outp, p.toStr)
|
||||
llStreamWrite(p.outp, '(')
|
||||
inc(j)
|
||||
curly = 0
|
||||
var curly = 0
|
||||
while true:
|
||||
case p.x[j]
|
||||
of '\0':
|
||||
|
||||
@@ -30,7 +30,7 @@ proc getArg(n: PNode, name: string, pos: int): PNode =
|
||||
for i in countup(1, sonsLen(n) - 1):
|
||||
if n.sons[i].kind == nkExprEqExpr:
|
||||
if n.sons[i].sons[0].kind != nkIdent: invalidPragma(n)
|
||||
if identEq(n.sons[i].sons[0].ident, name):
|
||||
if cmpIgnoreStyle(n.sons[i].sons[0].ident.s, name) == 0:
|
||||
return n.sons[i].sons[1]
|
||||
elif i == pos:
|
||||
return n.sons[i]
|
||||
@@ -50,8 +50,8 @@ proc strArg(n: PNode, name: string, pos: int, default: string): string =
|
||||
proc boolArg(n: PNode, name: string, pos: int, default: bool): bool =
|
||||
var x = getArg(n, name, pos)
|
||||
if x == nil: result = default
|
||||
elif (x.kind == nkIdent) and identEq(x.ident, "true"): result = true
|
||||
elif (x.kind == nkIdent) and identEq(x.ident, "false"): result = false
|
||||
elif x.kind == nkIdent and cmpIgnoreStyle(x.ident.s, "true") == 0: result = true
|
||||
elif x.kind == nkIdent and cmpIgnoreStyle(x.ident.s, "false") == 0: result = false
|
||||
else: invalidPragma(n)
|
||||
|
||||
proc filterStrip(stdin: PLLStream, filename: string, call: PNode): PLLStream =
|
||||
@@ -62,7 +62,7 @@ proc filterStrip(stdin: PLLStream, filename: string, call: PNode): PLLStream =
|
||||
var line = newStringOfCap(80)
|
||||
while llStreamReadLine(stdin, line):
|
||||
var stripped = strip(line, leading, trailing)
|
||||
if (len(pattern) == 0) or startsWith(stripped, pattern):
|
||||
if len(pattern) == 0 or startsWith(stripped, pattern):
|
||||
llStreamWriteln(result, stripped)
|
||||
else:
|
||||
llStreamWriteln(result, line)
|
||||
|
||||
@@ -83,6 +83,10 @@ proc isLetLocation(m: PNode, isApprox: bool): bool =
|
||||
|
||||
proc interestingCaseExpr*(m: PNode): bool = isLetLocation(m, true)
|
||||
|
||||
let
|
||||
opNot* = createMagic("not", mNot)
|
||||
opContains* = createMagic("contains", mInSet)
|
||||
|
||||
let
|
||||
opLe = createMagic("<=", mLeI)
|
||||
opLt = createMagic("<", mLtI)
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
# id. This module is essential for the compiler's performance.
|
||||
|
||||
import
|
||||
hashes, strutils, etcpriv
|
||||
hashes, strutils, etcpriv, wordrecg
|
||||
|
||||
type
|
||||
TIdObj* = object of RootObj
|
||||
@@ -25,12 +25,20 @@ type
|
||||
next*: PIdent # for hash-table chaining
|
||||
h*: Hash # hash value of s
|
||||
|
||||
var firstCharIsCS*: bool = true
|
||||
var buckets*: array[0..4096 * 2 - 1, PIdent]
|
||||
IdentCache* = ref object
|
||||
buckets: array[0..4096 * 2 - 1, PIdent]
|
||||
wordCounter: int
|
||||
idAnon*, idDelegator*, emptyIdent*: PIdent
|
||||
|
||||
var
|
||||
legacy: IdentCache
|
||||
|
||||
proc resetIdentCache*() =
|
||||
for i in low(legacy.buckets)..high(legacy.buckets):
|
||||
legacy.buckets[i] = nil
|
||||
|
||||
proc cmpIgnoreStyle(a, b: cstring, blen: int): int =
|
||||
if firstCharIsCS:
|
||||
if a[0] != b[0]: return 1
|
||||
if a[0] != b[0]: return 1
|
||||
var i = 0
|
||||
var j = 0
|
||||
result = 1
|
||||
@@ -65,9 +73,9 @@ proc cmpExact(a, b: cstring, blen: int): int =
|
||||
if result == 0:
|
||||
if a[i] != '\0': result = 1
|
||||
|
||||
var wordCounter = 1
|
||||
{.this: self.}
|
||||
|
||||
proc getIdent*(identifier: cstring, length: int, h: Hash): PIdent =
|
||||
proc getIdent*(self: IdentCache; identifier: cstring, length: int, h: Hash): PIdent =
|
||||
var idx = h and high(buckets)
|
||||
result = buckets[idx]
|
||||
var last: PIdent = nil
|
||||
@@ -97,16 +105,34 @@ proc getIdent*(identifier: cstring, length: int, h: Hash): PIdent =
|
||||
else:
|
||||
result.id = id
|
||||
|
||||
proc getIdent*(identifier: string): PIdent =
|
||||
proc getIdent*(self: IdentCache; identifier: string): PIdent =
|
||||
result = getIdent(cstring(identifier), len(identifier),
|
||||
hashIgnoreStyle(identifier))
|
||||
|
||||
proc getIdent*(identifier: string, h: Hash): PIdent =
|
||||
proc getIdent*(self: IdentCache; identifier: string, h: Hash): PIdent =
|
||||
result = getIdent(cstring(identifier), len(identifier), h)
|
||||
|
||||
proc identEq*(id: PIdent, name: string): bool =
|
||||
proc identEq*(self: IdentCache; id: PIdent, name: string): bool =
|
||||
result = id.id == getIdent(name).id
|
||||
|
||||
var idAnon* = getIdent":anonymous"
|
||||
let idDelegator* = getIdent":delegator"
|
||||
proc newIdentCache*(): IdentCache =
|
||||
if legacy.isNil:
|
||||
result = IdentCache()
|
||||
result.idAnon = result.getIdent":anonymous"
|
||||
result.wordCounter = 1
|
||||
result.idDelegator = result.getIdent":delegator"
|
||||
result.emptyIdent = result.getIdent("")
|
||||
# initialize the keywords:
|
||||
for s in countup(succ(low(specialWords)), high(specialWords)):
|
||||
result.getIdent(specialWords[s], hashIgnoreStyle(specialWords[s])).id = ord(s)
|
||||
legacy = result
|
||||
else:
|
||||
result = legacy
|
||||
|
||||
proc whichKeyword*(id: PIdent): TSpecialWord =
|
||||
if id.id < 0: result = wInvalid
|
||||
else: result = TSpecialWord(id.id)
|
||||
|
||||
proc getIdent*(identifier: string): PIdent =
|
||||
## for backwards compatibility.
|
||||
legacy.getIdent identifier
|
||||
|
||||
@@ -162,7 +162,7 @@ proc importModuleAs(n: PNode, realModule: PSym): PSym =
|
||||
proc myImportModule(c: PContext, n: PNode): PSym =
|
||||
var f = checkModuleName(n)
|
||||
if f != InvalidFileIDX:
|
||||
result = importModuleAs(n, gImportModule(c.module, f))
|
||||
result = importModuleAs(n, gImportModule(c.module, f, c.cache))
|
||||
# we cannot perform this check reliably because of
|
||||
# test: modules/import_in_config)
|
||||
if result.info.fileIndex == c.module.info.fileIndex and
|
||||
|
||||
@@ -129,6 +129,7 @@ type
|
||||
currLineIndent*: int
|
||||
strongSpaces*, allowTabs*: bool
|
||||
errorHandler*: TErrorHandler
|
||||
cache*: IdentCache
|
||||
|
||||
var gLinesCompiled*: int # all lines that have been compiled
|
||||
|
||||
@@ -164,7 +165,6 @@ proc tokToStr*(tok: TToken): string =
|
||||
if tok.ident != nil:
|
||||
result = tok.ident.s
|
||||
else:
|
||||
internalError("tokToStr")
|
||||
result = ""
|
||||
|
||||
proc prettyTok*(tok: TToken): string =
|
||||
@@ -175,8 +175,6 @@ proc printTok*(tok: TToken) =
|
||||
msgWriteln($tok.line & ":" & $tok.col & "\t" &
|
||||
TokTypeToStr[tok.tokType] & " " & tokToStr(tok))
|
||||
|
||||
var dummyIdent: PIdent
|
||||
|
||||
proc initToken*(L: var TToken) =
|
||||
L.tokType = tkInvalid
|
||||
L.iNumber = 0
|
||||
@@ -185,7 +183,7 @@ proc initToken*(L: var TToken) =
|
||||
L.literal = ""
|
||||
L.fNumber = 0.0
|
||||
L.base = base10
|
||||
L.ident = dummyIdent
|
||||
L.ident = nil
|
||||
|
||||
proc fillToken(L: var TToken) =
|
||||
L.tokType = tkInvalid
|
||||
@@ -195,17 +193,20 @@ proc fillToken(L: var TToken) =
|
||||
setLen(L.literal, 0)
|
||||
L.fNumber = 0.0
|
||||
L.base = base10
|
||||
L.ident = dummyIdent
|
||||
L.ident = nil
|
||||
|
||||
proc openLexer*(lex: var TLexer, fileIdx: int32, inputstream: PLLStream) =
|
||||
proc openLexer*(lex: var TLexer, fileIdx: int32, inputstream: PLLStream;
|
||||
cache: IdentCache) =
|
||||
openBaseLexer(lex, inputstream)
|
||||
lex.fileIdx = fileidx
|
||||
lex.indentAhead = - 1
|
||||
lex.currLineIndent = 0
|
||||
inc(lex.lineNumber, inputstream.lineOffset)
|
||||
lex.cache = cache
|
||||
|
||||
proc openLexer*(lex: var TLexer, filename: string, inputstream: PLLStream) =
|
||||
openLexer(lex, filename.fileInfoIdx, inputstream)
|
||||
proc openLexer*(lex: var TLexer, filename: string, inputstream: PLLStream;
|
||||
cache: IdentCache) =
|
||||
openLexer(lex, filename.fileInfoIdx, inputstream, cache)
|
||||
|
||||
proc closeLexer*(lex: var TLexer) =
|
||||
inc(gLinesCompiled, lex.lineNumber)
|
||||
@@ -746,7 +747,7 @@ proc getSymbol(L: var TLexer, tok: var TToken) =
|
||||
|
||||
else: break
|
||||
h = !$h
|
||||
tok.ident = getIdent(addr(L.buf[L.bufpos]), pos - L.bufpos, h)
|
||||
tok.ident = L.cache.getIdent(addr(L.buf[L.bufpos]), pos - L.bufpos, h)
|
||||
L.bufpos = pos
|
||||
if (tok.ident.id < ord(tokKeywordLow) - ord(tkSymbol)) or
|
||||
(tok.ident.id > ord(tokKeywordHigh) - ord(tkSymbol)):
|
||||
@@ -757,7 +758,7 @@ proc getSymbol(L: var TLexer, tok: var TToken) =
|
||||
proc endOperator(L: var TLexer, tok: var TToken, pos: int,
|
||||
hash: Hash) {.inline.} =
|
||||
var h = !$hash
|
||||
tok.ident = getIdent(addr(L.buf[L.bufpos]), pos - L.bufpos, h)
|
||||
tok.ident = L.cache.getIdent(addr(L.buf[L.bufpos]), pos - L.bufpos, h)
|
||||
if (tok.ident.id < oprLow) or (tok.ident.id > oprHigh): tok.tokType = tkOpr
|
||||
else: tok.tokType = TTokType(tok.ident.id - oprLow + ord(tkColon))
|
||||
L.bufpos = pos
|
||||
@@ -847,34 +848,23 @@ proc scanComment(L: var TLexer, tok: var TToken) =
|
||||
tok.tokType = tkComment
|
||||
# iNumber contains the number of '\n' in the token
|
||||
tok.iNumber = 0
|
||||
when not defined(nimfix):
|
||||
assert buf[pos+1] == '#'
|
||||
if buf[pos+2] == '[':
|
||||
skipMultiLineComment(L, tok, pos+3, true)
|
||||
return
|
||||
inc(pos, 2)
|
||||
assert buf[pos+1] == '#'
|
||||
if buf[pos+2] == '[':
|
||||
skipMultiLineComment(L, tok, pos+3, true)
|
||||
return
|
||||
inc(pos, 2)
|
||||
|
||||
var toStrip = 0
|
||||
while buf[pos] == ' ':
|
||||
inc pos
|
||||
inc toStrip
|
||||
|
||||
when defined(nimfix):
|
||||
var col = getColNumber(L, pos)
|
||||
while true:
|
||||
var lastBackslash = -1
|
||||
while buf[pos] notin {CR, LF, nimlexbase.EndOfFile}:
|
||||
if buf[pos] == '\\': lastBackslash = pos+1
|
||||
add(tok.literal, buf[pos])
|
||||
inc(pos)
|
||||
when defined(nimfix):
|
||||
if lastBackslash > 0:
|
||||
# a backslash is a continuation character if only followed by spaces
|
||||
# plus a newline:
|
||||
while buf[lastBackslash] == ' ': inc(lastBackslash)
|
||||
if buf[lastBackslash] notin {CR, LF, nimlexbase.EndOfFile}:
|
||||
# false positive:
|
||||
lastBackslash = -1
|
||||
|
||||
pos = handleCRLF(L, pos)
|
||||
buf = L.buf
|
||||
@@ -883,21 +873,13 @@ proc scanComment(L: var TLexer, tok: var TToken) =
|
||||
inc(pos)
|
||||
inc(indent)
|
||||
|
||||
when defined(nimfix):
|
||||
template doContinue(): untyped =
|
||||
buf[pos] == '#' and (col == indent or lastBackslash > 0)
|
||||
else:
|
||||
template doContinue(): untyped =
|
||||
buf[pos] == '#' and buf[pos+1] == '#'
|
||||
if doContinue():
|
||||
if buf[pos] == '#' and buf[pos+1] == '#':
|
||||
tok.literal.add "\n"
|
||||
when defined(nimfix): col = indent
|
||||
else:
|
||||
inc(pos, 2)
|
||||
var c = toStrip
|
||||
while buf[pos] == ' ' and c > 0:
|
||||
inc pos
|
||||
dec c
|
||||
inc(pos, 2)
|
||||
var c = toStrip
|
||||
while buf[pos] == ' ' and c > 0:
|
||||
inc pos
|
||||
dec c
|
||||
inc tok.iNumber
|
||||
else:
|
||||
if buf[pos] > ' ':
|
||||
@@ -932,27 +914,19 @@ proc skip(L: var TLexer, tok: var TToken) =
|
||||
else:
|
||||
break
|
||||
tok.strongSpaceA = 0
|
||||
when defined(nimfix):
|
||||
template doBreak(): untyped = buf[pos] > ' '
|
||||
else:
|
||||
template doBreak(): untyped =
|
||||
buf[pos] > ' ' and (buf[pos] != '#' or buf[pos+1] == '#')
|
||||
if doBreak():
|
||||
if buf[pos] > ' ' and (buf[pos] != '#' or buf[pos+1] == '#'):
|
||||
tok.indent = indent
|
||||
L.currLineIndent = indent
|
||||
break
|
||||
of '#':
|
||||
when defined(nimfix):
|
||||
break
|
||||
# do not skip documentation comment:
|
||||
if buf[pos+1] == '#': break
|
||||
if buf[pos+1] == '[':
|
||||
skipMultiLineComment(L, tok, pos+2, false)
|
||||
pos = L.bufpos
|
||||
buf = L.buf
|
||||
else:
|
||||
# do not skip documentation comment:
|
||||
if buf[pos+1] == '#': break
|
||||
if buf[pos+1] == '[':
|
||||
skipMultiLineComment(L, tok, pos+2, false)
|
||||
pos = L.bufpos
|
||||
buf = L.buf
|
||||
else:
|
||||
while buf[pos] notin {CR, LF, nimlexbase.EndOfFile}: inc(pos)
|
||||
while buf[pos] notin {CR, LF, nimlexbase.EndOfFile}: inc(pos)
|
||||
else:
|
||||
break # EndOfFile also leaves the loop
|
||||
L.bufpos = pos
|
||||
@@ -1051,7 +1025,7 @@ proc rawGetTok*(L: var TLexer, tok: var TToken) =
|
||||
if L.buf[L.bufpos] notin SymChars+{'_'} and not
|
||||
isMagicIdentSeparatorRune(L.buf, L.bufpos):
|
||||
tok.tokType = tkSymbol
|
||||
tok.ident = getIdent("_")
|
||||
tok.ident = L.cache.getIdent("_")
|
||||
else:
|
||||
tok.literal = $c
|
||||
tok.tokType = tkInvalid
|
||||
@@ -1084,5 +1058,3 @@ proc rawGetTok*(L: var TLexer, tok: var TToken) =
|
||||
tok.tokType = tkInvalid
|
||||
lexMessage(L, errInvalidToken, c & " (\\" & $(ord(c)) & ')')
|
||||
inc(L.bufpos)
|
||||
|
||||
dummyIdent = getIdent("")
|
||||
|
||||
@@ -284,7 +284,7 @@ proc addLocalVar(varSection, varInit: PNode; owner: PSym; typ: PType;
|
||||
varInit.add newFastAsgnStmt(newSymNode(result), v)
|
||||
else:
|
||||
let deepCopyCall = newNodeI(nkCall, varInit.info, 3)
|
||||
deepCopyCall.sons[0] = newSymNode(createMagic("deepCopy", mDeepCopy))
|
||||
deepCopyCall.sons[0] = newSymNode(getSysMagic("deepCopy", mDeepCopy))
|
||||
deepCopyCall.sons[1] = newSymNode(result)
|
||||
deepCopyCall.sons[2] = v
|
||||
varInit.add deepCopyCall
|
||||
@@ -356,7 +356,7 @@ proc createWrapperProc(f: PNode; threadParam, argsParam: PSym;
|
||||
if fk == fvGC: "data" else: "blob", fv.info), call)
|
||||
if fk == fvGC:
|
||||
let incRefCall = newNodeI(nkCall, fv.info, 2)
|
||||
incRefCall.sons[0] = newSymNode(createMagic("GCref", mGCref))
|
||||
incRefCall.sons[0] = newSymNode(getSysMagic("GCref", mGCref))
|
||||
incRefCall.sons[1] = indirectAccess(threadLocalProm.newSymNode,
|
||||
"data", fv.info)
|
||||
body.add incRefCall
|
||||
@@ -446,7 +446,7 @@ proc genHigh*(n: PNode): PNode =
|
||||
else:
|
||||
result = newNodeI(nkCall, n.info, 2)
|
||||
result.typ = getSysType(tyInt)
|
||||
result.sons[0] = newSymNode(createMagic("high", mHigh))
|
||||
result.sons[0] = newSymNode(getSysMagic("high", mHigh))
|
||||
result.sons[1] = n
|
||||
|
||||
proc setupArgsForParallelism(n: PNode; objType: PType; scratchObj: PSym;
|
||||
|
||||
@@ -38,6 +38,10 @@ proc getSysSym*(name: string): PSym =
|
||||
if result.kind == skStub: loadStub(result)
|
||||
if result.kind == skAlias: result = result.owner
|
||||
|
||||
proc createMagic*(name: string, m: TMagic): PSym =
|
||||
result = newSym(skProc, getIdent(name), nil, unknownLineInfo())
|
||||
result.magic = m
|
||||
|
||||
proc getSysMagic*(name: string, m: TMagic): PSym =
|
||||
var ti: TIdentIter
|
||||
let id = getIdent(name)
|
||||
|
||||
@@ -193,9 +193,7 @@ proc resetMemory =
|
||||
resetRopeCache()
|
||||
resetSysTypes()
|
||||
gOwners = @[]
|
||||
for i in low(buckets)..high(buckets):
|
||||
buckets[i] = nil
|
||||
idAnon = nil
|
||||
resetIdentCache()
|
||||
|
||||
# XXX: clean these global vars
|
||||
# ccgstmts.gBreakpoints
|
||||
@@ -235,7 +233,7 @@ const
|
||||
SimulateCaasMemReset = false
|
||||
PrintRopeCacheStats = false
|
||||
|
||||
proc mainCommand* =
|
||||
proc mainCommand*(cache: IdentCache) =
|
||||
when SimulateCaasMemReset:
|
||||
gGlobalOptions.incl(optCaasEnabled)
|
||||
|
||||
@@ -276,37 +274,37 @@ proc mainCommand* =
|
||||
of "doc":
|
||||
wantMainModule()
|
||||
gCmd = cmdDoc
|
||||
loadConfigs(DocConfig)
|
||||
loadConfigs(DocConfig, cache)
|
||||
commandDoc()
|
||||
of "doc2":
|
||||
gCmd = cmdDoc
|
||||
loadConfigs(DocConfig)
|
||||
loadConfigs(DocConfig, cache)
|
||||
defineSymbol("nimdoc")
|
||||
commandDoc2(false)
|
||||
of "rst2html":
|
||||
gCmd = cmdRst2html
|
||||
loadConfigs(DocConfig)
|
||||
loadConfigs(DocConfig, cache)
|
||||
commandRst2Html()
|
||||
of "rst2tex":
|
||||
gCmd = cmdRst2tex
|
||||
loadConfigs(DocTexConfig)
|
||||
loadConfigs(DocTexConfig, cache)
|
||||
commandRst2TeX()
|
||||
of "jsondoc":
|
||||
wantMainModule()
|
||||
gCmd = cmdDoc
|
||||
loadConfigs(DocConfig)
|
||||
loadConfigs(DocConfig, cache)
|
||||
wantMainModule()
|
||||
defineSymbol("nimdoc")
|
||||
commandJson()
|
||||
of "jsondoc2":
|
||||
gCmd = cmdDoc
|
||||
loadConfigs(DocConfig)
|
||||
loadConfigs(DocConfig, cache)
|
||||
wantMainModule()
|
||||
defineSymbol("nimdoc")
|
||||
commandDoc2(true)
|
||||
of "buildindex":
|
||||
gCmd = cmdDoc
|
||||
loadConfigs(DocConfig)
|
||||
loadConfigs(DocConfig, cache)
|
||||
commandBuildIndex()
|
||||
of "gendepend":
|
||||
gCmd = cmdGenDepend
|
||||
@@ -394,3 +392,5 @@ proc mainCommand* =
|
||||
resetMemory()
|
||||
|
||||
resetAttributes()
|
||||
|
||||
proc mainCommand*() = mainCommand(newIdentCache())
|
||||
|
||||
@@ -167,7 +167,7 @@ proc newModule(fileIdx: int32): PSym =
|
||||
# strTableIncl() for error corrections:
|
||||
discard strTableIncl(packSym.tab, result)
|
||||
|
||||
proc compileModule*(fileIdx: int32, flags: TSymFlags): PSym =
|
||||
proc compileModule*(fileIdx: int32; cache: IdentCache, flags: TSymFlags): PSym =
|
||||
result = getModule(fileIdx)
|
||||
if result == nil:
|
||||
growCache gMemCacheData, fileIdx
|
||||
@@ -180,7 +180,7 @@ proc compileModule*(fileIdx: int32, flags: TSymFlags): PSym =
|
||||
gMainPackageId = result.owner.id
|
||||
|
||||
if gCmd in {cmdCompileToC, cmdCompileToCpp, cmdCheck, cmdIdeTools}:
|
||||
rd = handleSymbolFile(result)
|
||||
rd = handleSymbolFile(result, cache)
|
||||
if result.id < 0:
|
||||
internalError("handleSymbolFile should have set the module\'s ID")
|
||||
return
|
||||
@@ -197,9 +197,9 @@ proc compileModule*(fileIdx: int32, flags: TSymFlags): PSym =
|
||||
else:
|
||||
result = gCompiledModules[fileIdx]
|
||||
|
||||
proc importModule*(s: PSym, fileIdx: int32): PSym {.procvar.} =
|
||||
proc importModule*(s: PSym, fileIdx: int32; cache: IdentCache): PSym {.procvar.} =
|
||||
# this is called by the semantic checking phase
|
||||
result = compileModule(fileIdx, {})
|
||||
result = compileModule(fileIdx, cache, {})
|
||||
if optCaasEnabled in gGlobalOptions: addDep(s, fileIdx)
|
||||
#if sfSystemModule in result.flags:
|
||||
# localError(result.info, errAttemptToRedefine, result.name.s)
|
||||
@@ -207,7 +207,7 @@ proc importModule*(s: PSym, fileIdx: int32): PSym {.procvar.} =
|
||||
gNotes = if s.owner.id == gMainPackageId: gMainPackageNotes
|
||||
else: ForeignPackageNotes
|
||||
|
||||
proc includeModule*(s: PSym, fileIdx: int32): PNode {.procvar.} =
|
||||
proc includeModule*(s: PSym, fileIdx: int32; cache: IdentCache): PNode {.procvar.} =
|
||||
result = syntaxes.parseFile(fileIdx)
|
||||
if optCaasEnabled in gGlobalOptions:
|
||||
growCache gMemCacheData, fileIdx
|
||||
|
||||
@@ -156,7 +156,7 @@ proc checkSymbol(L: TLexer, tok: TToken) =
|
||||
lexMessage(L, errIdentifierExpected, tokToStr(tok))
|
||||
|
||||
proc parseAssignment(L: var TLexer, tok: var TToken) =
|
||||
if tok.ident.id == getIdent("-").id or tok.ident.id == getIdent("--").id:
|
||||
if tok.ident.s == "-" or tok.ident.s == "--":
|
||||
confTok(L, tok) # skip unnecessary prefix
|
||||
var info = getLineInfo(L, tok) # save for later in case of an error
|
||||
checkSymbol(L, tok)
|
||||
@@ -179,14 +179,14 @@ proc parseAssignment(L: var TLexer, tok: var TToken) =
|
||||
if tok.tokType == tkBracketRi: confTok(L, tok)
|
||||
else: lexMessage(L, errTokenExpected, "']'")
|
||||
add(val, ']')
|
||||
let percent = tok.ident.id == getIdent("%=").id
|
||||
let percent = tok.ident.s == "%="
|
||||
if tok.tokType in {tkColon, tkEquals} or percent:
|
||||
if len(val) > 0: add(val, ':')
|
||||
confTok(L, tok) # skip ':' or '=' or '%'
|
||||
checkSymbol(L, tok)
|
||||
add(val, tokToStr(tok))
|
||||
confTok(L, tok) # skip symbol
|
||||
while tok.ident != nil and tok.ident.id == getIdent("&").id:
|
||||
while tok.ident != nil and tok.ident.s == "&":
|
||||
confTok(L, tok)
|
||||
checkSymbol(L, tok)
|
||||
add(val, tokToStr(tok))
|
||||
@@ -197,7 +197,7 @@ proc parseAssignment(L: var TLexer, tok: var TToken) =
|
||||
else:
|
||||
processSwitch(s, val, passPP, info)
|
||||
|
||||
proc readConfigFile(filename: string) =
|
||||
proc readConfigFile(filename: string; cache: IdentCache) =
|
||||
var
|
||||
L: TLexer
|
||||
tok: TToken
|
||||
@@ -205,7 +205,7 @@ proc readConfigFile(filename: string) =
|
||||
stream = llStreamOpen(filename, fmRead)
|
||||
if stream != nil:
|
||||
initToken(tok)
|
||||
openLexer(L, filename, stream)
|
||||
openLexer(L, filename, stream, cache)
|
||||
tok.tokType = tkEof # to avoid a pointless warning
|
||||
confTok(L, tok) # read in the first token
|
||||
while tok.tokType != tkEof: parseAssignment(L, tok)
|
||||
@@ -225,22 +225,22 @@ proc getSystemConfigPath(filename: string): string =
|
||||
if not existsFile(result): result = joinPath([p, "etc", filename])
|
||||
if not existsFile(result): result = "/etc/" & filename
|
||||
|
||||
proc loadConfigs*(cfg: string) =
|
||||
proc loadConfigs*(cfg: string; cache: IdentCache) =
|
||||
setDefaultLibpath()
|
||||
|
||||
if optSkipConfigFile notin gGlobalOptions:
|
||||
readConfigFile(getSystemConfigPath(cfg))
|
||||
readConfigFile(getSystemConfigPath(cfg), cache)
|
||||
|
||||
if optSkipUserConfigFile notin gGlobalOptions:
|
||||
readConfigFile(getUserConfigPath(cfg))
|
||||
readConfigFile(getUserConfigPath(cfg), cache)
|
||||
|
||||
var pd = if gProjectPath.len > 0: gProjectPath else: getCurrentDir()
|
||||
if optSkipParentConfigFiles notin gGlobalOptions:
|
||||
for dir in parentDirs(pd, fromRoot=true, inclusive=false):
|
||||
readConfigFile(dir / cfg)
|
||||
readConfigFile(dir / cfg, cache)
|
||||
|
||||
if optSkipProjConfigFile notin gGlobalOptions:
|
||||
readConfigFile(pd / cfg)
|
||||
readConfigFile(pd / cfg, cache)
|
||||
|
||||
if gProjectName.len != 0:
|
||||
# new project wide config file:
|
||||
@@ -251,4 +251,8 @@ proc loadConfigs*(cfg: string) =
|
||||
projectConfig = changeFileExt(gProjectFull, "nimrod.cfg")
|
||||
if fileExists(projectConfig):
|
||||
rawMessage(warnDeprecated, projectConfig)
|
||||
readConfigFile(projectConfig)
|
||||
readConfigFile(projectConfig, cache)
|
||||
|
||||
proc loadConfigs*(cfg: string) =
|
||||
# for backwards compatibility only.
|
||||
loadConfigs(cfg, newIdentCache())
|
||||
|
||||
@@ -73,18 +73,20 @@ proc getTok(p: var TParser) =
|
||||
rawGetTok(p.lex, p.tok)
|
||||
|
||||
proc openParser*(p: var TParser, fileIdx: int32, inputStream: PLLStream,
|
||||
cache: IdentCache;
|
||||
strongSpaces=false) =
|
||||
## Open a parser, using the given arguments to set up its internal state.
|
||||
##
|
||||
initToken(p.tok)
|
||||
openLexer(p.lex, fileIdx, inputStream)
|
||||
openLexer(p.lex, fileIdx, inputStream, cache)
|
||||
getTok(p) # read the first token
|
||||
p.firstTok = true
|
||||
p.strongSpaces = strongSpaces
|
||||
|
||||
proc openParser*(p: var TParser, filename: string, inputStream: PLLStream,
|
||||
cache: IdentCache;
|
||||
strongSpaces=false) =
|
||||
openParser(p, filename.fileInfoIdx, inputStream, strongSpaces)
|
||||
openParser(p, filename.fileInfoIdx, inputStream, cache, strongSpaces)
|
||||
|
||||
proc closeParser(p: var TParser) =
|
||||
## Close a parser, freeing up its resources.
|
||||
@@ -320,9 +322,9 @@ proc parseSymbol(p: var TParser, allowNil = false): PNode =
|
||||
tkParLe..tkParDotRi}:
|
||||
accm.add(tokToStr(p.tok))
|
||||
getTok(p)
|
||||
result.add(newIdentNodeP(getIdent(accm), p))
|
||||
result.add(newIdentNodeP(p.lex.cache.getIdent(accm), p))
|
||||
of tokKeywordLow..tokKeywordHigh, tkSymbol, tkIntLit..tkCharLit:
|
||||
result.add(newIdentNodeP(getIdent(tokToStr(p.tok)), p))
|
||||
result.add(newIdentNodeP(p.lex.cache.getIdent(tokToStr(p.tok)), p))
|
||||
getTok(p)
|
||||
else:
|
||||
parMessage(p, errIdentifierExpected, p.tok)
|
||||
@@ -923,7 +925,7 @@ proc parseParamList(p: var TParser, retColon = true): PNode =
|
||||
optPar(p)
|
||||
eat(p, tkParRi)
|
||||
let hasRet = if retColon: p.tok.tokType == tkColon
|
||||
else: p.tok.tokType == tkOpr and identEq(p.tok.ident, "->")
|
||||
else: p.tok.tokType == tkOpr and p.tok.ident.s == "->"
|
||||
if hasRet and p.tok.indent < 0:
|
||||
getTok(p)
|
||||
optInd(p, result)
|
||||
@@ -2023,7 +2025,8 @@ proc parseTopLevelStmt(p: var TParser): PNode =
|
||||
if result.kind == nkEmpty: parMessage(p, errExprExpected, p.tok)
|
||||
break
|
||||
|
||||
proc parseString*(s: string; filename: string = ""; line: int = 0;
|
||||
proc parseString*(s: string; cache: IdentCache; filename: string = "";
|
||||
line: int = 0;
|
||||
errorHandler: TErrorHandler = nil): PNode =
|
||||
## Parses a string into an AST, returning the top node.
|
||||
## `filename` and `line`, although optional, provide info so that the
|
||||
@@ -2036,7 +2039,7 @@ proc parseString*(s: string; filename: string = ""; line: int = 0;
|
||||
# XXX for now the builtin 'parseStmt/Expr' functions do not know about strong
|
||||
# spaces...
|
||||
parser.lex.errorHandler = errorHandler
|
||||
openParser(parser, filename, stream, false)
|
||||
openParser(parser, filename, stream, cache, false)
|
||||
|
||||
result = parser.parseAll
|
||||
closeParser(parser)
|
||||
|
||||
@@ -21,7 +21,7 @@ type
|
||||
|
||||
PPassContext* = ref TPassContext
|
||||
|
||||
TPassOpen* = proc (module: PSym): PPassContext {.nimcall.}
|
||||
TPassOpen* = proc (module: PSym; cache: IdentCache): PPassContext {.nimcall.}
|
||||
TPassOpenCached* =
|
||||
proc (module: PSym, rd: PRodReader): PPassContext {.nimcall.}
|
||||
TPassClose* = proc (p: PPassContext, n: PNode): PNode {.nimcall.}
|
||||
@@ -48,8 +48,8 @@ proc makePass*(open: TPassOpen = nil,
|
||||
|
||||
# the semantic checker needs these:
|
||||
var
|
||||
gImportModule*: proc (m: PSym, fileIdx: int32): PSym {.nimcall.}
|
||||
gIncludeFile*: proc (m: PSym, fileIdx: int32): PNode {.nimcall.}
|
||||
gImportModule*: proc (m: PSym, fileIdx: int32; cache: IdentCache): PSym {.nimcall.}
|
||||
gIncludeFile*: proc (m: PSym, fileIdx: int32; cache: IdentCache): PNode {.nimcall.}
|
||||
|
||||
# implementation
|
||||
|
||||
@@ -90,22 +90,23 @@ proc registerPass*(p: TPass) =
|
||||
gPasses[gPassesLen] = p
|
||||
inc(gPassesLen)
|
||||
|
||||
proc carryPass*(p: TPass, module: PSym, m: TPassData): TPassData =
|
||||
var c = p.open(module)
|
||||
proc carryPass*(p: TPass, module: PSym; cache: IdentCache;
|
||||
m: TPassData): TPassData =
|
||||
var c = p.open(module, cache)
|
||||
result.input = p.process(c, m.input)
|
||||
result.closeOutput = if p.close != nil: p.close(c, m.closeOutput)
|
||||
else: m.closeOutput
|
||||
|
||||
proc carryPasses*(nodes: PNode, module: PSym, passes: TPasses) =
|
||||
proc carryPasses*(nodes: PNode, module: PSym; cache: IdentCache; passes: TPasses) =
|
||||
var passdata: TPassData
|
||||
passdata.input = nodes
|
||||
for pass in passes:
|
||||
passdata = carryPass(pass, module, passdata)
|
||||
passdata = carryPass(pass, module, cache, passdata)
|
||||
|
||||
proc openPasses(a: var TPassContextArray, module: PSym) =
|
||||
proc openPasses(a: var TPassContextArray, module: PSym; cache: IdentCache) =
|
||||
for i in countup(0, gPassesLen - 1):
|
||||
if not isNil(gPasses[i].open):
|
||||
a[i] = gPasses[i].open(module)
|
||||
a[i] = gPasses[i].open(module, cache)
|
||||
else: a[i] = nil
|
||||
|
||||
proc openPassesCached(a: var TPassContextArray, module: PSym, rd: PRodReader) =
|
||||
@@ -155,14 +156,14 @@ proc processImplicits(implicits: seq[string], nodeKind: TNodeKind,
|
||||
if not processTopLevelStmt(importStmt, a): break
|
||||
|
||||
proc processModule*(module: PSym, stream: PLLStream,
|
||||
rd: PRodReader): bool {.discardable.} =
|
||||
rd: PRodReader; cache: IdentCache): bool {.discardable.} =
|
||||
var
|
||||
p: TParsers
|
||||
a: TPassContextArray
|
||||
s: PLLStream
|
||||
fileIdx = module.fileIdx
|
||||
if rd == nil:
|
||||
openPasses(a, module)
|
||||
openPasses(a, module, cache)
|
||||
if stream == nil:
|
||||
let filename = fileIdx.toFullPathConsiderDirty
|
||||
s = llStreamOpen(filename, fmRead)
|
||||
@@ -172,7 +173,7 @@ proc processModule*(module: PSym, stream: PLLStream,
|
||||
else:
|
||||
s = stream
|
||||
while true:
|
||||
openParsers(p, fileIdx, s)
|
||||
openParsers(p, fileIdx, s, cache)
|
||||
|
||||
if sfSystemModule notin module.flags:
|
||||
# XXX what about caching? no processing then? what if I change the
|
||||
|
||||
@@ -19,16 +19,18 @@ proc getTok(p: var TParser) =
|
||||
## `tok` member.
|
||||
rawGetTok(p.lex, p.tok)
|
||||
|
||||
proc openParser*(p: var TParser, fileIdx: int32, inputStream: PLLStream) =
|
||||
proc openParser*(p: var TParser, fileIdx: int32, inputStream: PLLStream;
|
||||
cache: IdentCache) =
|
||||
## Open a parser, using the given arguments to set up its internal state.
|
||||
##
|
||||
initToken(p.tok)
|
||||
openLexer(p.lex, fileIdx, inputStream)
|
||||
openLexer(p.lex, fileIdx, inputStream, cache)
|
||||
getTok(p) # read the first token
|
||||
p.lex.allowTabs = true
|
||||
|
||||
proc openParser*(p: var TParser, filename: string, inputStream: PLLStream) =
|
||||
openParser(p, filename.fileInfoIdx, inputStream)
|
||||
proc openParser*(p: var TParser, filename: string, inputStream: PLLStream;
|
||||
cache: IdentCache) =
|
||||
openParser(p, filename.fileInfoIdx, inputStream, cache)
|
||||
|
||||
proc closeParser*(p: var TParser) =
|
||||
## Close a parser, freeing up its resources.
|
||||
@@ -232,9 +234,9 @@ proc parseSymbol(p: var TParser, allowNil = false): PNode =
|
||||
tkParLe..tkParDotRi}:
|
||||
accm.add(tokToStr(p.tok))
|
||||
getTok(p)
|
||||
result.add(newIdentNodeP(getIdent(accm), p))
|
||||
result.add(newIdentNodeP(p.lex.cache.getIdent(accm), p))
|
||||
of tokKeywordLow..tokKeywordHigh, tkSymbol, tkIntLit..tkCharLit:
|
||||
result.add(newIdentNodeP(getIdent(tokToStr(p.tok)), p))
|
||||
result.add(newIdentNodeP(p.lex.cache.getIdent(tokToStr(p.tok)), p))
|
||||
getTok(p)
|
||||
else:
|
||||
parMessage(p, errIdentifierExpected, p.tok)
|
||||
@@ -819,7 +821,7 @@ proc parseParamList(p: var TParser, retColon = true): PNode =
|
||||
optPar(p)
|
||||
eat(p, tkParRi)
|
||||
let hasRet = if retColon: p.tok.tokType == tkColon
|
||||
else: p.tok.tokType == tkOpr and identEq(p.tok.ident, "->")
|
||||
else: p.tok.tokType == tkOpr and p.tok.ident.s == "->"
|
||||
if hasRet and p.tok.indent < 0:
|
||||
getTok(p)
|
||||
optInd(p, result)
|
||||
|
||||
@@ -54,8 +54,6 @@ proc isKeyword*(i: PIdent): bool =
|
||||
(i.id <= ord(tokKeywordHigh) - ord(tkSymbol)):
|
||||
result = true
|
||||
|
||||
proc isKeyword*(s: string): bool = isKeyword(getIdent(s))
|
||||
|
||||
proc renderDefinitionName*(s: PSym, noQuotes = false): string =
|
||||
## Returns the definition name of the symbol.
|
||||
##
|
||||
|
||||
@@ -142,6 +142,7 @@ type
|
||||
methods*: TSymSeq
|
||||
origFile: string
|
||||
inViewMode: bool
|
||||
cache: IdentCache
|
||||
|
||||
PRodReader* = ref TRodReader
|
||||
|
||||
@@ -219,7 +220,7 @@ proc decodeNodeLazyBody(r: PRodReader, fInfo: TLineInfo,
|
||||
if r.s[r.pos] == '!':
|
||||
inc(r.pos)
|
||||
var fl = decodeStr(r.s, r.pos)
|
||||
result.ident = getIdent(fl)
|
||||
result.ident = r.cache.getIdent(fl)
|
||||
else:
|
||||
internalError(result.info, "decodeNode: nkIdent")
|
||||
of nkSym:
|
||||
@@ -401,7 +402,7 @@ proc decodeSym(r: PRodReader, info: TLineInfo): PSym =
|
||||
internalError(info, "decodeSym: no id")
|
||||
if r.s[r.pos] == '&':
|
||||
inc(r.pos)
|
||||
ident = getIdent(decodeStr(r.s, r.pos))
|
||||
ident = r.cache.getIdent(decodeStr(r.s, r.pos))
|
||||
else:
|
||||
internalError(info, "decodeSym: no ident")
|
||||
#echo "decoding: {", ident.s
|
||||
@@ -519,7 +520,7 @@ proc newStub(r: PRodReader, name: string, id: int): PSym =
|
||||
new(result)
|
||||
result.kind = skStub
|
||||
result.id = id
|
||||
result.name = getIdent(name)
|
||||
result.name = r.cache.getIdent(name)
|
||||
result.position = r.readerIndex
|
||||
setId(id) #MessageOut(result.name.s);
|
||||
if debugIds: registerID(result)
|
||||
@@ -632,7 +633,7 @@ proc processRodFile(r: PRodReader, hash: SecureHash) =
|
||||
while r.s[r.pos] > '\x0A':
|
||||
w = decodeStr(r.s, r.pos)
|
||||
inc(d)
|
||||
if not condsyms.isDefined(getIdent(w)):
|
||||
if not condsyms.isDefined(r.cache.getIdent(w)):
|
||||
r.reason = rrDefines #MessageOut('not defined, but should: ' + w);
|
||||
if r.s[r.pos] == ' ': inc(r.pos)
|
||||
if d != countDefinedSymbols(): r.reason = rrDefines
|
||||
@@ -707,8 +708,9 @@ proc startsWith(buf: cstring, token: string, pos = 0): bool =
|
||||
result = s == token.len
|
||||
|
||||
proc newRodReader(modfilename: string, hash: SecureHash,
|
||||
readerIndex: int): PRodReader =
|
||||
readerIndex: int; cache: IdentCache): PRodReader =
|
||||
new(result)
|
||||
result.cache = cache
|
||||
try:
|
||||
result.memfile = memfiles.open(modfilename)
|
||||
except OSError:
|
||||
@@ -866,7 +868,7 @@ proc getHash*(fileIdx: int32): SecureHash =
|
||||
template growCache*(cache, pos) =
|
||||
if cache.len <= pos: cache.setLen(pos+1)
|
||||
|
||||
proc checkDep(fileIdx: int32): TReasonForRecompile =
|
||||
proc checkDep(fileIdx: int32; cache: IdentCache): TReasonForRecompile =
|
||||
assert fileIdx != InvalidFileIDX
|
||||
growCache gMods, fileIdx
|
||||
if gMods[fileIdx].reason != rrEmpty:
|
||||
@@ -877,7 +879,7 @@ proc checkDep(fileIdx: int32): TReasonForRecompile =
|
||||
gMods[fileIdx].reason = rrNone # we need to set it here to avoid cycles
|
||||
result = rrNone
|
||||
var rodfile = toGeneratedFile(filename.withPackageName, RodExt)
|
||||
var r = newRodReader(rodfile, hash, fileIdx)
|
||||
var r = newRodReader(rodfile, hash, fileIdx, cache)
|
||||
if r == nil:
|
||||
result = (if existsFile(rodfile): rrRodInvalid else: rrRodDoesNotExist)
|
||||
else:
|
||||
@@ -888,10 +890,10 @@ proc checkDep(fileIdx: int32): TReasonForRecompile =
|
||||
# NOTE: we need to process the entire module graph so that no ID will
|
||||
# be used twice! However, compilation speed does not suffer much from
|
||||
# this, since results are cached.
|
||||
var res = checkDep(systemFileIdx)
|
||||
var res = checkDep(systemFileIdx, cache)
|
||||
if res != rrNone: result = rrModDeps
|
||||
for i in countup(0, high(r.modDeps)):
|
||||
res = checkDep(r.modDeps[i])
|
||||
res = checkDep(r.modDeps[i], cache)
|
||||
if res != rrNone:
|
||||
result = rrModDeps
|
||||
# we cannot break here, because of side-effects of `checkDep`
|
||||
@@ -904,14 +906,14 @@ proc checkDep(fileIdx: int32): TReasonForRecompile =
|
||||
gMods[fileIdx].rd = r
|
||||
gMods[fileIdx].reason = result # now we know better
|
||||
|
||||
proc handleSymbolFile*(module: PSym): PRodReader =
|
||||
proc handleSymbolFile*(module: PSym; cache: IdentCache): PRodReader =
|
||||
let fileIdx = module.fileIdx
|
||||
if optSymbolFiles notin gGlobalOptions:
|
||||
module.id = getID()
|
||||
return nil
|
||||
idgen.loadMaxIds(options.gProjectPath / options.gProjectName)
|
||||
|
||||
discard checkDep(fileIdx)
|
||||
discard checkDep(fileIdx, cache)
|
||||
if gMods[fileIdx].reason == rrEmpty: internalError("handleSymbolFile")
|
||||
result = gMods[fileIdx].rd
|
||||
if result != nil:
|
||||
@@ -1078,7 +1080,7 @@ proc writeType(f: File; t: PType) =
|
||||
f.write("]\n")
|
||||
|
||||
proc viewFile(rodfile: string) =
|
||||
var r = newRodReader(rodfile, secureHash(""), 0)
|
||||
var r = newRodReader(rodfile, secureHash(""), 0, newIdentCache())
|
||||
if r == nil:
|
||||
rawMessage(errGenerated, "cannot open file (or maybe wrong version):" &
|
||||
rodfile)
|
||||
|
||||
@@ -34,6 +34,7 @@ type
|
||||
tstack: TTypeSeq # a stack of types to process
|
||||
files: TStringSeq
|
||||
origFile: string
|
||||
cache: IdentCache
|
||||
|
||||
PRodWriter = ref TRodWriter
|
||||
|
||||
@@ -54,7 +55,7 @@ proc fileIdx(w: PRodWriter, filename: string): int =
|
||||
template filename*(w: PRodWriter): string =
|
||||
w.module.filename
|
||||
|
||||
proc newRodWriter(hash: SecureHash, module: PSym): PRodWriter =
|
||||
proc newRodWriter(hash: SecureHash, module: PSym; cache: IdentCache): PRodWriter =
|
||||
new(result)
|
||||
result.sstack = @[]
|
||||
result.tstack = @[]
|
||||
@@ -76,6 +77,7 @@ proc newRodWriter(hash: SecureHash, module: PSym): PRodWriter =
|
||||
result.init = ""
|
||||
result.origFile = module.info.toFullPath
|
||||
result.data = newStringOfCap(12_000)
|
||||
result.cache = cache
|
||||
|
||||
proc addModDep(w: PRodWriter, dep: string; info: TLineInfo) =
|
||||
if w.modDeps.len != 0: add(w.modDeps, ' ')
|
||||
@@ -621,9 +623,9 @@ proc process(c: PPassContext, n: PNode): PNode =
|
||||
else:
|
||||
discard
|
||||
|
||||
proc myOpen(module: PSym): PPassContext =
|
||||
proc myOpen(module: PSym; cache: IdentCache): PPassContext =
|
||||
if module.id < 0: internalError("rodwrite: module ID not set")
|
||||
var w = newRodWriter(module.fileIdx.getHash, module)
|
||||
var w = newRodWriter(module.fileIdx.getHash, module, cache)
|
||||
rawAddInterfaceSym(w, module)
|
||||
result = w
|
||||
|
||||
|
||||
@@ -272,7 +272,7 @@ proc tryConstExpr(c: PContext, n: PNode): PNode =
|
||||
msgs.gErrorMax = high(int)
|
||||
|
||||
try:
|
||||
result = evalConstExpr(c.module, e)
|
||||
result = evalConstExpr(c.module, c.cache, e)
|
||||
if result == nil or result.kind == nkEmpty:
|
||||
result = nil
|
||||
else:
|
||||
@@ -293,7 +293,7 @@ proc semConstExpr(c: PContext, n: PNode): PNode =
|
||||
result = getConstExpr(c.module, e)
|
||||
if result == nil:
|
||||
#if e.kind == nkEmpty: globalError(n.info, errConstExprExpected)
|
||||
result = evalConstExpr(c.module, e)
|
||||
result = evalConstExpr(c.module, c.cache, e)
|
||||
if result == nil or result.kind == nkEmpty:
|
||||
if e.info != n.info:
|
||||
pushInfoContext(n.info)
|
||||
@@ -364,7 +364,7 @@ proc semMacroExpr(c: PContext, n, nOrig: PNode, sym: PSym,
|
||||
|
||||
#if c.evalContext == nil:
|
||||
# c.evalContext = c.createEvalContext(emStatic)
|
||||
result = evalMacroCall(c.module, n, nOrig, sym)
|
||||
result = evalMacroCall(c.module, c.cache, n, nOrig, sym)
|
||||
if efNoSemCheck notin flags:
|
||||
result = semAfterMacroCall(c, result, sym, flags)
|
||||
popInfoContext()
|
||||
|
||||
@@ -106,6 +106,7 @@ type
|
||||
instTypeBoundOp*: proc (c: PContext; dc: PSym; t: PType; info: TLineInfo;
|
||||
op: TTypeAttachedOp; col: int): PSym {.nimcall.}
|
||||
selfName*: PIdent
|
||||
cache*: IdentCache
|
||||
signatures*: TStrTable
|
||||
|
||||
proc makeInstPair*(s: PSym, inst: PInstantiation): TInstantiationPair =
|
||||
@@ -116,29 +117,13 @@ proc filename*(c: PContext): string =
|
||||
# the module's filename
|
||||
return c.module.filename
|
||||
|
||||
proc newContext*(module: PSym): PContext
|
||||
|
||||
proc lastOptionEntry*(c: PContext): POptionEntry
|
||||
proc newOptionEntry*(): POptionEntry
|
||||
proc newLib*(kind: TLibKind): PLib
|
||||
proc addToLib*(lib: PLib, sym: PSym)
|
||||
proc makePtrType*(c: PContext, baseType: PType): PType
|
||||
proc newTypeS*(kind: TTypeKind, c: PContext): PType
|
||||
proc fillTypeS*(dest: PType, kind: TTypeKind, c: PContext)
|
||||
|
||||
proc scopeDepth*(c: PContext): int {.inline.} =
|
||||
result = if c.currentScope != nil: c.currentScope.depthLevel
|
||||
else: 0
|
||||
|
||||
# owner handling:
|
||||
proc getCurrOwner*(): PSym
|
||||
proc pushOwner*(owner: PSym)
|
||||
proc popOwner*()
|
||||
# implementation
|
||||
|
||||
var gOwners*: seq[PSym] = @[]
|
||||
|
||||
proc getCurrOwner(): PSym =
|
||||
proc getCurrOwner*(): PSym =
|
||||
# owner stack (used for initializing the
|
||||
# owner field of syms)
|
||||
# the documentation comment always gets
|
||||
@@ -146,27 +131,27 @@ proc getCurrOwner(): PSym =
|
||||
# BUGFIX: global array is needed!
|
||||
result = gOwners[high(gOwners)]
|
||||
|
||||
proc pushOwner(owner: PSym) =
|
||||
proc pushOwner*(owner: PSym) =
|
||||
add(gOwners, owner)
|
||||
|
||||
proc popOwner() =
|
||||
proc popOwner*() =
|
||||
var length = len(gOwners)
|
||||
if length > 0: setLen(gOwners, length - 1)
|
||||
else: internalError("popOwner")
|
||||
|
||||
proc lastOptionEntry(c: PContext): POptionEntry =
|
||||
proc lastOptionEntry*(c: PContext): POptionEntry =
|
||||
result = POptionEntry(c.optionStack.tail)
|
||||
|
||||
proc popProcCon*(c: PContext) {.inline.} = c.p = c.p.next
|
||||
|
||||
proc newOptionEntry(): POptionEntry =
|
||||
proc newOptionEntry*(): POptionEntry =
|
||||
new(result)
|
||||
result.options = gOptions
|
||||
result.defaultCC = ccDefault
|
||||
result.dynlib = nil
|
||||
result.notes = gNotes
|
||||
|
||||
proc newContext(module: PSym): PContext =
|
||||
proc newContext*(module: PSym; cache: IdentCache): PContext =
|
||||
new(result)
|
||||
result.ambiguousSymbols = initIntSet()
|
||||
initLinkedList(result.optionStack)
|
||||
@@ -180,6 +165,7 @@ proc newContext(module: PSym): PContext =
|
||||
initStrTable(result.userPragmas)
|
||||
result.generics = @[]
|
||||
result.unknownIdents = initIntSet()
|
||||
result.cache = cache
|
||||
initStrTable(result.signatures)
|
||||
|
||||
|
||||
@@ -196,16 +182,19 @@ proc addConverter*(c: PContext, conv: PSym) =
|
||||
proc addPattern*(c: PContext, p: PSym) =
|
||||
inclSym(c.patterns, p)
|
||||
|
||||
proc newLib(kind: TLibKind): PLib =
|
||||
proc newLib*(kind: TLibKind): PLib =
|
||||
new(result)
|
||||
result.kind = kind #initObjectSet(result.syms)
|
||||
|
||||
proc addToLib(lib: PLib, sym: PSym) =
|
||||
proc addToLib*(lib: PLib, sym: PSym) =
|
||||
#if sym.annex != nil and not isGenericRoutine(sym):
|
||||
# LocalError(sym.info, errInvalidPragma)
|
||||
sym.annex = lib
|
||||
|
||||
proc makePtrType(c: PContext, baseType: PType): PType =
|
||||
proc newTypeS*(kind: TTypeKind, c: PContext): PType =
|
||||
result = newType(kind, getCurrOwner())
|
||||
|
||||
proc makePtrType*(c: PContext, baseType: PType): PType =
|
||||
result = newTypeS(tyPtr, c)
|
||||
addSonSkipIntLit(result, baseType.assertNotNil)
|
||||
|
||||
@@ -222,7 +211,7 @@ proc makeTypeDesc*(c: PContext, typ: PType): PType =
|
||||
|
||||
proc makeTypeSymNode*(c: PContext, typ: PType, info: TLineInfo): PNode =
|
||||
let typedesc = makeTypeDesc(c, typ)
|
||||
let sym = newSym(skType, idAnon, getCurrOwner(), info).linkTo(typedesc)
|
||||
let sym = newSym(skType, c.cache.idAnon, getCurrOwner(), info).linkTo(typedesc)
|
||||
return newSymNode(sym, info)
|
||||
|
||||
proc makeTypeFromExpr*(c: PContext, n: PNode): PType =
|
||||
@@ -284,9 +273,6 @@ template rangeHasStaticIf*(t: PType): bool =
|
||||
template getStaticTypeFromRange*(t: PType): PType =
|
||||
t.n[1][0][1].typ
|
||||
|
||||
proc newTypeS(kind: TTypeKind, c: PContext): PType =
|
||||
result = newType(kind, getCurrOwner())
|
||||
|
||||
proc errorType*(c: PContext): PType =
|
||||
## creates a type representing an error state
|
||||
result = newTypeS(tyError, c)
|
||||
@@ -295,7 +281,7 @@ proc errorNode*(c: PContext, n: PNode): PNode =
|
||||
result = newNodeI(nkEmpty, n.info)
|
||||
result.typ = errorType(c)
|
||||
|
||||
proc fillTypeS(dest: PType, kind: TTypeKind, c: PContext) =
|
||||
proc fillTypeS*(dest: PType, kind: TTypeKind, c: PContext) =
|
||||
dest.kind = kind
|
||||
dest.owner = getCurrOwner()
|
||||
dest.size = - 1
|
||||
|
||||
@@ -607,12 +607,12 @@ proc evalAtCompileTime(c: PContext, n: PNode): PNode =
|
||||
call.add(a)
|
||||
#echo "NOW evaluating at compile time: ", call.renderTree
|
||||
if sfCompileTime in callee.flags:
|
||||
result = evalStaticExpr(c.module, call, c.p.owner)
|
||||
result = evalStaticExpr(c.module, c.cache, call, c.p.owner)
|
||||
if result.isNil:
|
||||
localError(n.info, errCannotInterpretNodeX, renderTree(call))
|
||||
else: result = fixupTypeAfterEval(c, result, n)
|
||||
else:
|
||||
result = evalConstExpr(c.module, call)
|
||||
result = evalConstExpr(c.module, c.cache, call)
|
||||
if result.isNil: result = n
|
||||
else: result = fixupTypeAfterEval(c, result, n)
|
||||
#if result != n:
|
||||
@@ -1574,9 +1574,9 @@ proc getMagicSym(magic: TMagic): PSym =
|
||||
result = newSym(skProc, getIdent($magic), systemModule, gCodegenLineInfo)
|
||||
result.magic = magic
|
||||
|
||||
proc newAnonSym(kind: TSymKind, info: TLineInfo,
|
||||
proc newAnonSym(c: PContext; kind: TSymKind, info: TLineInfo,
|
||||
owner = getCurrOwner()): PSym =
|
||||
result = newSym(kind, idAnon, owner, info)
|
||||
result = newSym(kind, c.cache.idAnon, owner, info)
|
||||
result.flags = {sfGenSym}
|
||||
|
||||
proc semExpandToAst(c: PContext, n: PNode): PNode =
|
||||
@@ -1648,7 +1648,7 @@ proc semQuoteAst(c: PContext, n: PNode): PNode =
|
||||
|
||||
processQuotations(doBlk.sons[bodyPos], op, quotes, ids)
|
||||
|
||||
doBlk.sons[namePos] = newAnonSym(skTemplate, n.info).newSymNode
|
||||
doBlk.sons[namePos] = newAnonSym(c, skTemplate, n.info).newSymNode
|
||||
if ids.len > 0:
|
||||
doBlk.sons[paramsPos] = newNodeI(nkFormalParams, n.info)
|
||||
doBlk[paramsPos].add getSysSym("stmt").newSymNode # return type
|
||||
|
||||
@@ -524,7 +524,7 @@ proc semVarOrLet(c: PContext, n: PNode, symkind: TSymKind): PNode =
|
||||
addDefer(c, result, v)
|
||||
checkNilable(v)
|
||||
if sfCompileTime in v.flags: hasCompileTime = true
|
||||
if hasCompileTime: vm.setupCompileTimeVar(c.module, result)
|
||||
if hasCompileTime: vm.setupCompileTimeVar(c.module, c.cache, result)
|
||||
|
||||
proc semConst(c: PContext, n: PNode): PNode =
|
||||
result = copyNode(n)
|
||||
@@ -820,7 +820,7 @@ proc semAllTypeSections(c: PContext; n: PNode): PNode =
|
||||
if containsOrIncl(c.includedFiles, f):
|
||||
localError(n.info, errRecursiveDependencyX, f.toFilename)
|
||||
else:
|
||||
let code = gIncludeFile(c.module, f)
|
||||
let code = gIncludeFile(c.module, f, c.cache)
|
||||
gatherStmts c, code, result
|
||||
excl(c.includedFiles, f)
|
||||
of nkStmtList:
|
||||
@@ -922,7 +922,7 @@ proc semProcAnnotation(c: PContext, prc: PNode;
|
||||
if m == nil:
|
||||
if key.kind == nkIdent and key.ident.id == ord(wDelegator):
|
||||
if considerQuotedIdent(prc.sons[namePos]).s == "()":
|
||||
prc.sons[namePos] = newIdentNode(idDelegator, prc.info)
|
||||
prc.sons[namePos] = newIdentNode(c.cache.idDelegator, prc.info)
|
||||
prc.sons[pragmasPos] = copyExcept(n, i)
|
||||
else:
|
||||
localError(prc.info, errOnlyACallOpCanBeDelegator)
|
||||
@@ -965,7 +965,7 @@ proc semLambda(c: PContext, n: PNode, flags: TExprFlags): PNode =
|
||||
checkSonsLen(n, bodyPos + 1)
|
||||
var s: PSym
|
||||
if n[namePos].kind != nkSym:
|
||||
s = newSym(skProc, idAnon, getCurrOwner(), n.info)
|
||||
s = newSym(skProc, c.cache.idAnon, getCurrOwner(), n.info)
|
||||
s.ast = n
|
||||
n.sons[namePos] = newSymNode(s)
|
||||
else:
|
||||
@@ -1159,7 +1159,7 @@ proc semProcAux(c: PContext, n: PNode, kind: TSymKind,
|
||||
assert phase == stepRegisterSymbol
|
||||
|
||||
if n[namePos].kind == nkEmpty:
|
||||
s = newSym(kind, idAnon, getCurrOwner(), n.info)
|
||||
s = newSym(kind, c.cache.idAnon, getCurrOwner(), n.info)
|
||||
incl(s.flags, sfUsed)
|
||||
isAnon = true
|
||||
else:
|
||||
@@ -1418,7 +1418,7 @@ proc evalInclude(c: PContext, n: PNode): PNode =
|
||||
if containsOrIncl(c.includedFiles, f):
|
||||
localError(n.info, errRecursiveDependencyX, f.toFilename)
|
||||
else:
|
||||
addSon(result, semStmt(c, gIncludeFile(c.module, f)))
|
||||
addSon(result, semStmt(c, gIncludeFile(c.module, f, c.cache)))
|
||||
excl(c.includedFiles, f)
|
||||
|
||||
proc setLine(n: PNode, info: TLineInfo) =
|
||||
@@ -1445,7 +1445,7 @@ proc semStaticStmt(c: PContext, n: PNode): PNode =
|
||||
#writeStackTrace()
|
||||
let a = semStmt(c, n.sons[0])
|
||||
n.sons[0] = a
|
||||
evalStaticStmt(c.module, a, c.p.owner)
|
||||
evalStaticStmt(c.module, c.cache, a, c.p.owner)
|
||||
result = newNodeI(nkDiscardStmt, n.info, 1)
|
||||
result.sons[0] = emptyNode
|
||||
when false:
|
||||
|
||||
@@ -30,30 +30,7 @@ type
|
||||
skin*: TParserKind
|
||||
parser*: TParser
|
||||
|
||||
|
||||
proc parseFile*(fileIdx: int32): PNode{.procvar.}
|
||||
proc openParsers*(p: var TParsers, fileIdx: int32, inputstream: PLLStream)
|
||||
proc closeParsers*(p: var TParsers)
|
||||
proc parseAll*(p: var TParsers): PNode
|
||||
proc parseTopLevelStmt*(p: var TParsers): PNode
|
||||
# implements an iterator. Returns the next top-level statement or nil if end
|
||||
# of stream.
|
||||
|
||||
# implementation
|
||||
|
||||
proc parseFile(fileIdx: int32): PNode =
|
||||
var
|
||||
p: TParsers
|
||||
f: File
|
||||
let filename = fileIdx.toFullPathConsiderDirty
|
||||
if not open(f, filename):
|
||||
rawMessage(errCannotOpenFile, filename)
|
||||
return
|
||||
openParsers(p, fileIdx, llStreamOpen(f))
|
||||
result = parseAll(p)
|
||||
closeParsers(p)
|
||||
|
||||
proc parseAll(p: var TParsers): PNode =
|
||||
proc parseAll*(p: var TParsers): PNode =
|
||||
case p.skin
|
||||
of skinStandard, skinStrongSpaces:
|
||||
result = parser.parseAll(p.parser)
|
||||
@@ -63,7 +40,7 @@ proc parseAll(p: var TParsers): PNode =
|
||||
internalError("parser to implement")
|
||||
result = ast.emptyNode
|
||||
|
||||
proc parseTopLevelStmt(p: var TParsers): PNode =
|
||||
proc parseTopLevelStmt*(p: var TParsers): PNode =
|
||||
case p.skin
|
||||
of skinStandard, skinStrongSpaces:
|
||||
result = parser.parseTopLevelStmt(p.parser)
|
||||
@@ -74,18 +51,18 @@ proc parseTopLevelStmt(p: var TParsers): PNode =
|
||||
result = ast.emptyNode
|
||||
|
||||
proc utf8Bom(s: string): int =
|
||||
if (s[0] == '\xEF') and (s[1] == '\xBB') and (s[2] == '\xBF'):
|
||||
if s[0] == '\xEF' and s[1] == '\xBB' and s[2] == '\xBF':
|
||||
result = 3
|
||||
else:
|
||||
result = 0
|
||||
|
||||
proc containsShebang(s: string, i: int): bool =
|
||||
if (s[i] == '#') and (s[i + 1] == '!'):
|
||||
if s[i] == '#' and s[i+1] == '!':
|
||||
var j = i + 2
|
||||
while s[j] in Whitespace: inc(j)
|
||||
result = s[j] == '/'
|
||||
|
||||
proc parsePipe(filename: string, inputStream: PLLStream): PNode =
|
||||
proc parsePipe(filename: string, inputStream: PLLStream; cache: IdentCache): PNode =
|
||||
result = ast.emptyNode
|
||||
var s = llStreamOpen(filename, fmRead)
|
||||
if s != nil:
|
||||
@@ -101,20 +78,20 @@ proc parsePipe(filename: string, inputStream: PLLStream): PNode =
       inc(i, 2)
       while line[i] in Whitespace: inc(i)
       var q: TParser
-      parser.openParser(q, filename, llStreamOpen(substr(line, i)))
+      parser.openParser(q, filename, llStreamOpen(substr(line, i)), cache)
       result = parser.parseAll(q)
       parser.closeParser(q)
     llStreamClose(s)

 proc getFilter(ident: PIdent): TFilterKind =
   for i in countup(low(TFilterKind), high(TFilterKind)):
-    if identEq(ident, filterNames[i]):
+    if cmpIgnoreStyle(ident.s, filterNames[i]) == 0:
       return i
   result = filtNone

 proc getParser(ident: PIdent): TParserKind =
   for i in countup(low(TParserKind), high(TParserKind)):
-    if identEq(ident, parserNames[i]):
+    if cmpIgnoreStyle(ident.s, parserNames[i]) == 0:
       return i
   rawMessage(errInvalidDirectiveX, ident.s)
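The identEq calls above are replaced by strutils.cmpIgnoreStyle, which compares names while ignoring case and underscores, so directive lookups stay style-insensitive without touching the ident table. A quick check of those semantics:

import strutils

when isMainModule:
  assert cmpIgnoreStyle("stdTmpl", "stdtmpl") == 0
  assert cmpIgnoreStyle("strong_spaces", "strongSpaces") == 0
  assert cmpIgnoreStyle("braces", "endX") != 0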
@@ -150,8 +127,7 @@ proc evalPipe(p: var TParsers, n: PNode, filename: string,
               start: PLLStream): PLLStream =
   result = start
   if n.kind == nkEmpty: return
-  if n.kind == nkInfix and n.sons[0].kind == nkIdent and
-      identEq(n.sons[0].ident, "|"):
+  if n.kind == nkInfix and n[0].kind == nkIdent and n[0].ident.s == "|":
     for i in countup(1, 2):
       if n.sons[i].kind == nkInfix:
         result = evalPipe(p, n.sons[i], filename, result)
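For orientation: evalPipe walks the parsed "#?" source-filter directive, which is an nkInfix tree with the "|" identifier at its head and a filter (or nested pipe) on each side. A directive of roughly this shape at the top of a filtered source file is what ends up here (illustrative; the exact filter names come from filters.nim):

#? stdtmpl | standard
# each "|"-separated stage is applied in turn to the input stream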
@@ -162,18 +138,31 @@ proc evalPipe(p: var TParsers, n: PNode, filename: string,
   else:
     result = applyFilter(p, n, filename, result)

-proc openParsers(p: var TParsers, fileIdx: int32, inputstream: PLLStream) =
+proc openParsers*(p: var TParsers, fileIdx: int32, inputstream: PLLStream;
+                  cache: IdentCache) =
   var s: PLLStream
   p.skin = skinStandard
   let filename = fileIdx.toFullPathConsiderDirty
-  var pipe = parsePipe(filename, inputstream)
+  var pipe = parsePipe(filename, inputstream, cache)
   if pipe != nil: s = evalPipe(p, pipe, filename, inputstream)
   else: s = inputstream
   case p.skin
   of skinStandard, skinBraces, skinEndX:
-    parser.openParser(p.parser, fileIdx, s, false)
+    parser.openParser(p.parser, fileIdx, s, cache, false)
   of skinStrongSpaces:
-    parser.openParser(p.parser, fileIdx, s, true)
+    parser.openParser(p.parser, fileIdx, s, cache, true)

-proc closeParsers(p: var TParsers) =
+proc closeParsers*(p: var TParsers) =
   parser.closeParser(p.parser)
+
+proc parseFile*(fileIdx: int32; cache: IdentCache): PNode {.procvar.} =
+  var
+    p: TParsers
+    f: File
+  let filename = fileIdx.toFullPathConsiderDirty
+  if not open(f, filename):
+    rawMessage(errCannotOpenFile, filename)
+    return
+  openParsers(p, fileIdx, llStreamOpen(f), cache)
+  result = parseAll(p)
+  closeParsers(p)
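With the forward declarations gone, parseFile moves to the end of the module and takes the IdentCache explicitly. A hypothetical caller (not part of this commit; newIdentCache and fileInfoIdx are assumed to be the constructors/helpers exposed by idents.nim and msgs.nim at this point) might drive it like so:

import ast, idents, msgs, syntaxes

proc parseOneFile(filename: string): PNode =
  let cache = newIdentCache()          # assumed constructor from idents.nim
  let fileIdx = fileInfoIdx(filename)  # assumed helper from msgs.nim
  result = parseFile(fileIdx, cache)   # the cache is now threaded explicitly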
@@ -10,7 +10,7 @@
 # tree helper routines

 import
-  ast, astalgo, lexer, msgs, strutils, wordrecg
+  ast, astalgo, lexer, msgs, strutils, wordrecg, idents

 proc cyclicTreeAux(n: PNode, visited: var seq[PNode]): bool =
   if n == nil: return
@@ -1258,7 +1258,7 @@ proc rawExecute(c: PCtx, start: int, tos: PStackFrame): TFullReg =
       decodeB(rkNode)
       # c.debug[pc].line.int - countLines(regs[rb].strVal) ?
       var error: string
-      let ast = parseString(regs[rb].node.strVal, c.debug[pc].toFullPath,
+      let ast = parseString(regs[rb].node.strVal, c.cache, c.debug[pc].toFullPath,
                             c.debug[pc].line.int,
                             proc (info: TLineInfo; msg: TMsgKind; arg: string) =
                               if error.isNil and msg <= msgs.errMax:
@@ -1272,7 +1272,7 @@ proc rawExecute(c: PCtx, start: int, tos: PStackFrame): TFullReg =
     of opcParseStmtToAst:
       decodeB(rkNode)
       var error: string
-      let ast = parseString(regs[rb].node.strVal, c.debug[pc].toFullPath,
+      let ast = parseString(regs[rb].node.strVal, c.cache, c.debug[pc].toFullPath,
                             c.debug[pc].line.int,
                             proc (info: TLineInfo; msg: TMsgKind; arg: string) =
                               if error.isNil and msg <= msgs.errMax:
@@ -1509,20 +1509,20 @@ include vmops
 var
   globalCtx*: PCtx

-proc setupGlobalCtx(module: PSym) =
+proc setupGlobalCtx(module: PSym; cache: IdentCache) =
   if globalCtx.isNil:
-    globalCtx = newCtx(module)
+    globalCtx = newCtx(module, cache)
     registerAdditionalOps(globalCtx)
   else:
     refresh(globalCtx, module)

-proc myOpen(module: PSym): PPassContext =
+proc myOpen(module: PSym; cache: IdentCache): PPassContext =
   #var c = newEvalContext(module, emRepl)
   #c.features = {allowCast, allowFFI, allowInfiniteLoops}
   #pushStackFrame(c, newStackFrame())

   # XXX produce a new 'globals' environment here:
-  setupGlobalCtx(module)
+  setupGlobalCtx(module, cache)
   result = globalCtx
   when hasFFI:
     globalCtx.features = {allowFFI, allowCast}
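setupGlobalCtx keeps the lazy-init-or-refresh behaviour it already had: the shared VM context is built once (now with the cache) and merely re-pointed at each later module. The pattern in isolation, with generic placeholder types rather than the compiler's own:

type
  Module = object
    name: string
  Ctx = ref object
    module: Module

var globalCtx: Ctx

proc setupGlobalCtx(module: Module) =
  if globalCtx.isNil:
    globalCtx = Ctx(module: module)   # expensive setup happens only once
  else:
    globalCtx.module = module         # cheap refresh for every later module

when isMainModule:
  setupGlobalCtx(Module(name: "a"))
  let first = globalCtx
  setupGlobalCtx(Module(name: "b"))
  assert globalCtx == first           # the same context object is reused
  assert globalCtx.module.name == "b"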
@@ -1540,9 +1540,10 @@ proc myProcess(c: PPassContext, n: PNode): PNode =

 const evalPass* = makePass(myOpen, nil, myProcess, myProcess)

-proc evalConstExprAux(module, prc: PSym, n: PNode, mode: TEvalMode): PNode =
+proc evalConstExprAux(module: PSym; cache: IdentCache; prc: PSym, n: PNode,
+                      mode: TEvalMode): PNode =
   let n = transformExpr(module, n)
-  setupGlobalCtx(module)
+  setupGlobalCtx(module, cache)
   var c = globalCtx
   let oldMode = c.mode
   defer: c.mode = oldMode
@@ -1557,17 +1558,17 @@ proc evalConstExprAux(module, prc: PSym, n: PNode, mode: TEvalMode): PNode =
   result = rawExecute(c, start, tos).regToNode
   if result.info.line < 0: result.info = n.info

-proc evalConstExpr*(module: PSym, e: PNode): PNode =
-  result = evalConstExprAux(module, nil, e, emConst)
+proc evalConstExpr*(module: PSym; cache: IdentCache, e: PNode): PNode =
+  result = evalConstExprAux(module, cache, nil, e, emConst)

-proc evalStaticExpr*(module: PSym, e: PNode, prc: PSym): PNode =
-  result = evalConstExprAux(module, prc, e, emStaticExpr)
+proc evalStaticExpr*(module: PSym; cache: IdentCache, e: PNode, prc: PSym): PNode =
+  result = evalConstExprAux(module, cache, prc, e, emStaticExpr)

-proc evalStaticStmt*(module: PSym, e: PNode, prc: PSym) =
-  discard evalConstExprAux(module, prc, e, emStaticStmt)
+proc evalStaticStmt*(module: PSym; cache: IdentCache, e: PNode, prc: PSym) =
+  discard evalConstExprAux(module, cache, prc, e, emStaticStmt)

-proc setupCompileTimeVar*(module: PSym, n: PNode) =
-  discard evalConstExprAux(module, nil, n, emStaticStmt)
+proc setupCompileTimeVar*(module: PSym; cache: IdentCache, n: PNode) =
+  discard evalConstExprAux(module, cache, nil, n, emStaticStmt)

 proc setupMacroParam(x: PNode, typ: PType): TFullReg =
   case typ.kind
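All four wrappers now take the IdentCache directly after the module symbol and forward it to evalConstExprAux. A hypothetical call site (sketch only; it assumes the sem layer's PContext carries both a module and the new cache field, as the semStaticStmt hunk above suggests):

import ast, semdata, vm

proc foldAtCompileTime(c: PContext, e: PNode): PNode =
  # constant-fold `e` in the VM, threading the context's ident cache through
  result = evalConstExpr(c.module, c.cache, e)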
@@ -1586,7 +1587,8 @@ proc setupMacroParam(x: PNode, typ: PType): TFullReg =

 var evalMacroCounter: int

-proc evalMacroCall*(module: PSym, n, nOrig: PNode, sym: PSym): PNode =
+proc evalMacroCall*(module: PSym; cache: IdentCache, n, nOrig: PNode,
+                    sym: PSym): PNode =
   # XXX globalError() is ugly here, but I don't know a better solution for now
   inc(evalMacroCounter)
   if evalMacroCounter > 100:
@@ -1599,7 +1601,7 @@ proc evalMacroCall*(module: PSym, n, nOrig: PNode, sym: PSym): PNode =
                 n.renderTree,
                 $ <n.safeLen, $ <sym.typ.len])

-  setupGlobalCtx(module)
+  setupGlobalCtx(module, cache)
   var c = globalCtx

   c.callsite = nOrig
@@ -10,7 +10,7 @@
 ## This module contains the type definitions for the new evaluation engine.
 ## An instruction is 1-3 int32s in memory, it is a register based VM.

-import ast, passes, msgs, intsets
+import ast, passes, msgs, idents, intsets

 const
   byteExcess* = 128 # we use excess-K for immediates
@@ -203,16 +203,18 @@ type
     comesFromHeuristic*: TLineInfo # Heuristic for better macro stack traces
     callbacks*: seq[tuple[key: string, value: VmCallback]]
     errorFlag*: string
+    cache*: IdentCache

   TPosition* = distinct int

   PEvalContext* = PCtx

-proc newCtx*(module: PSym): PCtx =
+proc newCtx*(module: PSym; cache: IdentCache): PCtx =
   PCtx(code: @[], debug: @[],
     globals: newNode(nkStmtListExpr), constants: newNode(nkStmtList), types: @[],
     prc: PProc(blocks: @[]), module: module, loopIterations: MaxLoopIterations,
-    comesFromHeuristic: unknownLineInfo(), callbacks: @[], errorFlag: "")
+    comesFromHeuristic: unknownLineInfo(), callbacks: @[], errorFlag: "",
+    cache: cache)

 proc refresh*(c: PCtx, module: PSym) =
   c.module = module
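The context now remembers the cache it was built with, which is how the opcParseExprToAst/opcParseStmtToAst handlers above can reach it as c.cache. A minimal construction sketch (hypothetical driver code; newIdentCache is assumed to be the constructor from idents.nim, and a nil module is used purely for illustration):

import idents, vmdef

let cache = newIdentCache()   # assumed constructor from idents.nim
let c = newCtx(nil, cache)    # nil module, just to show the wiring
assert c.cache == cache       # the cache is stored on the context itself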
@@ -13,8 +13,7 @@
 # does not support strings. Without this the code would
 # be slow and unreadable.

-import
-  hashes, strutils, idents
+from strutils import cmpIgnoreStyle

 # Keywords must be kept sorted and within a range

@@ -180,17 +179,3 @@ proc findStr*(a: openArray[string], s: string): int =
     if cmpIgnoreStyle(a[i], s) == 0:
       return i
   result = - 1
-
-proc whichKeyword*(id: PIdent): TSpecialWord =
-  if id.id < 0: result = wInvalid
-  else: result = TSpecialWord(id.id)
-
-proc whichKeyword*(id: string): TSpecialWord =
-  result = whichKeyword(getIdent(id))
-
-proc initSpecials() =
-  # initialize the keywords:
-  for s in countup(succ(low(specialWords)), high(specialWords)):
-    getIdent(specialWords[s], hashIgnoreStyle(specialWords[s])).id = ord(s)
-
-initSpecials()
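With whichKeyword and the initSpecials registration gone, findStr is what remains: a plain style-insensitive linear search. A standalone restatement for illustration (same logic, runnable outside the compiler):

from strutils import cmpIgnoreStyle

proc findStr(a: openArray[string], s: string): int =
  # index of the first entry that matches `s` ignoring case and underscores
  for i in countup(low(a), high(a)):
    if cmpIgnoreStyle(a[i], s) == 0:
      return i
  result = -1

when isMainModule:
  assert findStr(["ObjChecks", "FieldChecks"], "objchecks") == 0
  assert findStr(["ObjChecks", "FieldChecks"], "bound_checks") == -1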