Fixes token's column information

This commit is contained in:
Araq
2013-07-30 08:45:00 +02:00
parent 28ad262a46
commit fd2a808266
6 changed files with 19 additions and 21 deletions

View File

@@ -889,17 +889,10 @@ proc evalRepr(c: PEvalContext, n: PNode): PNode =
proc isEmpty(n: PNode): bool =
result = n != nil and n.kind == nkEmpty
# The lexer marks multi-line strings as residing at the line where they
# are closed. This function returns the line where the string begins
# Maybe the lexer should mark both the beginning and the end of expressions,
# then this function could be removed.
proc stringStartingLine(s: PNode): int =
result = s.info.line.int - countLines(s.strVal)
proc evalParseExpr(c: PEvalContext, n: PNode): PNode =
var code = evalAux(c, n.sons[1], {})
var ast = parseString(code.getStrValue, code.info.toFilename,
code.stringStartingLine)
code.info.line.int)
if sonsLen(ast) != 1:
GlobalError(code.info, errExprExpected, "multiple statements")
result = ast.sons[0]
@@ -908,7 +901,7 @@ proc evalParseExpr(c: PEvalContext, n: PNode): PNode =
proc evalParseStmt(c: PEvalContext, n: PNode): PNode =
var code = evalAux(c, n.sons[1], {})
result = parseString(code.getStrValue, code.info.toFilename,
code.stringStartingLine)
code.info.line.int)
#result.typ = newType(tyStmt, c.module)
proc evalTypeTrait*(n: PNode, context: PSym): PNode =

View File

@@ -110,6 +110,7 @@ type
# or float literals
literal*: string # the parsed (string) literal; and
# documentation comments are here too
line*, col*: int
TLexer* = object of TBaseLexer
fileIdx*: int32
@@ -124,8 +125,10 @@ proc isKeyword*(kind: TTokType): bool
proc openLexer*(lex: var TLexer, fileidx: int32, inputstream: PLLStream)
proc rawGetTok*(L: var TLexer, tok: var TToken)
# reads in the next token into tok and skips it
proc getColumn*(L: TLexer): int
proc getLineInfo*(L: TLexer): TLineInfo
proc getLineInfo*(L: TLexer, tok: TToken): TLineInfo {.inline.} =
newLineInfo(L.fileIdx, tok.line, tok.col)
proc closeLexer*(lex: var TLexer)
proc PrintTok*(tok: TToken)
proc tokToStr*(tok: TToken): string
@@ -702,6 +705,8 @@ proc rawGetTok(L: var TLexer, tok: var TToken) =
tok.indent = -1
skip(L, tok)
var c = L.buf[L.bufpos]
tok.line = L.linenumber
tok.col = getColNumber(L, L.bufpos)
if c in SymStartChars - {'r', 'R', 'l'}:
getSymbol(L, tok)
else:

View File

@@ -157,7 +157,7 @@ proc checkSymbol(L: TLexer, tok: TToken) =
proc parseAssignment(L: var TLexer, tok: var TToken) =
if tok.ident.id == getIdent("-").id or tok.ident.id == getIdent("--").id:
confTok(L, tok) # skip unnecessary prefix
var info = getLineInfo(L) # save for later in case of an error
var info = getLineInfo(L, tok) # save for later in case of an error
checkSymbol(L, tok)
var s = tokToStr(tok)
confTok(L, tok) # skip symbol
@@ -176,7 +176,7 @@ proc parseAssignment(L: var TLexer, tok: var TToken) =
add(val, tokToStr(tok))
confTok(L, tok)
if tok.tokType == tkBracketRi: confTok(L, tok)
else: lexMessage(L, errTokenExpected, "\']\'")
else: lexMessage(L, errTokenExpected, "']'")
add(val, ']')
if tok.tokType in {tkColon, tkEquals}:
if len(val) > 0: add(val, ':')

View File

@@ -144,7 +144,7 @@ proc Eat(p: var TParser, TokType: TTokType) =
else: lexMessage(p.lex, errTokenExpected, TokTypeToStr[tokType])
proc parLineInfo(p: TParser): TLineInfo =
result = getLineInfo(p.lex)
result = getLineInfo(p.lex, p.tok)
proc indAndComment(p: var TParser, n: PNode) =
if p.tok.indent > p.currInd:
@@ -154,7 +154,7 @@ proc indAndComment(p: var TParser, n: PNode) =
skipComment(p, n)
proc newNodeP(kind: TNodeKind, p: TParser): PNode =
result = newNodeI(kind, getLineInfo(p.lex))
result = newNodeI(kind, parLineInfo(p))
proc newIntNodeP(kind: TNodeKind, intVal: BiggestInt, p: TParser): PNode =
result = newNodeP(kind, p)
@@ -350,7 +350,7 @@ proc exprList(p: var TParser, endTok: TTokType, result: PNode) =
proc dotExpr(p: var TParser, a: PNode): PNode =
#| dotExpr = expr '.' optInd ('type' | 'addr' | symbol)
var info = p.lex.getlineInfo
var info = p.parLineInfo
getTok(p)
optInd(p, a)
case p.tok.tokType

View File

@@ -60,7 +60,9 @@ proc beautifyName(s: string, k: TSymKind): string =
# for 'const' we keep how it's spelt; either upper case or lower case:
result.add s[0]
else:
result.add toLower(s[0])
# as a special rule, don't transform 'L' to 'l'
if s.len == 1 and s[0] == 'L': result.add 'L'
else: result.add toLower(s[0])
inc i
let allUpper = allCharsInSet(s, {'A'..'Z', '0'..'9', '_'})
while i < s.len:
@@ -109,7 +111,7 @@ proc processSym(c: PPassContext, n: PNode): PNode =
loadFile(n.info)
let line = gSourceFiles[n.info.fileIndex].lines[n.info.line-1]
var first = n.info.col.int - len(s.name.s)
var first = n.info.col.int
if first < 0: return
#inc first, skipIgnoreCase(line, "proc ", first)
if line[first] == '`': inc first

View File

@@ -205,13 +205,11 @@ proc findClosestCall(n: PNode): PNode =
if result != nil: return
proc isTracked(current: TLineInfo, tokenLen: int): bool =
# the column of an identifier is at its *end*, so we subtract to get the
# start of it.
for i in countup(0, high(checkPoints)):
if current.fileIndex == checkPoints[i].fileIndex:
if current.line == checkPoints[i].line:
let col = checkPoints[i].col
if col >= current.col-tokenLen and col <= current.col:
if col >= current.col and col <= current.col+tokenLen-1:
return true
proc findClosestSym(n: PNode): PNode =