Mirror of https://github.com/nim-lang/Nim.git, synced 2026-01-09 14:32:53 +00:00
Merge branch 'devel' into araq
@@ -16,10 +16,12 @@ addons:
- libcurl4-openssl-dev
- libsdl1.2-dev
- libgc-dev
- libsfml-dev

before_install:
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install boehmgc; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install sfml; fi

before_script:
- set -e
36  changelog.md
@@ -5,3 +5,39 @@
- Removed basic2d/basic3d out of the stdlib and into Nimble packages.
  These packages are deprecated, however; use the ``glm``, ``arraymancer``, ``neo``
  or another package.
- Arrays of char cannot be converted to ``cstring`` anymore; pointers to
  arrays of char can! This means ``$`` for arrays can finally exist
  in ``system.nim`` and do the right thing.
- ``echo`` now works with strings that contain ``\0`` (the binary zero is not
  shown) and ``nil`` strings are equal to empty strings.
- JSON: Deprecated `getBVal`, `getFNum`, and `getNum` in favour of
  `getBool`, `getFloat`, `getBiggestInt`. Also the `getInt` procedure was added.
- `reExtended` is no longer the default for the `re` constructor in the `re`
  module.
- The overloading rules changed slightly so that constrained generics are
  preferred over unconstrained generics. (Bug #6526)
- It is now possible to forward declare object types so that mutually
  recursive types can be created across module boundaries. See
  [package level objects](https://nim-lang.org/docs/manual.html#package-level-objects)
  for more information.
- The **unary** ``<`` is now deprecated: for ``.. <`` use ``..<``; for other usages
  use the ``pred`` proc.
- We changed how array accesses "from backwards" like ``a[^1]`` or ``a[0..^1]`` are
  implemented. These are now implemented purely in ``system.nim`` without compiler
  support. There is a new "heterogeneous" slice type ``system.HSlice`` that takes 2
  generic parameters which can be ``BackwardsIndex`` indices. ``BackwardsIndex`` is
  produced by ``system.^``.
  This means if you overload ``[]`` or ``[]=`` you need to ensure they also work
  with ``system.BackwardsIndex`` (if applicable for the accessors); see the sketch
  after this list.
- ``mod`` and bitwise ``and`` do not produce ``range`` subtypes anymore. This
  turned out to be more harmful than helpful and the language is simpler
  without this special typing rule.
- Added ``algorithm.rotateLeft``.
- ``rationals.toRational`` now uses an algorithm based on continued fractions.
  This means its results are more precise and it can't run into an infinite loop
  anymore.
- Added ``typetraits.$`` as an alias for ``typetraits.name``.
- ``os.getEnv`` now takes an optional ``default`` parameter that tells ``getEnv``
  what to return if the environment variable does not exist.
- Removed the PDCurses wrapper from the stdlib and published it as a separate
  Nimble package.
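To make the indexing-related entries above concrete, here is a minimal, illustrative Nim sketch. The `Ring` type, its field and the `NIM_DEMO_VAR` variable are invented for this example; only `..<`, `BackwardsIndex` and the `default` parameter of `getEnv` come from the changelog itself:

```nim
import os

type Ring = object            # hypothetical container, for illustration only
  data: array[4, int]

proc `[]`(r: Ring, i: int): int = r.data[i]
# When overloading `[]`, also accept system.BackwardsIndex so `r[^1]` keeps working:
proc `[]`(r: Ring, i: BackwardsIndex): int = r.data[r.data.len - int(i)]

when isMainModule:
  let r = Ring(data: [10, 20, 30, 40])
  for i in 0 ..< r.data.len:        # `..<` replaces the deprecated `.. <` spelling
    echo r[i]
  echo r[^1]                        # resolved via the BackwardsIndex overload
  # getEnv's new `default` parameter avoids an explicit existsEnv check:
  echo getEnv("NIM_DEMO_VAR", "fallback")
```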
@@ -874,7 +874,8 @@ type
# mean that there is no destructor.
# see instantiateDestructor in semdestruct.nim
deepCopy*: PSym # overriden 'deepCopy' operation
assignment*: PSym # overriden '=' operator
assignment*: PSym # overriden '=' operation
sink*: PSym # overriden '=sink' operation
methods*: seq[(int,PSym)] # attached methods
size*: BiggestInt # the size of the type in bytes
# -1 means that the size is unkwown
@@ -1019,16 +1020,11 @@ proc add*(father, son: PNode) =

type Indexable = PNode | PType

template `[]`*(n: Indexable, i: int): Indexable =
n.sons[i]
template `[]`*(n: Indexable, i: int): Indexable = n.sons[i]
template `[]=`*(n: Indexable, i: int; x: Indexable) = n.sons[i] = x

template `-|`*(b, s: untyped): untyped =
(if b >= 0: b else: s.len + b)

# son access operators with support for negative indices
template `{}`*(n: Indexable, i: int): untyped = n[i -| n]
template `{}=`*(n: Indexable, i: int, s: Indexable) =
n.sons[i -| n] = s
template `[]`*(n: Indexable, i: BackwardsIndex): Indexable = n[n.len - i.int]
template `[]=`*(n: Indexable, i: BackwardsIndex; x: Indexable) = n[n.len - i.int] = x

when defined(useNodeIds):
const nodeIdToDebug* = -1 # 299750 # 300761 #300863 # 300879
@@ -1038,9 +1034,9 @@ proc newNode*(kind: TNodeKind): PNode =
new(result)
result.kind = kind
#result.info = UnknownLineInfo() inlined:
result.info.fileIndex = int32(- 1)
result.info.col = int16(- 1)
result.info.line = int16(- 1)
result.info.fileIndex = int32(-1)
result.info.col = int16(-1)
result.info.line = int16(-1)
when defined(useNodeIds):
result.id = gNodeId
if result.id == nodeIdToDebug:
@@ -1050,6 +1046,8 @@ proc newNode*(kind: TNodeKind): PNode =

proc newTree*(kind: TNodeKind; children: varargs[PNode]): PNode =
result = newNode(kind)
if children.len > 0:
result.info = children[0].info
result.sons = @children

proc newIntNode*(kind: TNodeKind, intVal: BiggestInt): PNode =
@@ -1081,7 +1079,7 @@ proc newSym*(symKind: TSymKind, name: PIdent, owner: PSym,
result.info = info
result.options = gOptions
result.owner = owner
result.offset = - 1
result.offset = -1
result.id = getID()
when debugIds:
registerId(result)
@@ -1293,6 +1291,7 @@ proc assignType*(dest, src: PType) =
dest.align = src.align
dest.destructor = src.destructor
dest.deepCopy = src.deepCopy
dest.sink = src.sink
dest.assignment = src.assignment
dest.lockLevel = src.lockLevel
# this fixes 'type TLock = TSysLock':
@@ -1391,6 +1390,14 @@ proc skipTypes*(t: PType, kinds: TTypeKinds): PType =
result = t
while result.kind in kinds: result = lastSon(result)

proc skipTypes*(t: PType, kinds: TTypeKinds; maxIters: int): PType =
result = t
var i = maxIters
while result.kind in kinds:
result = lastSon(result)
dec i
if i == 0: return nil

proc skipTypesOrNil*(t: PType, kinds: TTypeKinds): PType =
## same as skipTypes but handles 'nil'
result = t
@@ -1403,7 +1410,7 @@ proc isGCedMem*(t: PType): bool {.inline.} =
t.kind == tyProc and t.callConv == ccClosure

proc propagateToOwner*(owner, elem: PType) =
const HaveTheirOwnEmpty = {tySequence, tySet, tyPtr, tyRef, tyProc}
const HaveTheirOwnEmpty = {tySequence, tyOpt, tySet, tyPtr, tyRef, tyProc}
owner.flags = owner.flags + (elem.flags * {tfHasMeta})
if tfNotNil in elem.flags:
if owner.kind in {tyGenericInst, tyGenericBody, tyGenericInvocation}:
@@ -1419,9 +1426,9 @@ proc propagateToOwner*(owner, elem: PType) =
owner.flags.incl tfHasMeta

if tfHasAsgn in elem.flags:
let o2 = elem.skipTypes({tyGenericInst, tyAlias})
let o2 = owner.skipTypes({tyGenericInst, tyAlias})
if o2.kind in {tyTuple, tyObject, tyArray,
tySequence, tySet, tyDistinct}:
tySequence, tyOpt, tySet, tyDistinct}:
o2.flags.incl tfHasAsgn
owner.flags.incl tfHasAsgn

@@ -1603,10 +1610,10 @@ proc hasPattern*(s: PSym): bool {.inline.} =
result = isRoutine(s) and s.ast.sons[patternPos].kind != nkEmpty

iterator items*(n: PNode): PNode =
for i in 0.. <n.safeLen: yield n.sons[i]
for i in 0..<n.safeLen: yield n.sons[i]

iterator pairs*(n: PNode): tuple[i: int, n: PNode] =
for i in 0.. <n.len: yield (i, n.sons[i])
for i in 0..<n.len: yield (i, n.sons[i])

proc isAtom*(n: PNode): bool {.inline.} =
result = n.kind >= nkNone and n.kind <= nkNilLit
@@ -1662,3 +1669,10 @@ when false:
if n.isNil: return true
for i in 0 ..< n.safeLen:
if n[i].containsNil: return true

template hasDestructor*(t: PType): bool = tfHasAsgn in t.flags
template incompleteType*(t: PType): bool =
t.sym != nil and {sfForward, sfNoForward} * t.sym.flags == {sfForward}

template typeCompleted*(s: PSym) =
incl s.flags, sfNoForward
@@ -102,7 +102,7 @@ proc hashTree(c: var MD5Context, n: PNode) =
of nkStrLit..nkTripleStrLit:
c &= n.strVal
else:
for i in 0.. <n.len: hashTree(c, n.sons[i])
for i in 0..<n.len: hashTree(c, n.sons[i])

proc hashType(c: var MD5Context, t: PType) =
# modelled after 'typeToString'
@@ -151,13 +151,13 @@ proc hashType(c: var MD5Context, t: PType) =
c.hashType(t.sons[0])
of tyProc:
c &= (if tfIterator in t.flags: "iterator " else: "proc ")
for i in 0.. <t.len: c.hashType(t.sons[i])
for i in 0..<t.len: c.hashType(t.sons[i])
md5Update(c, cast[cstring](addr(t.callConv)), 1)

if tfNoSideEffect in t.flags: c &= ".noSideEffect"
if tfThread in t.flags: c &= ".thread"
else:
for i in 0.. <t.len: c.hashType(t.sons[i])
for i in 0..<t.len: c.hashType(t.sons[i])
if tfNotNil in t.flags: c &= "not nil"

proc canonConst(n: PNode): TUid =

@@ -11,7 +11,7 @@

proc leftAppearsOnRightSide(le, ri: PNode): bool =
if le != nil:
for i in 1 .. <ri.len:
for i in 1 ..< ri.len:
let r = ri[i]
if isPartOf(le, r) != arNo: return true

@@ -364,7 +364,7 @@ proc genPatternCall(p: BProc; ri: PNode; pat: string; typ: PType): Rope =
of '@':
if j < ri.len:
result.add genOtherArg(p, ri, j, typ)
for k in j+1 .. < ri.len:
for k in j+1 ..< ri.len:
result.add(~", ")
result.add genOtherArg(p, ri, k, typ)
inc i
@@ -377,7 +377,7 @@ proc genPatternCall(p: BProc; ri: PNode; pat: string; typ: PType): Rope =
result.add(~"(")
if 1 < ri.len:
result.add genOtherArg(p, ri, 1, typ)
for k in j+1 .. < ri.len:
for k in j+1 ..< ri.len:
result.add(~", ")
result.add genOtherArg(p, ri, k, typ)
result.add(~")")
@@ -228,7 +228,7 @@ proc genOptAsgnTuple(p: BProc, dest, src: TLoc, flags: TAssignmentFlags) =
else:
flags
let t = skipTypes(dest.t, abstractInst).getUniqueType()
for i in 0 .. <t.len:
for i in 0 ..< t.len:
let t = t.sons[i]
let field = "Field$1" % [i.rope]
genAssignment(p, optAsgnLoc(dest, t, field),
@@ -270,10 +270,10 @@ proc genGenericAsgn(p: BProc, dest, src: TLoc, flags: TAssignmentFlags) =
addrLoc(dest), addrLoc(src), rdLoc(dest))
else:
linefmt(p, cpsStmts, "#genericShallowAssign((void*)$1, (void*)$2, $3);$n",
addrLoc(dest), addrLoc(src), genTypeInfo(p.module, dest.t))
addrLoc(dest), addrLoc(src), genTypeInfo(p.module, dest.t, dest.lode.info))
else:
linefmt(p, cpsStmts, "#genericAssign((void*)$1, (void*)$2, $3);$n",
addrLoc(dest), addrLoc(src), genTypeInfo(p.module, dest.t))
addrLoc(dest), addrLoc(src), genTypeInfo(p.module, dest.t, dest.lode.info))

proc genAssignment(p: BProc, dest, src: TLoc, flags: TAssignmentFlags) =
# This function replaces all other methods for generating
@@ -291,7 +291,8 @@ proc genAssignment(p: BProc, dest, src: TLoc, flags: TAssignmentFlags) =
genRefAssign(p, dest, src, flags)
else:
linefmt(p, cpsStmts, "#genericSeqAssign($1, $2, $3);$n",
addrLoc(dest), rdLoc(src), genTypeInfo(p.module, dest.t))
addrLoc(dest), rdLoc(src),
genTypeInfo(p.module, dest.t, dest.lode.info))
of tyString:
if (needToCopy notin flags and src.storage != OnStatic) or canMove(src.lode):
genRefAssign(p, dest, src, flags)
@@ -352,7 +353,8 @@ proc genAssignment(p: BProc, dest, src: TLoc, flags: TAssignmentFlags) =
if needsComplexAssignment(dest.t):
linefmt(p, cpsStmts, # XXX: is this correct for arrays?
"#genericAssignOpenArray((void*)$1, (void*)$2, $1Len_0, $3);$n",
addrLoc(dest), addrLoc(src), genTypeInfo(p.module, dest.t))
addrLoc(dest), addrLoc(src),
genTypeInfo(p.module, dest.t, dest.lode.info))
else:
useStringh(p.module)
linefmt(p, cpsStmts,
@@ -393,14 +395,17 @@ proc genDeepCopy(p: BProc; dest, src: TLoc) =
of tyPtr, tyRef, tyProc, tyTuple, tyObject, tyArray:
# XXX optimize this
linefmt(p, cpsStmts, "#genericDeepCopy((void*)$1, (void*)$2, $3);$n",
addrLoc(dest), addrLocOrTemp(src), genTypeInfo(p.module, dest.t))
addrLoc(dest), addrLocOrTemp(src),
genTypeInfo(p.module, dest.t, dest.lode.info))
of tySequence, tyString:
linefmt(p, cpsStmts, "#genericSeqDeepCopy($1, $2, $3);$n",
addrLoc(dest), rdLoc(src), genTypeInfo(p.module, dest.t))
addrLoc(dest), rdLoc(src),
genTypeInfo(p.module, dest.t, dest.lode.info))
of tyOpenArray, tyVarargs:
linefmt(p, cpsStmts,
"#genericDeepCopyOpenArray((void*)$1, (void*)$2, $1Len_0, $3);$n",
addrLoc(dest), addrLocOrTemp(src), genTypeInfo(p.module, dest.t))
addrLoc(dest), addrLocOrTemp(src),
genTypeInfo(p.module, dest.t, dest.lode.info))
of tySet:
if mapType(ty) == ctArray:
useStringh(p.module)
@@ -965,23 +970,30 @@ proc genEcho(p: BProc, n: PNode) =
# this unusal way of implementing it ensures that e.g. ``echo("hallo", 45)``
# is threadsafe.
internalAssert n.kind == nkBracket
var args: Rope = nil
var a: TLoc
for i in countup(0, n.len-1):
if n.sons[i].skipConv.kind == nkNilLit:
add(args, ", \"nil\"")
else:
initLocExpr(p, n.sons[i], a)
addf(args, ", $1? ($1)->data:\"nil\"", [rdLoc(a)])
if platform.targetOS == osGenode:
# bypass libc and print directly to the Genode LOG session
var args: Rope = nil
var a: TLoc
for i in countup(0, n.len-1):
if n.sons[i].skipConv.kind == nkNilLit:
add(args, ", \"nil\"")
else:
initLocExpr(p, n.sons[i], a)
addf(args, ", $1? ($1)->data:\"nil\"", [rdLoc(a)])
p.module.includeHeader("<base/log.h>")
linefmt(p, cpsStmts, """Genode::log(""$1);$n""", args)
else:
p.module.includeHeader("<stdio.h>")
linefmt(p, cpsStmts, "printf($1$2);$n",
makeCString(repeat("%s", n.len) & tnl), args)
linefmt(p, cpsStmts, "fflush(stdout);$n")
if n.len == 0:
linefmt(p, cpsStmts, "#echoBinSafe(NIM_NIL, $1);$n", n.len.rope)
else:
var a: TLoc
initLocExpr(p, n, a)
linefmt(p, cpsStmts, "#echoBinSafe($1, $2);$n", a.rdLoc, n.len.rope)
when false:
p.module.includeHeader("<stdio.h>")
linefmt(p, cpsStmts, "printf($1$2);$n",
makeCString(repeat("%s", n.len) & tnl), args)
linefmt(p, cpsStmts, "fflush(stdout);$n")

proc gcUsage(n: PNode) =
if gSelectedGC == gcNone: message(n.info, warnGcMem, n.renderTree)
@@ -1094,7 +1106,8 @@ proc genReset(p: BProc, n: PNode) =
var a: TLoc
initLocExpr(p, n.sons[1], a)
linefmt(p, cpsStmts, "#genericReset((void*)$1, $2);$n",
addrLoc(a), genTypeInfo(p.module, skipTypes(a.t, {tyVar})))
addrLoc(a),
genTypeInfo(p.module, skipTypes(a.t, {tyVar}), n.info))

proc rawGenNew(p: BProc, a: TLoc, sizeExpr: Rope) =
var sizeExpr = sizeExpr
@@ -1108,7 +1121,7 @@ proc rawGenNew(p: BProc, a: TLoc, sizeExpr: Rope) =
sizeExpr = "sizeof($1)" %
[getTypeDesc(p.module, bt)]
let args = [getTypeDesc(p.module, typ),
genTypeInfo(p.module, typ),
genTypeInfo(p.module, typ, a.lode.info),
sizeExpr]
if a.storage == OnHeap and usesNativeGC():
# use newObjRC1 as an optimization
@@ -1138,7 +1151,7 @@ proc genNew(p: BProc, e: PNode) =
proc genNewSeqAux(p: BProc, dest: TLoc, length: Rope) =
let seqtype = skipTypes(dest.t, abstractVarRange)
let args = [getTypeDesc(p.module, seqtype),
genTypeInfo(p.module, seqtype), length]
genTypeInfo(p.module, seqtype, dest.lode.info), length]
var call: TLoc
initLoc(call, locExpr, dest.lode, OnHeap)
if dest.storage == OnHeap and usesNativeGC():
@@ -1166,7 +1179,7 @@ proc genNewSeqOfCap(p: BProc; e: PNode; d: var TLoc) =
putIntoDest(p, d, e, ropecg(p.module,
"($1)#nimNewSeqOfCap($2, $3)", [
getTypeDesc(p.module, seqtype),
genTypeInfo(p.module, seqtype), a.rdLoc]))
genTypeInfo(p.module, seqtype, e.info), a.rdLoc]))
gcUsage(e)

proc genConstExpr(p: BProc, n: PNode): Rope
@@ -1205,7 +1218,7 @@ proc genObjConstr(p: BProc, e: PNode, d: var TLoc) =
constructLoc(p, tmp)
discard getTypeDesc(p.module, t)
let ty = getUniqueType(t)
for i in 1 .. <e.len:
for i in 1 ..< e.len:
let it = e.sons[i]
var tmp2: TLoc
tmp2.r = r
@@ -1248,17 +1261,31 @@ proc genArrToSeq(p: BProc, n: PNode, d: var TLoc) =
if d.k == locNone:
getTemp(p, n.typ, d)
# generate call to newSeq before adding the elements per hand:
var L = int(lengthOrd(n.sons[1].typ))

let L = int(lengthOrd(n.sons[1].typ))
genNewSeqAux(p, d, intLiteral(L))
initLocExpr(p, n.sons[1], a)
for i in countup(0, L - 1):
# bug #5007; do not produce excessive C source code:
if L < 10:
for i in countup(0, L - 1):
initLoc(elem, locExpr, lodeTyp elemType(skipTypes(n.typ, abstractInst)), OnHeap)
elem.r = rfmt(nil, "$1->data[$2]", rdLoc(d), intLiteral(i))
elem.storage = OnHeap # we know that sequences are on the heap
initLoc(arr, locExpr, lodeTyp elemType(skipTypes(n.sons[1].typ, abstractInst)), a.storage)
arr.r = rfmt(nil, "$1[$2]", rdLoc(a), intLiteral(i))
genAssignment(p, elem, arr, {afDestIsNil, needToCopy})
else:
var i: TLoc
getTemp(p, getSysType(tyInt), i)
let oldCode = p.s(cpsStmts)
linefmt(p, cpsStmts, "for ($1 = 0; $1 < $2; $1++) {$n", i.r, L.rope)
initLoc(elem, locExpr, lodeTyp elemType(skipTypes(n.typ, abstractInst)), OnHeap)
elem.r = rfmt(nil, "$1->data[$2]", rdLoc(d), intLiteral(i))
elem.r = rfmt(nil, "$1->data[$2]", rdLoc(d), rdLoc(i))
elem.storage = OnHeap # we know that sequences are on the heap
initLoc(arr, locExpr, lodeTyp elemType(skipTypes(n.sons[1].typ, abstractInst)), a.storage)
arr.r = rfmt(nil, "$1[$2]", rdLoc(a), intLiteral(i))
arr.r = rfmt(nil, "$1[$2]", rdLoc(a), rdLoc(i))
genAssignment(p, elem, arr, {afDestIsNil, needToCopy})
lineF(p, cpsStmts, "}$n", [])


proc genNewFinalize(p: BProc, e: PNode) =
var
@@ -1269,7 +1296,7 @@ proc genNewFinalize(p: BProc, e: PNode) =
initLocExpr(p, e.sons[1], a)
initLocExpr(p, e.sons[2], f)
initLoc(b, locExpr, a.lode, OnHeap)
ti = genTypeInfo(p.module, refType)
ti = genTypeInfo(p.module, refType, e.info)
addf(p.module.s[cfsTypeInit3], "$1->finalizer = (void*)$2;$n", [ti, rdLoc(f)])
b.r = ropecg(p.module, "($1) #newObj($2, sizeof($3))", [
getTypeDesc(p.module, refType),
@@ -1279,10 +1306,10 @@ proc genNewFinalize(p: BProc, e: PNode) =
genObjectInit(p, cpsStmts, bt, a, false)
gcUsage(e)

proc genOfHelper(p: BProc; dest: PType; a: Rope): Rope =
proc genOfHelper(p: BProc; dest: PType; a: Rope; info: TLineInfo): Rope =
# unfortunately 'genTypeInfo' sets tfObjHasKids as a side effect, so we
# have to call it here first:
let ti = genTypeInfo(p.module, dest)
let ti = genTypeInfo(p.module, dest, info)
if tfFinal in dest.flags or (objHasKidsValid in p.module.flags and
tfObjHasKids notin dest.flags):
result = "$1.m_type == $2" % [a, ti]
@@ -1295,7 +1322,7 @@ proc genOfHelper(p: BProc; dest: PType; a: Rope): Rope =
when false:
# former version:
result = rfmt(p.module, "#isObj($1.m_type, $2)",
a, genTypeInfo(p.module, dest))
a, genTypeInfo(p.module, dest, info))

proc genOf(p: BProc, x: PNode, typ: PType, d: var TLoc) =
var a: TLoc
@@ -1317,9 +1344,9 @@ proc genOf(p: BProc, x: PNode, typ: PType, d: var TLoc) =
globalError(x.info, errGenerated,
"no 'of' operator available for pure objects")
if nilCheck != nil:
r = rfmt(p.module, "(($1) && ($2))", nilCheck, genOfHelper(p, dest, r))
r = rfmt(p.module, "(($1) && ($2))", nilCheck, genOfHelper(p, dest, r, x.info))
else:
r = rfmt(p.module, "($1)", genOfHelper(p, dest, r))
r = rfmt(p.module, "($1)", genOfHelper(p, dest, r, x.info))
putIntoDest(p, d, x, r, a.storage)

proc genOf(p: BProc, n: PNode, d: var TLoc) =
@@ -1342,12 +1369,12 @@ proc genRepr(p: BProc, e: PNode, d: var TLoc) =
of tyEnum, tyOrdinal:
putIntoDest(p, d, e,
ropecg(p.module, "#reprEnum((NI)$1, $2)", [
rdLoc(a), genTypeInfo(p.module, t)]), a.storage)
rdLoc(a), genTypeInfo(p.module, t, e.info)]), a.storage)
of tyString:
putIntoDest(p, d, e, ropecg(p.module, "#reprStr($1)", [rdLoc(a)]), a.storage)
of tySet:
putIntoDest(p, d, e, ropecg(p.module, "#reprSet($1, $2)", [
addrLoc(a), genTypeInfo(p.module, t)]), a.storage)
addrLoc(a), genTypeInfo(p.module, t, e.info)]), a.storage)
of tyOpenArray, tyVarargs:
var b: TLoc
case a.t.kind
@@ -1362,22 +1389,22 @@ proc genRepr(p: BProc, e: PNode, d: var TLoc) =
else: internalError(e.sons[0].info, "genRepr()")
putIntoDest(p, d, e,
ropecg(p.module, "#reprOpenArray($1, $2)", [rdLoc(b),
genTypeInfo(p.module, elemType(t))]), a.storage)
genTypeInfo(p.module, elemType(t), e.info)]), a.storage)
of tyCString, tyArray, tyRef, tyPtr, tyPointer, tyNil, tySequence:
putIntoDest(p, d, e,
ropecg(p.module, "#reprAny($1, $2)", [
rdLoc(a), genTypeInfo(p.module, t)]), a.storage)
rdLoc(a), genTypeInfo(p.module, t, e.info)]), a.storage)
of tyEmpty, tyVoid:
localError(e.info, "'repr' doesn't support 'void' type")
else:
putIntoDest(p, d, e, ropecg(p.module, "#reprAny($1, $2)",
[addrLoc(a), genTypeInfo(p.module, t)]),
[addrLoc(a), genTypeInfo(p.module, t, e.info)]),
a.storage)
gcUsage(e)

proc genGetTypeInfo(p: BProc, e: PNode, d: var TLoc) =
let t = e.sons[1].typ
putIntoDest(p, d, e, genTypeInfo(p.module, t))
putIntoDest(p, d, e, genTypeInfo(p.module, t, e.info))

proc genDollar(p: BProc, n: PNode, d: var TLoc, frmt: string) =
var a: TLoc
@@ -1959,10 +1986,10 @@ proc upConv(p: BProc, n: PNode, d: var TLoc) =
t = skipTypes(t.sons[0], skipPtrs)
if nilCheck != nil:
linefmt(p, cpsStmts, "if ($1) #chckObj($2.m_type, $3);$n",
nilCheck, r, genTypeInfo(p.module, dest))
nilCheck, r, genTypeInfo(p.module, dest, n.info))
else:
linefmt(p, cpsStmts, "#chckObj($1.m_type, $2);$n",
r, genTypeInfo(p.module, dest))
r, genTypeInfo(p.module, dest, n.info))
if n.sons[0].typ.kind != tyObject:
putIntoDest(p, d, n,
"(($1) ($2))" % [getTypeDesc(p.module, n.typ), rdLoc(a)], a.storage)
@@ -2245,7 +2272,7 @@ proc getDefaultValue(p: BProc; typ: PType; info: TLineInfo): Rope =
result = rope"{NIM_NIL, NIM_NIL}"
of tyObject:
if not isObjLackingTypeField(t) and not p.module.compileToCpp:
result = "{{$1}}" % [genTypeInfo(p.module, t)]
result = "{{$1}}" % [genTypeInfo(p.module, t, info)]
else:
result = rope"{}"
of tyArray, tyTuple: result = rope"{}"
@@ -2290,7 +2317,7 @@ proc getNullValueAuxT(p: BProc; orig, t: PType; obj, cons: PNode, result: var Ro
base = skipTypes(base, skipPtrs)
getNullValueAuxT(p, orig, base, base.n, cons, result, count)
elif not isObjLackingTypeField(t) and not p.module.compileToCpp:
addf(result, "$1", [genTypeInfo(p.module, orig)])
addf(result, "$1", [genTypeInfo(p.module, orig, obj.info)])
inc count
getNullValueAux(p, t, obj, cons, result, count)
# do not emit '{}' as that is not valid C:

@@ -20,7 +20,7 @@ proc registerGcRoot(p: BProc, v: PSym) =
containsGarbageCollectedRef(v.loc.t):
# we register a specialized marked proc here; this has the advantage
# that it works out of the box for thread local storage then :-)
let prc = genTraverseProcForGlobal(p.module, v)
let prc = genTraverseProcForGlobal(p.module, v, v.info)
appcg(p.module, p.module.initProc.procSec(cpsInit),
"#nimRegisterGlobalMarker($1);$n", [prc])
@@ -141,9 +141,13 @@ template preserveBreakIdx(body: untyped): untyped =
p.breakIdx = oldBreakIdx

proc genState(p: BProc, n: PNode) =
internalAssert n.len == 1 and n.sons[0].kind == nkIntLit
let idx = n.sons[0].intVal
linefmt(p, cpsStmts, "STATE$1: ;$n", idx.rope)
internalAssert n.len == 1
let n0 = n[0]
if n0.kind == nkIntLit:
let idx = n.sons[0].intVal
linefmt(p, cpsStmts, "STATE$1: ;$n", idx.rope)
elif n0.kind == nkStrLit:
linefmt(p, cpsStmts, "$1: ;$n", n0.strVal.rope)

proc genGotoState(p: BProc, n: PNode) =
# we resist the temptation to translate it into duff's device as it later
@@ -156,8 +160,13 @@ proc genGotoState(p: BProc, n: PNode) =
lineF(p, cpsStmts, "switch ($1) {$n", [rdLoc(a)])
p.beforeRetNeeded = true
lineF(p, cpsStmts, "case -1: goto BeforeRet_;$n", [])
for i in 0 .. lastOrd(n.sons[0].typ):
lineF(p, cpsStmts, "case $1: goto STATE$1;$n", [rope(i)])
var statesCounter = lastOrd(n.sons[0].typ)
if n.len >= 2 and n[1].kind == nkIntLit:
statesCounter = n[1].intVal
let prefix = if n.len == 3 and n[2].kind == nkStrLit: n[2].strVal.rope
else: rope"STATE"
for i in 0 .. statesCounter:
lineF(p, cpsStmts, "case $2: goto $1$2;$n", [prefix, rope(i)])
lineF(p, cpsStmts, "}$n", [])

proc genBreakState(p: BProc, n: PNode) =
@@ -226,7 +235,7 @@ proc genSingleVar(p: BProc, a: PNode) =
var params: Rope
let typ = skipTypes(value.sons[0].typ, abstractInst)
assert(typ.kind == tyProc)
for i in 1.. <value.len:
for i in 1..<value.len:
if params != nil: params.add(~", ")
assert(sonsLen(typ) == sonsLen(typ.n))
add(params, genOtherArg(p, value, i, typ))
@@ -377,7 +386,7 @@ proc genReturnStmt(p: BProc, t: PNode) =
lineF(p, cpsStmts, "goto BeforeRet_;$n", [])

proc genGotoForCase(p: BProc; caseStmt: PNode) =
for i in 1 .. <caseStmt.len:
for i in 1 ..< caseStmt.len:
startBlock(p)
let it = caseStmt.sons[i]
for j in 0 .. it.len-2:
@@ -393,7 +402,7 @@ proc genComputedGoto(p: BProc; n: PNode) =
# first pass: Generate array of computed labels:
var casePos = -1
var arraySize: int
for i in 0 .. <n.len:
for i in 0 ..< n.len:
let it = n.sons[i]
if it.kind == nkCaseStmt:
if lastSon(it).kind != nkOfBranch:
@@ -423,7 +432,7 @@ proc genComputedGoto(p: BProc; n: PNode) =
let oldBody = p.blocks[topBlock].sections[cpsStmts]
p.blocks[topBlock].sections[cpsStmts] = nil

for j in casePos+1 .. <n.len: genStmts(p, n.sons[j])
for j in casePos+1 ..< n.len: genStmts(p, n.sons[j])
let tailB = p.blocks[topBlock].sections[cpsStmts]

p.blocks[topBlock].sections[cpsStmts] = nil
@@ -438,7 +447,7 @@ proc genComputedGoto(p: BProc; n: PNode) =
# first goto:
lineF(p, cpsStmts, "goto *$#[$#];$n", [tmp, a.rdLoc])

for i in 1 .. <caseStmt.len:
for i in 1 ..< caseStmt.len:
startBlock(p)
let it = caseStmt.sons[i]
for j in 0 .. it.len-2:
@@ -448,7 +457,7 @@ proc genComputedGoto(p: BProc; n: PNode) =
let val = getOrdValue(it.sons[j])
lineF(p, cpsStmts, "TMP$#_:$n", [intLiteral(val+id+1)])
genStmts(p, it.lastSon)
#for j in casePos+1 .. <n.len: genStmts(p, n.sons[j]) # tailB
#for j in casePos+1 ..< n.len: genStmts(p, n.sons[j]) # tailB
#for j in 0 .. casePos-1: genStmts(p, n.sons[j]) # tailA
add(p.s(cpsStmts), tailB)
add(p.s(cpsStmts), tailA)
@@ -735,7 +744,7 @@ proc genOrdinalCase(p: BProc, n: PNode, d: var TLoc) =
if splitPoint+1 < n.len:
lineF(p, cpsStmts, "switch ($1) {$n", [rdCharLoc(a)])
var hasDefault = false
for i in splitPoint+1 .. < n.len:
for i in splitPoint+1 ..< n.len:
# bug #4230: avoid false sharing between branches:
if d.k == locTemp and isEmptyType(n.typ): d.k = locNone
var branch = n[i]
@@ -826,7 +835,7 @@ proc genTryCpp(p: BProc, t: PNode, d: var TLoc) =
if orExpr != nil: add(orExpr, "||")
appcg(p.module, orExpr,
"#isObj($1.exp->m_type, $2)",
[exc, genTypeInfo(p.module, t.sons[i].sons[j].typ)])
[exc, genTypeInfo(p.module, t[i][j].typ, t[i][j].info)])
lineF(p, cpsStmts, "if ($1) ", [orExpr])
startBlock(p)
expr(p, t.sons[i].sons[blen-1], d)
@@ -935,7 +944,7 @@ proc genTry(p: BProc, t: PNode, d: var TLoc) =
"#isObj(#getCurrentException()->Sup.m_type, $1)"
else: "#isObj(#getCurrentException()->m_type, $1)"
appcg(p.module, orExpr, isObjFormat,
[genTypeInfo(p.module, t.sons[i].sons[j].typ)])
[genTypeInfo(p.module, t[i][j].typ, t[i][j].info)])
if i > 1: line(p, cpsStmts, "else ")
startBlock(p, "if ($1) {$n", [orExpr])
linefmt(p, cpsStmts, "$1.status = 0;$n", safePoint)
@@ -1053,7 +1062,7 @@ proc genWatchpoint(p: BProc, n: PNode) =
let typ = skipTypes(n.sons[1].typ, abstractVarRange)
lineCg(p, cpsStmts, "#dbgRegisterWatchpoint($1, (NCSTRING)$2, $3);$n",
[a.addrLoc, makeCString(renderTree(n.sons[1])),
genTypeInfo(p.module, typ)])
genTypeInfo(p.module, typ, n.info)])

proc genPragma(p: BProc, n: PNode) =
for i in countup(0, sonsLen(n) - 1):
@@ -1083,7 +1092,7 @@ proc genDiscriminantCheck(p: BProc, a, tmp: TLoc, objtype: PType,
field: PSym) =
var t = skipTypes(objtype, abstractVar)
assert t.kind == tyObject
discard genTypeInfo(p.module, t)
discard genTypeInfo(p.module, t, a.lode.info)
var L = lengthOrd(field.typ)
if not containsOrIncl(p.module.declaredThings, field.id):
appcg(p.module, cfsVars, "extern $1",
@@ -1103,19 +1112,46 @@ proc asgnFieldDiscriminant(p: BProc, e: PNode) =
genDiscriminantCheck(p, a, tmp, dotExpr.sons[0].typ, dotExpr.sons[1].sym)
genAssignment(p, a, tmp, {})

proc patchAsgnStmtListExpr(father, orig, n: PNode) =
case n.kind
of nkDerefExpr, nkHiddenDeref:
let asgn = copyNode(orig)
asgn.add orig[0]
asgn.add n
father.add asgn
of nkStmtList, nkStmtListExpr:
for x in n:
patchAsgnStmtListExpr(father, orig, x)
else:
father.add n

proc genAsgn(p: BProc, e: PNode, fastAsgn: bool) =
if e.sons[0].kind == nkSym and sfGoto in e.sons[0].sym.flags:
genLineDir(p, e)
genGotoVar(p, e.sons[1])
elif not fieldDiscriminantCheckNeeded(p, e):
# this fixes bug #6422 but we really need to change the representation of
# arrays in the backend...
let le = e[0]
let ri = e[1]
var needsRepair = false
var it = ri
while it.kind in {nkStmtList, nkStmtListExpr}:
it = it.lastSon
needsRepair = true
if it.kind in {nkDerefExpr, nkHiddenDeref} and needsRepair:
var patchedTree = newNodeI(nkStmtList, e.info)
patchAsgnStmtListExpr(patchedTree, e, ri)
genStmts(p, patchedTree)
return

var a: TLoc
if e[0].kind in {nkDerefExpr, nkHiddenDeref}:
genDeref(p, e[0], a, enforceDeref=true)
if le.kind in {nkDerefExpr, nkHiddenDeref}:
genDeref(p, le, a, enforceDeref=true)
else:
initLocExpr(p, e.sons[0], a)
initLocExpr(p, le, a)
if fastAsgn: incl(a.flags, lfNoDeepCopy)
assert(a.t != nil)
let ri = e.sons[1]
genLineDir(p, ri)
loadInto(p, e.sons[0], ri, a)
else:
@@ -66,7 +66,7 @@ proc genTraverseProc(c: var TTraversalClosure, accessor: Rope, typ: PType) =

var p = c.p
case typ.kind
of tyGenericInst, tyGenericBody, tyTypeDesc, tyAlias, tyDistinct:
of tyGenericInst, tyGenericBody, tyTypeDesc, tyAlias, tyDistinct, tyInferred:
genTraverseProc(c, accessor, lastSon(typ))
of tyArray:
let arraySize = lengthOrd(typ.sons[0])
@@ -151,8 +151,8 @@ proc genTraverseProc(m: BModule, origTyp: PType; sig: SigHash;
m.s[cfsProcHeaders].addf("$1;$n", [header])
m.s[cfsProcs].add(generatedProc)

proc genTraverseProcForGlobal(m: BModule, s: PSym): Rope =
discard genTypeInfo(m, s.loc.t)
proc genTraverseProcForGlobal(m: BModule, s: PSym; info: TLineInfo): Rope =
discard genTypeInfo(m, s.loc.t, info)

var c: TTraversalClosure
var p = newProc(nil, m)

@@ -92,7 +92,7 @@ proc mangleParamName(m: BModule; s: PSym): Rope =

proc mangleLocalName(p: BProc; s: PSym): Rope =
assert s.kind in skLocalVars+{skTemp}
assert sfGlobal notin s.flags
#assert sfGlobal notin s.flags
result = s.loc.r
if result == nil:
var key = s.name.s.mangle
@@ -119,7 +119,7 @@ proc scopeMangledParam(p: BProc; param: PSym) =

const
irrelevantForBackend = {tyGenericBody, tyGenericInst, tyGenericInvocation,
tyDistinct, tyRange, tyStatic, tyAlias}
tyDistinct, tyRange, tyStatic, tyAlias, tyInferred}

proc typeName(typ: PType): Rope =
let typ = typ.skipTypes(irrelevantForBackend)
@@ -278,7 +278,10 @@ proc ccgIntroducedPtr(s: PSym): bool =
elif tfByCopy in pt.flags: return false
case pt.kind
of tyObject:
if (optByRef in s.options) or (getSize(pt) > platform.floatSize * 2):
if s.typ.sym != nil and sfForward in s.typ.sym.flags:
# forwarded objects are *always* passed by pointers for consistency!
result = true
elif (optByRef in s.options) or (getSize(pt) > platform.floatSize * 3):
result = true # requested anyway
elif (tfFinal in pt.flags) and (pt.sons[0] == nil):
result = false # no need, because no subtyping possible
@@ -286,7 +289,7 @@ proc ccgIntroducedPtr(s: PSym): bool =
result = true # ordinary objects are always passed by reference,
# otherwise casting doesn't work
of tyTuple:
result = (getSize(pt) > platform.floatSize*2) or (optByRef in s.options)
result = (getSize(pt) > platform.floatSize*3) or (optByRef in s.options)
else: result = false

proc fillResult(param: PNode) =
@@ -806,7 +809,7 @@ proc getTypeDescAux(m: BModule, origTyp: PType, check: var IntSet): Rope =
var chunkStart = 0
while i < cppName.data.len:
if cppName.data[i] == '\'':
var chunkEnd = <i
var chunkEnd = i-1
var idx, stars: int
if scanCppGenericSlot(cppName.data, i, idx, stars):
result.add cppName.data.substr(chunkStart, chunkEnd)
@@ -854,11 +857,12 @@ proc getTypeDescAux(m: BModule, origTyp: PType, check: var IntSet): Rope =
[structOrUnion(t), result])
assert m.forwTypeCache[sig] == result
m.typeCache[sig] = result # always call for sideeffects:
let recdesc = if t.kind != tyTuple: getRecordDesc(m, t, result, check)
else: getTupleDesc(m, t, result, check)
if not isImportedType(t):
add(m.s[cfsTypes], recdesc)
elif tfIncompleteStruct notin t.flags: addAbiCheck(m, t, result)
if not incompleteType(t):
let recdesc = if t.kind != tyTuple: getRecordDesc(m, t, result, check)
else: getTupleDesc(m, t, result, check)
if not isImportedType(t):
add(m.s[cfsTypes], recdesc)
elif tfIncompleteStruct notin t.flags: addAbiCheck(m, t, result)
of tySet:
result = $t.kind & '_' & getTypeName(m, t.lastSon, hashType t.lastSon)
m.typeCache[sig] = result
@@ -935,12 +939,13 @@ proc genProcHeader(m: BModule, prc: PSym): Rope =

# ------------------ type info generation -------------------------------------

proc genTypeInfo(m: BModule, t: PType): Rope
proc genTypeInfo(m: BModule, t: PType; info: TLineInfo): Rope
proc getNimNode(m: BModule): Rope =
result = "$1[$2]" % [m.typeNodesName, rope(m.typeNodes)]
inc(m.typeNodes)

proc genTypeInfoAuxBase(m: BModule; typ, origType: PType; name, base: Rope) =
proc genTypeInfoAuxBase(m: BModule; typ, origType: PType;
name, base: Rope; info: TLineInfo) =
var nimtypeKind: int
#allocMemTI(m, typ, name)
if isObjLackingTypeField(typ):
@@ -970,15 +975,19 @@ proc genTypeInfoAuxBase(m: BModule; typ, origType: PType; name, base: Rope) =
[name])
addf(m.s[cfsVars], "TNimType $1;$n", [name])

proc genTypeInfoAux(m: BModule, typ, origType: PType, name: Rope) =
proc genTypeInfoAux(m: BModule, typ, origType: PType, name: Rope;
info: TLineInfo) =
var base: Rope
if sonsLen(typ) > 0 and typ.lastSon != nil:
var x = typ.lastSon
if typ.kind == tyObject: x = x.skipTypes(skipPtrs)
base = genTypeInfo(m, x)
if typ.kind == tyPtr and x.kind == tyObject and incompleteType(x):
base = rope("0")
else:
base = genTypeInfo(m, x, info)
else:
base = rope("0")
genTypeInfoAuxBase(m, typ, origType, name, base)
genTypeInfoAuxBase(m, typ, origType, name, base, info)

proc discriminatorTableName(m: BModule, objtype: PType, d: PSym): Rope =
# bugfix: we need to search the type that contains the discriminator:
@@ -994,19 +1003,20 @@ proc discriminatorTableDecl(m: BModule, objtype: PType, d: PSym): Rope =
var tmp = discriminatorTableName(m, objtype, d)
result = "TNimNode* $1[$2];$n" % [tmp, rope(lengthOrd(d.typ)+1)]

proc genObjectFields(m: BModule, typ, origType: PType, n: PNode, expr: Rope) =
proc genObjectFields(m: BModule, typ, origType: PType, n: PNode, expr: Rope;
info: TLineInfo) =
case n.kind
of nkRecList:
var L = sonsLen(n)
if L == 1:
genObjectFields(m, typ, origType, n.sons[0], expr)
genObjectFields(m, typ, origType, n.sons[0], expr, info)
elif L > 0:
var tmp = getTempName(m)
addf(m.s[cfsTypeInit1], "static TNimNode* $1[$2];$n", [tmp, rope(L)])
for i in countup(0, L-1):
var tmp2 = getNimNode(m)
addf(m.s[cfsTypeInit3], "$1[$2] = &$3;$n", [tmp, rope(i), tmp2])
genObjectFields(m, typ, origType, n.sons[i], tmp2)
genObjectFields(m, typ, origType, n.sons[i], tmp2, info)
addf(m.s[cfsTypeInit3], "$1.len = $2; $1.kind = 2; $1.sons = &$3[0];$n",
[expr, rope(L), tmp])
else:
@@ -1024,14 +1034,14 @@ proc genObjectFields(m: BModule, typ, origType: PType, n: PNode, expr: Rope) =
"$1.offset = offsetof($2, $3);$n" & "$1.typ = $4;$n" &
"$1.name = $5;$n" & "$1.sons = &$6[0];$n" &
"$1.len = $7;$n", [expr, getTypeDesc(m, origType), field.loc.r,
genTypeInfo(m, field.typ),
genTypeInfo(m, field.typ, info),
makeCString(field.name.s),
tmp, rope(L)])
addf(m.s[cfsData], "TNimNode* $1[$2];$n", [tmp, rope(L+1)])
for i in countup(1, sonsLen(n)-1):
var b = n.sons[i] # branch
var tmp2 = getNimNode(m)
genObjectFields(m, typ, origType, lastSon(b), tmp2)
genObjectFields(m, typ, origType, lastSon(b), tmp2, info)
case b.kind
of nkOfBranch:
if sonsLen(b) < 2:
@@ -1059,15 +1069,20 @@ proc genObjectFields(m: BModule, typ, origType: PType, n: PNode, expr: Rope) =
addf(m.s[cfsTypeInit3], "$1.kind = 1;$n" &
"$1.offset = offsetof($2, $3);$n" & "$1.typ = $4;$n" &
"$1.name = $5;$n", [expr, getTypeDesc(m, origType),
field.loc.r, genTypeInfo(m, field.typ), makeCString(field.name.s)])
field.loc.r, genTypeInfo(m, field.typ, info), makeCString(field.name.s)])
else: internalError(n.info, "genObjectFields")

proc genObjectInfo(m: BModule, typ, origType: PType, name: Rope) =
if typ.kind == tyObject: genTypeInfoAux(m, typ, origType, name)
else: genTypeInfoAuxBase(m, typ, origType, name, rope("0"))
proc genObjectInfo(m: BModule, typ, origType: PType, name: Rope; info: TLineInfo) =
if typ.kind == tyObject:
if incompleteType(typ):
localError(info, "request for RTTI generation for incomplete object: " &
typeToString(typ))
genTypeInfoAux(m, typ, origType, name, info)
else:
genTypeInfoAuxBase(m, typ, origType, name, rope("0"), info)
var tmp = getNimNode(m)
if not isImportedType(typ):
genObjectFields(m, typ, origType, typ.n, tmp)
genObjectFields(m, typ, origType, typ.n, tmp, info)
addf(m.s[cfsTypeInit3], "$1.node = &$2;$n", [name, tmp])
var t = typ.sons[0]
while t != nil:
@@ -1075,8 +1090,8 @@ proc genObjectInfo(m: BModule, typ, origType: PType, name: Rope) =
t.flags.incl tfObjHasKids
t = t.sons[0]

proc genTupleInfo(m: BModule, typ, origType: PType, name: Rope) =
genTypeInfoAuxBase(m, typ, typ, name, rope("0"))
proc genTupleInfo(m: BModule, typ, origType: PType, name: Rope; info: TLineInfo) =
genTypeInfoAuxBase(m, typ, typ, name, rope("0"), info)
var expr = getNimNode(m)
var length = sonsLen(typ)
if length > 0:
@@ -1090,7 +1105,7 @@ proc genTupleInfo(m: BModule, typ, origType: PType, name: Rope) =
"$1.offset = offsetof($2, Field$3);$n" &
"$1.typ = $4;$n" &
"$1.name = \"Field$3\";$n",
[tmp2, getTypeDesc(m, origType), rope(i), genTypeInfo(m, a)])
[tmp2, getTypeDesc(m, origType), rope(i), genTypeInfo(m, a, info)])
addf(m.s[cfsTypeInit3], "$1.len = $2; $1.kind = 2; $1.sons = &$3[0];$n",
[expr, rope(length), tmp])
else:
@@ -1098,12 +1113,12 @@ proc genTupleInfo(m: BModule, typ, origType: PType, name: Rope) =
[expr, rope(length)])
addf(m.s[cfsTypeInit3], "$1.node = &$2;$n", [name, expr])

proc genEnumInfo(m: BModule, typ: PType, name: Rope) =
proc genEnumInfo(m: BModule, typ: PType, name: Rope; info: TLineInfo) =
# Type information for enumerations is quite heavy, so we do some
# optimizations here: The ``typ`` field is never set, as it is redundant
# anyway. We generate a cstring array and a loop over it. Exceptional
# positions will be reset after the loop.
genTypeInfoAux(m, typ, typ, name)
genTypeInfoAux(m, typ, typ, name, info)
var nodePtrs = getTempName(m)
var length = sonsLen(typ.n)
addf(m.s[cfsTypeInit1], "static TNimNode* $1[$2];$n",
@@ -1141,15 +1156,15 @@ proc genEnumInfo(m: BModule, typ: PType, name: Rope) =
# 1 << 2 is {ntfEnumHole}
addf(m.s[cfsTypeInit3], "$1.flags = 1<<2;$n", [name])

proc genSetInfo(m: BModule, typ: PType, name: Rope) =
proc genSetInfo(m: BModule, typ: PType, name: Rope; info: TLineInfo) =
assert(typ.sons[0] != nil)
genTypeInfoAux(m, typ, typ, name)
genTypeInfoAux(m, typ, typ, name, info)
var tmp = getNimNode(m)
addf(m.s[cfsTypeInit3], "$1.len = $2; $1.kind = 0;$n" & "$3.node = &$1;$n",
[tmp, rope(firstOrd(typ)), name])

proc genArrayInfo(m: BModule, typ: PType, name: Rope) =
genTypeInfoAuxBase(m, typ, typ, name, genTypeInfo(m, typ.sons[1]))
proc genArrayInfo(m: BModule, typ: PType, name: Rope; info: TLineInfo) =
genTypeInfoAuxBase(m, typ, typ, name, genTypeInfo(m, typ.sons[1], info), info)

proc fakeClosureType(owner: PSym): PType =
# we generate the same RTTI as for a tuple[pointer, ref tuple[]]
@@ -1171,11 +1186,11 @@ proc genDeepCopyProc(m: BModule; s: PSym; result: Rope) =
addf(m.s[cfsTypeInit3], "$1.deepcopy =(void* (N_RAW_NIMCALL*)(void*))$2;$n",
[result, s.loc.r])

proc genTypeInfo(m: BModule, t: PType): Rope =
proc genTypeInfo(m: BModule, t: PType; info: TLineInfo): Rope =
let origType = t
var t = skipTypes(origType, irrelevantForBackend + tyUserTypeClasses)
if t.kind == tyOpt:
return genTypeInfo(m, optLowering(t))
return genTypeInfo(m, optLowering(t), info)

let sig = hashType(origType)
result = m.typeInfoMarker.getOrDefault(sig)
@@ -1197,7 +1212,7 @@ proc genTypeInfo(m: BModule, t: PType): Rope =
let owner = t.skipTypes(typedescPtrs).owner.getModule
if owner != m.module:
# make sure the type info is created in the owner module
discard genTypeInfo(m.g.modules[owner.position], origType)
discard genTypeInfo(m.g.modules[owner.position], origType, info)
# reference the type info as extern here
discard cgsym(m, "TNimType")
discard cgsym(m, "TNimNode")
@@ -1208,35 +1223,35 @@ proc genTypeInfo(m: BModule, t: PType): Rope =
case t.kind
of tyEmpty, tyVoid: result = rope"0"
of tyPointer, tyBool, tyChar, tyCString, tyString, tyInt..tyUInt64, tyVar:
genTypeInfoAuxBase(m, t, t, result, rope"0")
genTypeInfoAuxBase(m, t, t, result, rope"0", info)
of tyStatic:
if t.n != nil: result = genTypeInfo(m, lastSon t)
if t.n != nil: result = genTypeInfo(m, lastSon t, info)
else: internalError("genTypeInfo(" & $t.kind & ')')
of tyUserTypeClasses:
internalAssert t.isResolvedUserTypeClass
return genTypeInfo(m, t.lastSon)
return genTypeInfo(m, t.lastSon, info)
of tyProc:
if t.callConv != ccClosure:
genTypeInfoAuxBase(m, t, t, result, rope"0")
genTypeInfoAuxBase(m, t, t, result, rope"0", info)
else:
let x = fakeClosureType(t.owner)
genTupleInfo(m, x, x, result)
genTupleInfo(m, x, x, result, info)
of tySequence, tyRef, tyOptAsRef:
genTypeInfoAux(m, t, t, result)
genTypeInfoAux(m, t, t, result, info)
if gSelectedGC >= gcMarkAndSweep:
let markerProc = genTraverseProc(m, origType, sig, tiNew)
addf(m.s[cfsTypeInit3], "$1.marker = $2;$n", [result, markerProc])
of tyPtr, tyRange: genTypeInfoAux(m, t, t, result)
of tyArray: genArrayInfo(m, t, result)
of tySet: genSetInfo(m, t, result)
of tyEnum: genEnumInfo(m, t, result)
of tyObject: genObjectInfo(m, t, origType, result)
of tyPtr, tyRange: genTypeInfoAux(m, t, t, result, info)
of tyArray: genArrayInfo(m, t, result, info)
of tySet: genSetInfo(m, t, result, info)
of tyEnum: genEnumInfo(m, t, result, info)
of tyObject: genObjectInfo(m, t, origType, result, info)
of tyTuple:
# if t.n != nil: genObjectInfo(m, t, result)
# else:
# BUGFIX: use consistently RTTI without proper field names; otherwise
# results are not deterministic!
genTupleInfo(m, t, origType, result)
genTupleInfo(m, t, origType, result, info)
else: internalError("genTypeInfo(" & $t.kind & ')')
if t.deepCopy != nil:
genDeepCopyProc(m, t.deepCopy, result)
@@ -16,11 +16,11 @@ import

proc getPragmaStmt*(n: PNode, w: TSpecialWord): PNode =
case n.kind
of nkStmtList:
for i in 0 .. < n.len:
for i in 0 ..< n.len:
result = getPragmaStmt(n[i], w)
if result != nil: break
of nkPragma:
for i in 0 .. < n.len:
for i in 0 ..< n.len:
if whichPragma(n[i]) == w: return n[i]
else: discard

@@ -271,11 +271,11 @@ proc genObjectInit(p: BProc, section: TCProcSection, t: PType, a: TLoc,
while (s.kind == tyObject) and (s.sons[0] != nil):
add(r, ".Sup")
s = skipTypes(s.sons[0], skipPtrs)
linefmt(p, section, "$1.m_type = $2;$n", r, genTypeInfo(p.module, t))
linefmt(p, section, "$1.m_type = $2;$n", r, genTypeInfo(p.module, t, a.lode.info))
of frEmbedded:
# worst case for performance:
var r = if takeAddr: addrLoc(a) else: rdLoc(a)
linefmt(p, section, "#objectInit($1, $2);$n", r, genTypeInfo(p.module, t))
linefmt(p, section, "#objectInit($1, $2);$n", r, genTypeInfo(p.module, t, a.lode.info))

type
TAssignmentFlag = enum
@@ -306,7 +306,7 @@ proc resetLoc(p: BProc, loc: var TLoc) =
linefmt(p, cpsStmts, "#chckNil((void*)$1);$n", addrLoc(loc))
if loc.storage != OnStack:
linefmt(p, cpsStmts, "#genericReset((void*)$1, $2);$n",
addrLoc(loc), genTypeInfo(p.module, loc.t))
addrLoc(loc), genTypeInfo(p.module, loc.t, loc.lode.info))
# XXX: generated reset procs should not touch the m_type
# field, so disabling this should be safe:
genObjectInit(p, cpsStmts, loc.t, loc, true)
@@ -381,7 +381,7 @@ proc localDebugInfo(p: BProc, s: PSym) =
lineF(p, cpsInit,
"FR_.s[$1].address = (void*)$3; FR_.s[$1].typ = $4; FR_.s[$1].name = $2;$n",
[p.maxFrameLen.rope, makeCString(normalize(s.name.s)), a,
genTypeInfo(p.module, s.loc.t)])
genTypeInfo(p.module, s.loc.t, s.info)])
inc(p.maxFrameLen)
inc p.blocks[p.blocks.len-1].frameLen

@@ -451,7 +451,7 @@ proc assignGlobalVar(p: BProc, n: PNode) =
appcg(p.module, p.module.s[cfsDebugInit],
"#dbgRegisterGlobal($1, &$2, $3);$n",
[makeCString(normalize(s.owner.name.s & '.' & s.name.s)),
s.loc.r, genTypeInfo(p.module, s.typ)])
s.loc.r, genTypeInfo(p.module, s.typ, n.info)])

proc assignParam(p: BProc, s: PSym) =
assert(s.loc.r != nil)
@@ -663,6 +663,13 @@ proc closureSetup(p: BProc, prc: PSym) =
linefmt(p, cpsStmts, "$1 = ($2) ClE_0;$n",
rdLoc(env.loc), getTypeDesc(p.module, env.typ))

proc containsResult(n: PNode): bool =
if n.kind == nkSym and n.sym.kind == skResult:
result = true
else:
for i in 0..<n.safeLen:
if containsResult(n[i]): return true

proc easyResultAsgn(n: PNode): PNode =
const harmless = {nkConstSection, nkTypeSection, nkEmpty, nkCommentStmt} +
declarativeDefs
@@ -672,7 +679,7 @@ proc easyResultAsgn(n: PNode): PNode =
while i < n.len and n[i].kind in harmless: inc i
if i < n.len: result = easyResultAsgn(n[i])
of nkAsgn, nkFastAsgn:
if n[0].kind == nkSym and skResult == n[0].sym.kind:
if n[0].kind == nkSym and n[0].sym.kind == skResult and not containsResult(n[1]):
incl n.flags, nfPreventCg
return n[1]
of nkReturnStmt:
@@ -913,7 +920,7 @@ proc getFileHeader(cfile: Cfile): Rope =
proc genFilenames(m: BModule): Rope =
discard cgsym(m, "dbgRegisterFilename")
result = nil
for i in 0.. <fileInfos.len:
for i in 0..<fileInfos.len:
result.addf("dbgRegisterFilename($1);$N", [fileInfos[i].projPath.makeCString])

proc genMainProc(m: BModule) =
@@ -1006,10 +1013,9 @@ proc genMainProc(m: BModule) =
ComponentConstruct =
"void Libc::Component::construct(Libc::Env &env) {$N" &
"\tgenodeEnv = &env;$N" &
"\tLibc::with_libc([&] () {$n\t" &
"\tLibc::with_libc([&] () {$N\t" &
MainProcs &
"\t});$N" &
"\tenv.parent().exit(0);$N" &
"}$N$N"

var nimMain, otherMain: FormatStr

@@ -154,7 +154,7 @@ proc includeHeader*(this: BModule; header: string) =

proc s*(p: BProc, s: TCProcSection): var Rope {.inline.} =
# section in the current block
result = p.blocks[^1].sections[s]
result = p.blocks[p.blocks.len-1].sections[s]

proc procSec*(p: BProc, s: TCProcSection): var Rope {.inline.} =
# top level proc sections

@@ -81,15 +81,17 @@ proc writeVersionInfo(pass: TCmdLinePass) =
if pass == passCmd1:
msgWriteln(`%`(HelpMessage, [VersionAsString,
platform.OS[platform.hostOS].name,
CPU[platform.hostCPU].name]))
CPU[platform.hostCPU].name]),
{msgStdout})

const gitHash = gorge("git log -n 1 --format=%H").strip
when gitHash.len == 40:
msgWriteln("git hash: " & gitHash)
msgWriteln("git hash: " & gitHash, {msgStdout})

msgWriteln("active boot switches:" & usedRelease & usedAvoidTimeMachine &
usedTinyC & usedGnuReadline & usedNativeStacktrace & usedNoCaas &
usedFFI & usedBoehm & usedMarkAndSweep & usedGenerational & usedGoGC & usedNoGC)
usedFFI & usedBoehm & usedMarkAndSweep & usedGenerational & usedGoGC & usedNoGC,
{msgStdout})
msgQuit(0)

var
@@ -249,6 +251,7 @@ proc testCompileOption*(switch: string, info: TLineInfo): bool =
result = gOptions * {optNaNCheck, optInfCheck} == {optNaNCheck, optInfCheck}
of "infchecks": result = contains(gOptions, optInfCheck)
of "nanchecks": result = contains(gOptions, optNaNCheck)
of "nilchecks": result = contains(gOptions, optNilCheck)
of "objchecks": result = contains(gOptions, optObjCheck)
of "fieldchecks": result = contains(gOptions, optFieldCheck)
of "rangechecks": result = contains(gOptions, optRangeCheck)
@@ -340,7 +343,9 @@ proc processSwitch(switch, arg: string, pass: TCmdLinePass, info: TLineInfo;
# keep the old name for compat
if pass in {passCmd2, passPP} and not options.gNoNimblePath:
expectArg(switch, arg, pass, info)
let path = processPath(arg, info, notRelativeToProj=true)
var path = processPath(arg, info, notRelativeToProj=true)
let nimbleDir = getEnv("NIMBLE_DIR")
if nimbleDir.len > 0 and pass == passPP: path = nimbleDir / "pkgs"
nimblePath(path, info)
of "nonimblepath", "nobabelpath":
expectNoArg(switch, arg, pass, info)
@@ -471,6 +476,7 @@ proc processSwitch(switch, arg: string, pass: TCmdLinePass, info: TLineInfo;
processOnOffSwitch({optNaNCheck, optInfCheck}, arg, pass, info)
of "infchecks": processOnOffSwitch({optInfCheck}, arg, pass, info)
of "nanchecks": processOnOffSwitch({optNaNCheck}, arg, pass, info)
of "nilchecks": processOnOffSwitch({optNilCheck}, arg, pass, info)
of "objchecks": processOnOffSwitch({optObjCheck}, arg, pass, info)
of "fieldchecks": processOnOffSwitch({optFieldCheck}, arg, pass, info)
of "rangechecks": processOnOffSwitch({optRangeCheck}, arg, pass, info)
@@ -662,6 +668,10 @@ proc processSwitch(switch, arg: string, pass: TCmdLinePass, info: TLineInfo;
expectArg(switch, arg, pass, info)
if config != nil:
config.cppDefine(arg)
of "newruntime":
expectNoArg(switch, arg, pass, info)
newDestructors = true
defineSymbol("nimNewRuntime")
else:
if strutils.find(switch, '.') >= 0: options.setConfigVar(switch, arg)
else: invalidCmdLineOption(pass, switch, info)
@@ -108,3 +108,4 @@ proc initDefines*() =
defineSymbol("nimHasCppDefine")
defineSymbol("nimGenericInOutFlags")
when false: defineSymbol("nimHasOpt")
defineSymbol("nimNoArrayToCstringConversion")

@@ -10,7 +10,7 @@
# This module implements a dependency file generator.

import
os, options, ast, astalgo, msgs, ropes, idents, passes, importer
os, options, ast, astalgo, msgs, ropes, idents, passes, modulepaths

from modulegraphs import ModuleGraph
compiler/destroyer.nim (new file, 318 lines)
@@ -0,0 +1,318 @@
|
||||
#
|
||||
#
|
||||
# The Nim Compiler
|
||||
# (c) Copyright 2017 Andreas Rumpf
|
||||
#
|
||||
# See the file "copying.txt", included in this
|
||||
# distribution, for details about the copyright.
|
||||
#
|
||||
|
||||
## Injects destructor calls into Nim code and provides an optimizer
## that turns copies into moves. This is implemented as an
## AST to AST transformation so that every backend benefits from it.
|
||||
|
||||
## Rules for destructor injections:
|
||||
##
|
||||
## foo(bar(X(), Y()))
|
||||
## X and Y get destroyed after bar completes:
|
||||
##
|
||||
## foo( (tmpX = X(); tmpY = Y(); tmpBar = bar(tmpX, tmpY);
|
||||
## destroy(tmpX); destroy(tmpY);
|
||||
## tmpBar))
|
||||
## destroy(tmpBar)
|
||||
##
|
||||
## var x = f()
|
||||
## body
|
||||
##
|
||||
## is the same as:
|
||||
##
|
||||
## var x;
|
||||
## try:
|
||||
## move(x, f())
|
||||
## finally:
|
||||
## destroy(x)
|
||||
##
|
||||
## But this is really just an optimization that tries to avoid
## introducing too many temporaries; the 'destroy' is caused by
## the 'f()' call. No! That is not true for 'result = f()'!
|
||||
##
|
||||
## x = y where y is read only once
|
||||
## is the same as: move(x, y)
|
||||
##
|
||||
## Actually the more general rule is: The *last* read of ``y``
|
||||
## can become a move if ``y`` is the result of a construction.
|
||||
##
|
||||
## We also need to keep in mind here that the number of reads is
|
||||
## control flow dependent:
|
||||
## let x = foo()
|
||||
## while true:
|
||||
## y = x # only one read, but the 2nd iteration will fail!
|
||||
## This also affects recursions! Only usages that do not cross
|
||||
## a loop boundary (scope) and are not used in function calls
|
||||
## are safe.
|
||||
##
|
||||
##
|
||||
## x = f() is the same as: move(x, f())
|
||||
##
|
||||
## x = y
|
||||
## is the same as: copy(x, y)
|
||||
##
|
||||
## Reassignment works under this scheme:
|
||||
## var x = f()
|
||||
## x = y
|
||||
##
|
||||
## is the same as:
|
||||
##
|
||||
## var x;
|
||||
## try:
|
||||
## move(x, f())
|
||||
## copy(x, y)
|
||||
## finally:
|
||||
## destroy(x)
|
||||
##
|
||||
## result = f() must not destroy 'result'!
|
||||
##
|
||||
## The produced temporaries clutter up the code and might lead to
|
||||
## inefficiencies. A better strategy is to collect all the temporaries
|
||||
## in a single object that we put into a single try-finally that
|
||||
## surrounds the proc body. This means the code stays quite efficient
|
||||
## when compiled to C. In fact, we do the same for variables, so
|
||||
## destructors are called when the proc returns, not at scope exit!
|
||||
## This makes certain idioms easier to support. (Taking the slice
|
||||
## of a temporary object.)
|
||||
##
|
||||
## foo(bar(X(), Y()))
|
||||
## X and Y get destroyed after bar completes:
|
||||
##
|
||||
## var tmp: object
|
||||
## foo( (move tmp.x, X(); move tmp.y, Y(); tmp.bar = bar(tmp.x, tmp.y);
|
||||
## tmp.bar))
|
||||
## destroy(tmp.bar)
|
||||
## destroy(tmp.x); destroy(tmp.y)
|
||||
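##
## For orientation, a minimal sketch of the user-visible hooks this pass
## dispatches to; ``MyBox`` is a hypothetical example type, not part of
## the compiler (self-assignment handling omitted for brevity):
##
##   type MyBox = object
##     data: ptr int
##
##   proc `=destroy`(b: var MyBox) =
##     if b.data != nil: dealloc(b.data)
##
##   proc `=sink`(dst: var MyBox; src: MyBox) =
##     `=destroy`(dst)
##     dst.data = src.data
##
##   proc `=`(dst: var MyBox; src: MyBox) =
##     `=destroy`(dst)
##     if src.data != nil:
##       dst.data = create(int)
##       dst.data[] = src.data[]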
|
||||
|
||||
import
|
||||
intsets, ast, astalgo, msgs, renderer, magicsys, types, idents, trees,
|
||||
strutils, options, dfa, lowerings, rodread
|
||||
|
||||
const
|
||||
InterestingSyms = {skVar, skResult, skLet}
|
||||
|
||||
type
|
||||
Con = object
|
||||
owner: PSym
|
||||
g: ControlFlowGraph
|
||||
jumpTargets: IntSet
|
||||
tmpObj: PType
|
||||
tmp: PSym
|
||||
destroys, topLevelVars: PNode
|
||||
|
||||
proc isHarmlessVar*(s: PSym; c: Con): bool =
|
||||
# 's' is harmless if it is used only once and its
|
||||
# definition/usage are not split by any labels:
|
||||
#
|
||||
# let s = foo()
|
||||
# while true:
|
||||
# a[i] = s
|
||||
#
|
||||
# produces:
|
||||
#
|
||||
# def s
|
||||
# L1:
|
||||
# use s
|
||||
# goto L1
|
||||
#
|
||||
# let s = foo()
|
||||
# if cond:
|
||||
# a[i] = s
|
||||
# else:
|
||||
# a[j] = s
|
||||
#
|
||||
# produces:
|
||||
#
|
||||
# def s
|
||||
# fork L2
|
||||
# use s
|
||||
# goto L3
|
||||
# L2:
|
||||
# use s
|
||||
# L3
|
||||
#
|
||||
# So this analysis is for now overly conservative, but correct.
|
||||
var defsite = -1
|
||||
var usages = 0
|
||||
for i in 0..<c.g.len:
|
||||
case c.g[i].kind
|
||||
of def:
|
||||
if c.g[i].sym == s:
|
||||
if defsite < 0: defsite = i
|
||||
else: return false
|
||||
of use:
|
||||
if c.g[i].sym == s:
|
||||
if defsite < 0: return false
|
||||
for j in defsite .. i:
|
||||
# not within the same basic block?
|
||||
if j in c.jumpTargets: return false
|
||||
# if we want to die after the first 'use':
|
||||
if usages > 1: return false
|
||||
inc usages
|
||||
of useWithinCall:
|
||||
if c.g[i].sym == s: return false
|
||||
of goto, fork:
|
||||
discard "we do not perform an abstract interpretation yet"
|
||||
|
||||
template interestingSym(s: PSym): bool =
|
||||
s.owner == c.owner and s.kind in InterestingSyms and hasDestructor(s.typ)
|
||||
|
||||
proc patchHead(n: PNode) =
|
||||
if n.kind in nkCallKinds and n[0].kind == nkSym and n.len > 1:
|
||||
let s = n[0].sym
|
||||
if sfFromGeneric in s.flags and s.name.s[0] == '=' and
|
||||
s.name.s in ["=sink", "=", "=destroy"]:
|
||||
excl(s.flags, sfFromGeneric)
|
||||
patchHead(s.getBody)
|
||||
let t = n[1].typ.skipTypes({tyVar, tyGenericInst, tyAlias, tyInferred})
|
||||
template patch(op, field) =
|
||||
if s.name.s == op and field != nil and field != s:
|
||||
n.sons[0].sym = field
|
||||
patch "=sink", t.sink
|
||||
patch "=", t.assignment
|
||||
patch "=destroy", t.destructor
|
||||
for x in n:
|
||||
patchHead(x)
|
||||
|
||||
proc genSink(t: PType; dest: PNode): PNode =
|
||||
let t = t.skipTypes({tyGenericInst, tyAlias})
|
||||
let op = if t.sink != nil: t.sink else: t.assignment
|
||||
assert op != nil
|
||||
patchHead op.ast[bodyPos]
|
||||
result = newTree(nkCall, newSymNode(op), newTree(nkHiddenAddr, dest))
|
||||
|
||||
proc genCopy(t: PType; dest: PNode): PNode =
|
||||
let t = t.skipTypes({tyGenericInst, tyAlias})
|
||||
assert t.assignment != nil
|
||||
patchHead t.assignment.ast[bodyPos]
|
||||
result = newTree(nkCall, newSymNode(t.assignment), newTree(nkHiddenAddr, dest))
|
||||
|
||||
proc genDestroy(t: PType; dest: PNode): PNode =
|
||||
let t = t.skipTypes({tyGenericInst, tyAlias})
|
||||
assert t.destructor != nil
|
||||
patchHead t.destructor.ast[bodyPos]
|
||||
result = newTree(nkCall, newSymNode(t.destructor), newTree(nkHiddenAddr, dest))
|
||||
|
||||
proc addTopVar(c: var Con; v: PNode) =
|
||||
c.topLevelVars.add newTree(nkIdentDefs, v, emptyNode, emptyNode)
|
||||
|
||||
proc p(n: PNode; c: var Con): PNode
|
||||
|
||||
template recurse(n, dest) =
|
||||
for i in 0..<n.len:
|
||||
dest.add p(n[i], c)
|
||||
|
||||
proc moveOrCopy(dest, ri: PNode; c: var Con): PNode =
|
||||
if ri.kind in nkCallKinds:
|
||||
result = genSink(ri.typ, dest)
|
||||
# watch out and do not transform 'ri' twice if it's a call:
|
||||
let ri2 = copyNode(ri)
|
||||
recurse(ri, ri2)
|
||||
result.add ri2
|
||||
elif ri.kind == nkSym and isHarmlessVar(ri.sym, c):
|
||||
result = genSink(ri.typ, dest)
|
||||
result.add p(ri, c)
|
||||
else:
|
||||
result = genCopy(ri.typ, dest)
|
||||
result.add p(ri, c)
|
||||
|
||||
proc p(n: PNode; c: var Con): PNode =
|
||||
case n.kind
|
||||
of nkVarSection, nkLetSection:
|
||||
discard "transform; var x = y to var x; x op y where op is a move or copy"
|
||||
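# e.g. (a sketch): 'var s = foo()' turns into a top-level 'var s'
# (see addTopVar), a '`=sink`(s, foo())' (or '=' when no '=sink' exists)
# emitted here, and a '`=destroy`(s)' collected into the final 'finally'.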
result = newNodeI(nkStmtList, n.info)
|
||||
|
||||
for i in 0..<n.len:
|
||||
let it = n[i]
|
||||
let L = it.len-1
|
||||
let ri = it[L]
|
||||
if it.kind == nkVarTuple and hasDestructor(ri.typ):
|
||||
let x = lowerTupleUnpacking(it, c.owner)
|
||||
result.add p(x, c)
|
||||
elif it.kind == nkIdentDefs and hasDestructor(it[0].typ):
|
||||
for j in 0..L-2:
|
||||
let v = it[j]
|
||||
doAssert v.kind == nkSym
|
||||
# move the variable declaration to the top of the frame:
|
||||
c.addTopVar v
|
||||
# make sure it's destroyed at the end of the proc:
|
||||
c.destroys.add genDestroy(v.typ, v)
|
||||
if ri.kind != nkEmpty:
|
||||
let r = moveOrCopy(v, ri, c)
|
||||
result.add r
|
||||
else:
|
||||
# keep it, but transform 'ri':
|
||||
var varSection = copyNode(n)
|
||||
var itCopy = copyNode(it)
|
||||
for j in 0..L-1:
|
||||
itCopy.add it[j]
|
||||
itCopy.add p(ri, c)
|
||||
varSection.add itCopy
|
||||
result.add varSection
|
||||
of nkCallKinds:
|
||||
if n.typ != nil and hasDestructor(n.typ):
|
||||
discard "produce temp creation"
|
||||
result = newNodeIT(nkStmtListExpr, n.info, n.typ)
|
||||
let f = newSym(skField, getIdent(":d" & $c.tmpObj.n.len), c.owner, n.info)
|
||||
f.typ = n.typ
|
||||
rawAddField c.tmpObj, f
|
||||
var m = genSink(n.typ, rawDirectAccess(c.tmp, f))
|
||||
var call = copyNode(n)
|
||||
recurse(n, call)
|
||||
m.add call
|
||||
result.add m
|
||||
result.add rawDirectAccess(c.tmp, f)
|
||||
c.destroys.add genDestroy(n.typ, rawDirectAccess(c.tmp, f))
|
||||
else:
|
||||
result = copyNode(n)
|
||||
recurse(n, result)
|
||||
of nkAsgn, nkFastAsgn:
|
||||
if hasDestructor(n[0].typ):
|
||||
result = moveOrCopy(n[0], n[1], c)
|
||||
else:
|
||||
result = copyNode(n)
|
||||
recurse(n, result)
|
||||
of nkNone..nkNilLit, nkTypeSection, nkProcDef, nkConverterDef, nkMethodDef,
|
||||
nkIteratorDef, nkMacroDef, nkTemplateDef, nkLambda, nkDo, nkFuncDef:
|
||||
result = n
|
||||
else:
|
||||
result = copyNode(n)
|
||||
recurse(n, result)
|
||||
|
||||
proc injectDestructorCalls*(owner: PSym; n: PNode): PNode =
|
||||
var c: Con
|
||||
c.owner = owner
|
||||
c.tmp = newSym(skTemp, getIdent":d", owner, n.info)
|
||||
c.tmpObj = createObj(owner, n.info)
|
||||
c.tmp.typ = c.tmpObj
|
||||
c.destroys = newNodeI(nkStmtList, n.info)
|
||||
c.topLevelVars = newNodeI(nkVarSection, n.info)
|
||||
let cfg = constructCfg(owner, n)
|
||||
shallowCopy(c.g, cfg)
|
||||
c.jumpTargets = initIntSet()
|
||||
for i in 0..<c.g.len:
|
||||
if c.g[i].kind in {goto, fork}:
|
||||
c.jumpTargets.incl(i+c.g[i].dest)
|
||||
let body = p(n, c)
|
||||
if c.tmp.typ.n.len > 0:
|
||||
c.addTopVar(newSymNode c.tmp)
|
||||
result = newNodeI(nkStmtList, n.info)
|
||||
if c.topLevelVars.len > 0:
|
||||
result.add c.topLevelVars
|
||||
if c.destroys.len > 0:
|
||||
result.add newTryFinally(body, c.destroys)
|
||||
else:
|
||||
result.add body
|
||||
|
||||
when defined(nimDebugDestroys):
|
||||
if owner.name.s == "createSeq":
|
||||
echo "------------------------------------"
|
||||
echo owner.name.s, " transformed to: "
|
||||
echo result
|
||||
compiler/dfa.nim (new file, 439 lines)
@@ -0,0 +1,439 @@
|
||||
#
|
||||
#
|
||||
# The Nim Compiler
|
||||
# (c) Copyright 2017 Andreas Rumpf
|
||||
#
|
||||
# See the file "copying.txt", included in this
|
||||
# distribution, for details about the copyright.
|
||||
#
|
||||
|
||||
## Data flow analysis for Nim. For now the task is to prove that every
|
||||
## usage of a local variable 'v' is covered by an initialization to 'v'
|
||||
## first.
|
||||
## We transform the AST into a linear list of instructions first to
|
||||
## make this easier to handle: There are only 2 different branching
|
||||
## instructions: 'goto X' is an unconditional goto, 'fork X'
|
||||
## is a conditional goto (either the next instruction or 'X' can be
|
||||
## taken). Exhaustive case statements are translated
|
||||
## so that the last branch is transformed into an 'else' branch.
|
||||
## ``return`` and ``break`` are all covered by 'goto'.
|
||||
## The case to detect is ``use v`` that is not dominated by
|
||||
## a ``def v``.
|
||||
## The data structures and algorithms used here are inspired by
|
||||
## "A Graph–Free Approach to Data–Flow Analysis" by Markus Mohnen.
|
||||
## https://link.springer.com/content/pdf/10.1007/3-540-45937-5_6.pdf
|
||||
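##
## As a rough illustration (a sketch, not the exact output of
## ``codeListing``), an if/else over a symbol ``s``
##
##   if cond:
##     a[i] = s
##   else:
##     a[j] = s
##
## is flattened to something like:
##
##   use cond
##   fork L1
##   use s
##   goto L2
## L1:
##   use s
## L2: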
|
||||
import ast, astalgo, types, intsets, tables, msgs
|
||||
|
||||
type
|
||||
InstrKind* = enum
|
||||
goto, fork, def, use,
|
||||
useWithinCall # this strange special case is used to get more
|
||||
# move optimizations out of regular code
|
||||
# XXX This is still overly pessimistic in
|
||||
# call(let x = foo; bar(x))
|
||||
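# (In practice this means that in 'bar(x)' the 'x' is recorded as
# 'useWithinCall x', which destroyer.isHarmlessVar treats as "not a
# safe move candidate".)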
Instr* = object
|
||||
n*: PNode
|
||||
case kind*: InstrKind
|
||||
of def, use, useWithinCall: sym*: PSym
|
||||
of goto, fork: dest*: int
|
||||
|
||||
ControlFlowGraph* = seq[Instr]
|
||||
|
||||
TPosition = distinct int
|
||||
TBlock = object
|
||||
label: PSym
|
||||
fixups: seq[TPosition]
|
||||
|
||||
ValueKind = enum
|
||||
undef, value, valueOrUndef
|
||||
|
||||
Con = object
|
||||
code: ControlFlowGraph
|
||||
inCall: int
|
||||
blocks: seq[TBlock]
|
||||
|
||||
proc debugInfo(info: TLineInfo): string =
|
||||
result = info.toFilename & ":" & $info.line
|
||||
|
||||
proc codeListing(c: ControlFlowGraph, result: var string, start=0; last = -1) =
|
||||
# for debugging purposes
|
||||
# first iteration: compute all necessary labels:
|
||||
var jumpTargets = initIntSet()
|
||||
let last = if last < 0: c.len-1 else: min(last, c.len-1)
|
||||
for i in start..last:
|
||||
if c[i].kind in {goto, fork}:
|
||||
jumpTargets.incl(i+c[i].dest)
|
||||
var i = start
|
||||
while i <= last:
|
||||
if i in jumpTargets: result.add("L" & $i & ":\n")
|
||||
result.add "\t"
|
||||
result.add $c[i].kind
|
||||
result.add "\t"
|
||||
case c[i].kind
|
||||
of def, use, useWithinCall:
|
||||
result.add c[i].sym.name.s
|
||||
of goto, fork:
|
||||
result.add "L"
|
||||
result.add c[i].dest+i
|
||||
result.add("\t#")
|
||||
result.add(debugInfo(c[i].n.info))
|
||||
result.add("\n")
|
||||
inc i
|
||||
if i in jumpTargets: result.add("L" & $i & ": End\n")
|
||||
|
||||
|
||||
proc echoCfg*(c: ControlFlowGraph; start=0; last = -1) {.deprecated.} =
|
||||
## echos the ControlFlowGraph for debugging purposes.
|
||||
var buf = ""
|
||||
codeListing(c, buf, start, last)
|
||||
echo buf
|
||||
|
||||
proc forkI(c: var Con; n: PNode): TPosition =
|
||||
result = TPosition(c.code.len)
|
||||
c.code.add Instr(n: n, kind: fork, dest: 0)
|
||||
|
||||
proc gotoI(c: var Con; n: PNode): TPosition =
|
||||
result = TPosition(c.code.len)
|
||||
c.code.add Instr(n: n, kind: goto, dest: 0)
|
||||
|
||||
proc genLabel(c: Con): TPosition =
|
||||
result = TPosition(c.code.len)
|
||||
|
||||
proc jmpBack(c: var Con, n: PNode, p = TPosition(0)) =
|
||||
let dist = p.int - c.code.len
|
||||
internalAssert(-0x7fff < dist and dist < 0x7fff)
|
||||
c.code.add Instr(n: n, kind: goto, dest: dist)
|
||||
|
||||
proc patch(c: var Con, p: TPosition) =
|
||||
# patch with current index
|
||||
let p = p.int
|
||||
let diff = c.code.len - p
|
||||
internalAssert(-0x7fff < diff and diff < 0x7fff)
|
||||
c.code[p].dest = diff
|
||||
|
||||
proc popBlock(c: var Con; oldLen: int) =
|
||||
for f in c.blocks[oldLen].fixups:
|
||||
c.patch(f)
|
||||
c.blocks.setLen(oldLen)
|
||||
|
||||
template withBlock(labl: PSym; body: untyped) {.dirty.} =
|
||||
var oldLen {.gensym.} = c.blocks.len
|
||||
c.blocks.add TBlock(label: labl, fixups: @[])
|
||||
body
|
||||
popBlock(c, oldLen)
|
||||
|
||||
proc isTrue(n: PNode): bool =
|
||||
n.kind == nkSym and n.sym.kind == skEnumField and n.sym.position != 0 or
|
||||
n.kind == nkIntLit and n.intVal != 0
|
||||
|
||||
proc gen(c: var Con; n: PNode) # {.noSideEffect.}
|
||||
|
||||
proc genWhile(c: var Con; n: PNode) =
|
||||
# L1:
|
||||
# cond, tmp
|
||||
# fjmp tmp, L2
|
||||
# body
|
||||
# jmp L1
|
||||
# L2:
|
||||
let L1 = c.genLabel
|
||||
withBlock(nil):
|
||||
if isTrue(n.sons[0]):
|
||||
c.gen(n.sons[1])
|
||||
c.jmpBack(n, L1)
|
||||
else:
|
||||
c.gen(n.sons[0])
|
||||
let L2 = c.forkI(n)
|
||||
c.gen(n.sons[1])
|
||||
c.jmpBack(n, L1)
|
||||
c.patch(L2)
|
||||
|
||||
proc genBlock(c: var Con; n: PNode) =
|
||||
withBlock(n.sons[0].sym):
|
||||
c.gen(n.sons[1])
|
||||
|
||||
proc genBreak(c: var Con; n: PNode) =
|
||||
let L1 = c.gotoI(n)
|
||||
if n.sons[0].kind == nkSym:
|
||||
#echo cast[int](n.sons[0].sym)
|
||||
for i in countdown(c.blocks.len-1, 0):
|
||||
if c.blocks[i].label == n.sons[0].sym:
|
||||
c.blocks[i].fixups.add L1
|
||||
return
|
||||
globalError(n.info, errGenerated, "VM problem: cannot find 'break' target")
|
||||
else:
|
||||
c.blocks[c.blocks.high].fixups.add L1
|
||||
|
||||
proc genIf(c: var Con, n: PNode) =
|
||||
var endings: seq[TPosition] = @[]
|
||||
for i in countup(0, len(n) - 1):
|
||||
var it = n.sons[i]
|
||||
if it.len == 2:
|
||||
c.gen(it.sons[0].sons[1])
|
||||
var elsePos = c.forkI(it.sons[0].sons[1])
|
||||
c.gen(it.sons[1])
|
||||
if i < sonsLen(n)-1:
|
||||
endings.add(c.gotoI(it.sons[1]))
|
||||
c.patch(elsePos)
|
||||
else:
|
||||
c.gen(it.sons[0])
|
||||
for endPos in endings: c.patch(endPos)
|
||||
|
||||
proc genAndOr(c: var Con; n: PNode) =
|
||||
# asgn dest, a
|
||||
# fork L1
|
||||
# asgn dest, b
|
||||
# L1:
|
||||
c.gen(n.sons[1])
|
||||
let L1 = c.forkI(n)
|
||||
c.gen(n.sons[2])
|
||||
c.patch(L1)
|
||||
|
||||
proc genCase(c: var Con; n: PNode) =
|
||||
# if (!expr1) goto L1;
|
||||
# thenPart
|
||||
# goto LEnd
|
||||
# L1:
|
||||
# if (!expr2) goto L2;
|
||||
# thenPart2
|
||||
# goto LEnd
|
||||
# L2:
|
||||
# elsePart
|
||||
# Lend:
|
||||
var endings: seq[TPosition] = @[]
|
||||
c.gen(n.sons[0])
|
||||
for i in 1 ..< n.len:
|
||||
let it = n.sons[i]
|
||||
if it.len == 1:
|
||||
c.gen(it.sons[0])
|
||||
else:
|
||||
let elsePos = c.forkI(it.lastSon)
|
||||
c.gen(it.lastSon)
|
||||
if i < sonsLen(n)-1:
|
||||
endings.add(c.gotoI(it.lastSon))
|
||||
c.patch(elsePos)
|
||||
for endPos in endings: c.patch(endPos)
|
||||
|
||||
proc genTry(c: var Con; n: PNode) =
|
||||
var endings: seq[TPosition] = @[]
|
||||
let elsePos = c.forkI(n)
|
||||
c.gen(n.sons[0])
|
||||
c.patch(elsePos)
|
||||
for i in 1 ..< n.len:
|
||||
let it = n.sons[i]
|
||||
if it.kind != nkFinally:
|
||||
var blen = len(it)
|
||||
let endExcept = c.forkI(it)
|
||||
c.gen(it.lastSon)
|
||||
if i < sonsLen(n)-1:
|
||||
endings.add(c.gotoI(it))
|
||||
c.patch(endExcept)
|
||||
for endPos in endings: c.patch(endPos)
|
||||
let fin = lastSon(n)
|
||||
if fin.kind == nkFinally:
|
||||
c.gen(fin.sons[0])
|
||||
|
||||
proc genRaise(c: var Con; n: PNode) =
|
||||
gen(c, n.sons[0])
|
||||
c.code.add Instr(n: n, kind: goto, dest: high(int) - c.code.len)
|
||||
|
||||
proc genReturn(c: var Con; n: PNode) =
|
||||
if n.sons[0].kind != nkEmpty: gen(c, n.sons[0])
|
||||
c.code.add Instr(n: n, kind: goto, dest: high(int) - c.code.len)
|
||||
|
||||
const
|
||||
InterestingSyms = {skVar, skResult, skLet}
|
||||
|
||||
proc genUse(c: var Con; n: PNode) =
|
||||
var n = n
|
||||
while n.kind in {nkDotExpr, nkCheckedFieldExpr,
|
||||
nkBracketExpr, nkDerefExpr, nkHiddenDeref,
|
||||
nkAddr, nkHiddenAddr}:
|
||||
n = n[0]
|
||||
if n.kind == nkSym and n.sym.kind in InterestingSyms:
|
||||
if c.inCall > 0:
|
||||
c.code.add Instr(n: n, kind: useWithinCall, sym: n.sym)
|
||||
else:
|
||||
c.code.add Instr(n: n, kind: use, sym: n.sym)
|
||||
|
||||
proc genDef(c: var Con; n: PNode) =
|
||||
if n.kind == nkSym and n.sym.kind in InterestingSyms:
|
||||
c.code.add Instr(n: n, kind: def, sym: n.sym)
|
||||
|
||||
proc genCall(c: var Con; n: PNode) =
|
||||
gen(c, n[0])
|
||||
var t = n[0].typ
|
||||
if t != nil: t = t.skipTypes(abstractInst)
|
||||
inc c.inCall
|
||||
for i in 1..<n.len:
|
||||
gen(c, n[i])
|
||||
if t != nil and i < t.len and t.sons[i].kind == tyVar:
|
||||
genDef(c, n[i])
|
||||
dec c.inCall
|
||||
|
||||
proc genMagic(c: var Con; n: PNode; m: TMagic) =
|
||||
case m
|
||||
of mAnd, mOr: c.genAndOr(n)
|
||||
of mNew, mNewFinalize:
|
||||
genDef(c, n[1])
|
||||
for i in 2..<n.len: gen(c, n[i])
|
||||
of mExit:
|
||||
genCall(c, n)
|
||||
c.code.add Instr(n: n, kind: goto, dest: high(int) - c.code.len)
|
||||
else:
|
||||
genCall(c, n)
|
||||
|
||||
proc genVarSection(c: var Con; n: PNode) =
|
||||
for a in n:
|
||||
if a.kind == nkCommentStmt: continue
|
||||
if a.kind == nkVarTuple:
|
||||
gen(c, a.lastSon)
|
||||
for i in 0 .. a.len-3: genDef(c, a[i])
|
||||
else:
|
||||
gen(c, a.lastSon)
|
||||
if a.lastSon.kind != nkEmpty:
|
||||
genDef(c, a.sons[0])
|
||||
|
||||
proc gen(c: var Con; n: PNode) =
|
||||
case n.kind
|
||||
of nkSym: genUse(c, n)
|
||||
of nkCallKinds:
|
||||
if n.sons[0].kind == nkSym:
|
||||
let s = n.sons[0].sym
|
||||
if s.magic != mNone:
|
||||
genMagic(c, n, s.magic)
|
||||
else:
|
||||
genCall(c, n)
|
||||
else:
|
||||
genCall(c, n)
|
||||
of nkCharLit..nkNilLit: discard
|
||||
of nkAsgn, nkFastAsgn:
|
||||
gen(c, n[1])
|
||||
genDef(c, n[0])
|
||||
of nkDotExpr, nkCheckedFieldExpr, nkBracketExpr,
|
||||
nkDerefExpr, nkHiddenDeref, nkAddr, nkHiddenAddr:
|
||||
gen(c, n[0])
|
||||
of nkIfStmt, nkIfExpr: genIf(c, n)
|
||||
of nkWhenStmt:
|
||||
# This is "when nimvm" node. Chose the first branch.
|
||||
gen(c, n.sons[0].sons[1])
|
||||
of nkCaseStmt: genCase(c, n)
|
||||
of nkWhileStmt: genWhile(c, n)
|
||||
of nkBlockExpr, nkBlockStmt: genBlock(c, n)
|
||||
of nkReturnStmt: genReturn(c, n)
|
||||
of nkRaiseStmt: genRaise(c, n)
|
||||
of nkBreakStmt: genBreak(c, n)
|
||||
of nkTryStmt: genTry(c, n)
|
||||
of nkStmtList, nkStmtListExpr, nkChckRangeF, nkChckRange64, nkChckRange,
|
||||
nkBracket, nkCurly, nkPar, nkClosure, nkObjConstr:
|
||||
for x in n: gen(c, x)
|
||||
of nkPragmaBlock: gen(c, n.lastSon)
|
||||
of nkDiscardStmt: gen(c, n.sons[0])
|
||||
of nkHiddenStdConv, nkHiddenSubConv, nkConv, nkExprColonExpr, nkExprEqExpr,
|
||||
nkCast:
|
||||
gen(c, n.sons[1])
|
||||
of nkObjDownConv, nkStringToCString, nkCStringToString: gen(c, n.sons[0])
|
||||
of nkVarSection, nkLetSection: genVarSection(c, n)
|
||||
else: discard
|
||||
|
||||
proc dfa(code: seq[Instr]) =
|
||||
# We aggressively push 'undef' values for every 'use v' instruction
|
||||
# until they are eliminated via a 'def v' instruction.
|
||||
# If we manage to push one 'undef' to a 'use' instruction, we produce
|
||||
# an error:
|
||||
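# The kind of situation this analysis is meant to flag (a sketch;
# 'cond' is only known at runtime):
#
#   var x: int
#   if cond:
#     x = 1
#   result = x   # 'use x' is reachable on a path with no preceding 'def x'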
var undef = initIntSet()
|
||||
for i in 0..<code.len:
|
||||
if code[i].kind == use: undef.incl(code[i].sym.id)
|
||||
|
||||
var s = newSeq[IntSet](code.len)
|
||||
for i in 0..<code.len:
|
||||
assign(s[i], undef)
|
||||
|
||||
# In the original paper, W := {0,...,n} is done. This is wasteful, we
|
||||
# have no intention to analyse a program like
|
||||
#
|
||||
# return 3
|
||||
# echo a + b
|
||||
#
|
||||
# any further than necessary.
|
||||
var w = @[0]
|
||||
while w.len > 0:
|
||||
var pc = w[^1]
|
||||
# this simulates a single linear control flow execution:
|
||||
while true:
|
||||
# according to the paper, it is better to shrink the working set here
|
||||
# in this inner loop:
|
||||
let widx = w.find(pc)
|
||||
if widx >= 0: w.del(widx)
|
||||
# our interpretation ![I!]:
|
||||
var sid = -1
|
||||
case code[pc].kind
|
||||
of goto, fork: discard
|
||||
of use, useWithinCall:
|
||||
let sym = code[pc].sym
|
||||
if s[pc].contains(sym.id):
|
||||
localError(code[pc].n.info, "variable read before initialized: " & sym.name.s)
|
||||
of def:
|
||||
sid = code[pc].sym.id
|
||||
|
||||
var pc2: int
|
||||
if code[pc].kind == goto:
|
||||
pc2 = pc + code[pc].dest
|
||||
else:
|
||||
pc2 = pc + 1
|
||||
if code[pc].kind == fork:
|
||||
let l = pc + code[pc].dest
|
||||
if sid >= 0 and s[l].missingOrExcl(sid):
|
||||
w.add l
|
||||
|
||||
if sid >= 0 and s[pc2].missingOrExcl(sid):
|
||||
pc = pc2
|
||||
else:
|
||||
break
|
||||
if pc >= code.len: break
|
||||
|
||||
when false:
|
||||
case code[pc].kind
|
||||
of use:
|
||||
let s = code[pc].sym
|
||||
if undefB.contains(s.id):
|
||||
localError(code[pc].n.info, "variable read before initialized: " & s.name.s)
|
||||
break
|
||||
inc pc
|
||||
of def:
|
||||
let s = code[pc].sym
|
||||
# exclude 'undef' for s for this path through the graph.
|
||||
if not undefB.missingOrExcl(s.id):
|
||||
inc pc
|
||||
else:
|
||||
break
|
||||
#undefB.excl s.id
|
||||
#inc pc
|
||||
when false:
|
||||
let prev = bindings.getOrDefault(s.id)
|
||||
if prev != value:
|
||||
# well now it has a value and we made progress, so
|
||||
bindings[s.id] = value
|
||||
inc pc
|
||||
else:
|
||||
break
|
||||
of fork:
|
||||
let diff = code[pc].dest
|
||||
# we follow pc + 1 and remember the label for later:
|
||||
w.add pc+diff
|
||||
inc pc
|
||||
of goto:
|
||||
let diff = code[pc].dest
|
||||
pc = pc + diff
|
||||
if pc >= code.len: break
|
||||
|
||||
proc dataflowAnalysis*(s: PSym; body: PNode) =
|
||||
var c = Con(code: @[], blocks: @[])
|
||||
gen(c, body)
|
||||
#echoCfg(c.code)
|
||||
dfa(c.code)
|
||||
|
||||
proc constructCfg*(s: PSym; body: PNode): ControlFlowGraph =
|
||||
## constructs a control flow graph for ``body``.
|
||||
var c = Con(code: @[], blocks: @[])
gen(c, body)
shallowCopy(result, c.code)
|
||||
@@ -15,8 +15,8 @@ import
|
||||
ast, strutils, strtabs, options, msgs, os, ropes, idents,
|
||||
wordrecg, syntaxes, renderer, lexer, packages/docutils/rstast,
|
||||
packages/docutils/rst, packages/docutils/rstgen, times,
|
||||
packages/docutils/highlite, importer, sempass2, json, xmltree, cgi,
|
||||
typesrenderer, astalgo
|
||||
packages/docutils/highlite, sempass2, json, xmltree, cgi,
|
||||
typesrenderer, astalgo, modulepaths
|
||||
|
||||
type
|
||||
TSections = array[TSymKind, Rope]
|
||||
@@ -252,7 +252,7 @@ proc getName(d: PDoc, n: PNode, splitAfter = -1): string =
|
||||
of nkIdent: result = esc(d.target, n.ident.s, splitAfter)
|
||||
of nkAccQuoted:
|
||||
result = esc(d.target, "`")
|
||||
for i in 0.. <n.len: result.add(getName(d, n[i], splitAfter))
|
||||
for i in 0..<n.len: result.add(getName(d, n[i], splitAfter))
|
||||
result.add esc(d.target, "`")
|
||||
of nkOpenSymChoice, nkClosedSymChoice:
|
||||
result = getName(d, n[0], splitAfter)
|
||||
@@ -268,7 +268,7 @@ proc getNameIdent(n: PNode): PIdent =
|
||||
of nkIdent: result = n.ident
|
||||
of nkAccQuoted:
|
||||
var r = ""
|
||||
for i in 0.. <n.len: r.add(getNameIdent(n[i]).s)
|
||||
for i in 0..<n.len: r.add(getNameIdent(n[i]).s)
|
||||
result = getIdent(r)
|
||||
of nkOpenSymChoice, nkClosedSymChoice:
|
||||
result = getNameIdent(n[0])
|
||||
@@ -283,7 +283,7 @@ proc getRstName(n: PNode): PRstNode =
|
||||
of nkIdent: result = newRstNode(rnLeaf, n.ident.s)
|
||||
of nkAccQuoted:
|
||||
result = getRstName(n.sons[0])
|
||||
for i in 1 .. <n.len: result.text.add(getRstName(n[i]).text)
|
||||
for i in 1 ..< n.len: result.text.add(getRstName(n[i]).text)
|
||||
of nkOpenSymChoice, nkClosedSymChoice:
|
||||
result = getRstName(n[0])
|
||||
else:
|
||||
|
||||
@@ -225,7 +225,7 @@ proc pack(v: PNode, typ: PType, res: pointer) =
|
||||
awr(pointer, res +! sizeof(pointer))
|
||||
of tyArray:
|
||||
let baseSize = typ.sons[1].getSize
|
||||
for i in 0 .. <v.len:
|
||||
for i in 0 ..< v.len:
|
||||
pack(v.sons[i], typ.sons[1], res +! i * baseSize)
|
||||
of tyObject, tyTuple:
|
||||
packObject(v, typ, res)
|
||||
@@ -291,7 +291,7 @@ proc unpackArray(x: pointer, typ: PType, n: PNode): PNode =
|
||||
if result.kind != nkBracket:
|
||||
globalError(n.info, "cannot map value from FFI")
|
||||
let baseSize = typ.sons[1].getSize
|
||||
for i in 0 .. < result.len:
|
||||
for i in 0 ..< result.len:
|
||||
result.sons[i] = unpack(x +! i * baseSize, typ.sons[1], result.sons[i])
|
||||
|
||||
proc canonNodeKind(k: TNodeKind): TNodeKind =
|
||||
|
||||
@@ -77,7 +77,7 @@ proc evalTemplateArgs(n: PNode, s: PSym; fromHlo: bool): PNode =
|
||||
# now that we have working untyped parameters.
|
||||
genericParams = if sfImmediate in s.flags or fromHlo: 0
|
||||
else: s.ast[genericParamsPos].len
|
||||
expectedRegularParams = <s.typ.len
|
||||
expectedRegularParams = s.typ.len-1
|
||||
givenRegularParams = totalParams - genericParams
|
||||
if givenRegularParams < 0: givenRegularParams = 0
|
||||
|
||||
|
||||
@@ -13,14 +13,10 @@ import
|
||||
llstream, os, wordrecg, idents, strutils, ast, astalgo, msgs, options,
|
||||
renderer, filters
|
||||
|
||||
proc filterTmpl*(stdin: PLLStream, filename: string, call: PNode): PLLStream
|
||||
# #! template(subsChar='$', metaChar='#') | standard(version="0.7.2")
|
||||
# implementation
|
||||
|
||||
type
|
||||
TParseState = enum
|
||||
psDirective, psTempl
|
||||
TTmplParser{.final.} = object
|
||||
TTmplParser = object
|
||||
inp: PLLStream
|
||||
state: TParseState
|
||||
info: TLineInfo
|
||||
@@ -61,6 +57,10 @@ proc scanPar(p: var TTmplParser, d: int) =
|
||||
proc withInExpr(p: TTmplParser): bool {.inline.} =
|
||||
result = p.par > 0 or p.bracket > 0 or p.curly > 0
|
||||
|
||||
const
|
||||
LineContinuationOprs = {'+', '-', '*', '/', '\\', '<', '>', '^',
|
||||
'|', '%', '&', '$', '@', '~', ','}
|
||||
|
||||
proc parseLine(p: var TTmplParser) =
|
||||
var j = 0
|
||||
while p.x[j] == ' ': inc(j)
|
||||
@@ -77,7 +77,7 @@ proc parseLine(p: var TTmplParser) =
|
||||
inc(j)
|
||||
|
||||
scanPar(p, j)
|
||||
p.pendingExprLine = withInExpr(p) or llstream.endsWithOpr(p.x)
|
||||
p.pendingExprLine = withInExpr(p) or p.x.endsWith(LineContinuationOprs)
|
||||
case keyw
|
||||
of "end":
|
||||
if p.indent >= 2:
|
||||
@@ -88,14 +88,14 @@ proc parseLine(p: var TTmplParser) =
|
||||
llStreamWrite(p.outp, spaces(p.indent))
|
||||
llStreamWrite(p.outp, "#end")
|
||||
of "if", "when", "try", "while", "for", "block", "case", "proc", "iterator",
|
||||
"converter", "macro", "template", "method":
|
||||
"converter", "macro", "template", "method", "func":
|
||||
llStreamWrite(p.outp, spaces(p.indent))
|
||||
llStreamWrite(p.outp, substr(p.x, d))
|
||||
inc(p.indent, 2)
|
||||
of "elif", "of", "else", "except", "finally":
|
||||
llStreamWrite(p.outp, spaces(p.indent - 2))
|
||||
llStreamWrite(p.outp, substr(p.x, d))
|
||||
of "wLet", "wVar", "wConst", "wType":
|
||||
of "let", "var", "const", "type":
|
||||
llStreamWrite(p.outp, spaces(p.indent))
|
||||
llStreamWrite(p.outp, substr(p.x, d))
|
||||
if not p.x.contains({':', '='}):
|
||||
@@ -199,7 +199,7 @@ proc parseLine(p: var TTmplParser) =
|
||||
inc(j)
|
||||
llStreamWrite(p.outp, "\\n\"")
|
||||
|
||||
proc filterTmpl(stdin: PLLStream, filename: string, call: PNode): PLLStream =
|
||||
proc filterTmpl*(stdin: PLLStream, filename: string, call: PNode): PLLStream =
|
||||
var p: TTmplParser
|
||||
p.info = newLineInfo(filename, 0, 0)
|
||||
p.outp = llStreamOpen("")
|
||||
|
||||
@@ -13,14 +13,6 @@ import
|
||||
llstream, os, wordrecg, idents, strutils, ast, astalgo, msgs, options,
|
||||
renderer
|
||||
|
||||
proc filterReplace*(stdin: PLLStream, filename: string, call: PNode): PLLStream
|
||||
proc filterStrip*(stdin: PLLStream, filename: string, call: PNode): PLLStream
|
||||
# helpers to retrieve arguments:
|
||||
proc charArg*(n: PNode, name: string, pos: int, default: char): char
|
||||
proc strArg*(n: PNode, name: string, pos: int, default: string): string
|
||||
proc boolArg*(n: PNode, name: string, pos: int, default: bool): bool
|
||||
# implementation
|
||||
|
||||
proc invalidPragma(n: PNode) =
|
||||
localError(n.info, errXNotAllowedHere, renderTree(n, {renderNoComments}))
|
||||
|
||||
@@ -35,26 +27,26 @@ proc getArg(n: PNode, name: string, pos: int): PNode =
|
||||
elif i == pos:
|
||||
return n.sons[i]
|
||||
|
||||
proc charArg(n: PNode, name: string, pos: int, default: char): char =
|
||||
proc charArg*(n: PNode, name: string, pos: int, default: char): char =
|
||||
var x = getArg(n, name, pos)
|
||||
if x == nil: result = default
|
||||
elif x.kind == nkCharLit: result = chr(int(x.intVal))
|
||||
else: invalidPragma(n)
|
||||
|
||||
proc strArg(n: PNode, name: string, pos: int, default: string): string =
|
||||
proc strArg*(n: PNode, name: string, pos: int, default: string): string =
|
||||
var x = getArg(n, name, pos)
|
||||
if x == nil: result = default
|
||||
elif x.kind in {nkStrLit..nkTripleStrLit}: result = x.strVal
|
||||
else: invalidPragma(n)
|
||||
|
||||
proc boolArg(n: PNode, name: string, pos: int, default: bool): bool =
|
||||
proc boolArg*(n: PNode, name: string, pos: int, default: bool): bool =
|
||||
var x = getArg(n, name, pos)
|
||||
if x == nil: result = default
|
||||
elif x.kind == nkIdent and cmpIgnoreStyle(x.ident.s, "true") == 0: result = true
|
||||
elif x.kind == nkIdent and cmpIgnoreStyle(x.ident.s, "false") == 0: result = false
|
||||
else: invalidPragma(n)
|
||||
|
||||
proc filterStrip(stdin: PLLStream, filename: string, call: PNode): PLLStream =
|
||||
proc filterStrip*(stdin: PLLStream, filename: string, call: PNode): PLLStream =
|
||||
var pattern = strArg(call, "startswith", 1, "")
|
||||
var leading = boolArg(call, "leading", 2, true)
|
||||
var trailing = boolArg(call, "trailing", 3, true)
|
||||
@@ -68,7 +60,7 @@ proc filterStrip(stdin: PLLStream, filename: string, call: PNode): PLLStream =
|
||||
llStreamWriteln(result, line)
|
||||
llStreamClose(stdin)
|
||||
|
||||
proc filterReplace(stdin: PLLStream, filename: string, call: PNode): PLLStream =
|
||||
proc filterReplace*(stdin: PLLStream, filename: string, call: PNode): PLLStream =
|
||||
var sub = strArg(call, "sub", 1, "")
|
||||
if len(sub) == 0: invalidPragma(call)
|
||||
var by = strArg(call, "by", 2, "")
|
||||
|
||||
@@ -45,13 +45,13 @@ proc counterInTree(n, loop: PNode; counter: PSym): bool =
|
||||
for it in n:
|
||||
if counterInTree(it.lastSon): return true
|
||||
else:
|
||||
for i in 0 .. <safeLen(n):
|
||||
for i in 0 ..< safeLen(n):
|
||||
if counterInTree(n[i], loop, counter): return true
|
||||
|
||||
proc copyExcept(n: PNode, x, dest: PNode) =
|
||||
if x == n: return
|
||||
if n.kind in {nkStmtList, nkStmtListExpr}:
|
||||
for i in 0 .. <n.len: copyExcept(n[i], x, dest)
|
||||
for i in 0 ..< n.len: copyExcept(n[i], x, dest)
|
||||
else:
|
||||
dest.add n
|
||||
|
||||
|
||||
@@ -7,8 +7,7 @@
|
||||
# distribution, for details about the copyright.
|
||||
#
|
||||
|
||||
## Module that implements ``gorge`` for the compiler as well as
|
||||
## the scriptable import mechanism.
|
||||
## Module that implements ``gorge`` for the compiler.
|
||||
|
||||
import msgs, securehash, os, osproc, streams, strutils, options
|
||||
|
||||
@@ -56,28 +55,3 @@ proc opGorge*(cmd, input, cache: string, info: TLineInfo): (string, int) =
|
||||
result = p.readOutput
|
||||
except IOError, OSError:
|
||||
result = ("", -1)
|
||||
|
||||
proc scriptableImport*(pkg, subdir: string; info: TLineInfo): string =
|
||||
var cmd = getConfigVar("resolver.exe")
|
||||
if cmd.len == 0: cmd = "nimresolve"
|
||||
else: cmd = quoteShell(cmd)
|
||||
cmd.add " --source:"
|
||||
cmd.add quoteShell(info.toFullPath())
|
||||
cmd.add " --stdlib:"
|
||||
cmd.add quoteShell(options.libpath)
|
||||
cmd.add " --project:"
|
||||
cmd.add quoteShell(gProjectFull)
|
||||
if subdir.len != 0:
|
||||
cmd.add " --subdir:"
|
||||
cmd.add quoteShell(subdir)
|
||||
if options.gNoNimblePath:
|
||||
cmd.add " --nonimblepath"
|
||||
cmd.add ' '
|
||||
cmd.add quoteShell(pkg)
|
||||
let (res, exitCode) = opGorge(cmd, "", cmd, info)
|
||||
if exitCode == 0:
|
||||
result = res.strip()
|
||||
elif res.len > 0:
|
||||
localError(info, res)
|
||||
else:
|
||||
localError(info, "cannot resolve: " & (pkg / subdir))
|
||||
|
||||
@@ -247,7 +247,7 @@ proc canon*(n: PNode): PNode =
|
||||
# XXX for now only the new code in 'semparallel' uses this
|
||||
if n.safeLen >= 1:
|
||||
result = shallowCopy(n)
|
||||
for i in 0 .. < n.len:
|
||||
for i in 0 ..< n.len:
|
||||
result.sons[i] = canon(n.sons[i])
|
||||
elif n.kind == nkSym and n.sym.kind == skLet and
|
||||
n.sym.ast.getMagic in (someEq + someAdd + someMul + someMin +
|
||||
|
||||
@@ -36,7 +36,7 @@ proc applyPatterns(c: PContext, n: PNode): PNode =
|
||||
# we apply the last pattern first, so that pattern overriding is possible;
|
||||
# however the resulting AST would better not trigger the old rule then
|
||||
# anymore ;-)
|
||||
for i in countdown(<c.patterns.len, 0):
|
||||
for i in countdown(c.patterns.len-1, 0):
|
||||
let pattern = c.patterns[i]
|
||||
if not isNil(pattern):
|
||||
let x = applyRule(c, pattern, result)
|
||||
@@ -75,7 +75,7 @@ proc hlo(c: PContext, n: PNode): PNode =
|
||||
result = applyPatterns(c, n)
|
||||
if result == n:
|
||||
# no optimization applied, try subtrees:
|
||||
for i in 0 .. < safeLen(result):
|
||||
for i in 0 ..< safeLen(result):
|
||||
let a = result.sons[i]
|
||||
let h = hlo(c, a)
|
||||
if h != a: result.sons[i] = h
|
||||
|
||||
@@ -11,83 +11,11 @@
|
||||
|
||||
import
|
||||
intsets, strutils, os, ast, astalgo, msgs, options, idents, rodread, lookups,
|
||||
semdata, passes, renderer, gorgeimpl
|
||||
semdata, passes, renderer, modulepaths
|
||||
|
||||
proc evalImport*(c: PContext, n: PNode): PNode
|
||||
proc evalFrom*(c: PContext, n: PNode): PNode
|
||||
|
||||
proc lookupPackage(pkg, subdir: PNode): string =
|
||||
let sub = if subdir != nil: renderTree(subdir, {renderNoComments}).replace(" ") else: ""
|
||||
case pkg.kind
|
||||
of nkStrLit, nkRStrLit, nkTripleStrLit:
|
||||
result = scriptableImport(pkg.strVal, sub, pkg.info)
|
||||
of nkIdent:
|
||||
result = scriptableImport(pkg.ident.s, sub, pkg.info)
|
||||
else:
|
||||
localError(pkg.info, "package name must be an identifier or string literal")
|
||||
result = ""
|
||||
|
||||
proc getModuleName*(n: PNode): string =
|
||||
# This returns a short relative module name without the nim extension
|
||||
# e.g. like "system", "importer" or "somepath/module"
|
||||
# The proc won't perform any checks that the path is actually valid
|
||||
case n.kind
|
||||
of nkStrLit, nkRStrLit, nkTripleStrLit:
|
||||
try:
|
||||
result = pathSubs(n.strVal, n.info.toFullPath().splitFile().dir)
|
||||
except ValueError:
|
||||
localError(n.info, "invalid path: " & n.strVal)
|
||||
result = n.strVal
|
||||
of nkIdent:
|
||||
result = n.ident.s
|
||||
of nkSym:
|
||||
result = n.sym.name.s
|
||||
of nkInfix:
|
||||
let n0 = n[0]
|
||||
let n1 = n[1]
|
||||
if n0.kind == nkIdent and n0.ident.id == getIdent("as").id:
|
||||
# XXX hack ahead:
|
||||
n.kind = nkImportAs
|
||||
n.sons[0] = n.sons[1]
|
||||
n.sons[1] = n.sons[2]
|
||||
n.sons.setLen(2)
|
||||
return getModuleName(n.sons[0])
|
||||
if n1.kind == nkPrefix and n1[0].kind == nkIdent and n1[0].ident.s == "$":
|
||||
if n0.kind == nkIdent and n0.ident.s == "/":
|
||||
result = lookupPackage(n1[1], n[2])
|
||||
else:
|
||||
localError(n.info, "only '/' supported with $package notation")
|
||||
result = ""
|
||||
else:
|
||||
# hacky way to implement 'x / y /../ z':
|
||||
result = getModuleName(n1)
|
||||
result.add renderTree(n0, {renderNoComments})
|
||||
result.add getModuleName(n[2])
|
||||
of nkPrefix:
|
||||
if n.sons[0].kind == nkIdent and n.sons[0].ident.s == "$":
|
||||
result = lookupPackage(n[1], nil)
|
||||
else:
|
||||
# hacky way to implement 'x / y /../ z':
|
||||
result = renderTree(n, {renderNoComments}).replace(" ")
|
||||
of nkDotExpr:
|
||||
result = renderTree(n, {renderNoComments}).replace(".", "/")
|
||||
of nkImportAs:
|
||||
result = getModuleName(n.sons[0])
|
||||
else:
|
||||
localError(n.info, errGenerated, "invalid module name: '$1'" % n.renderTree)
|
||||
result = ""
|
||||
|
||||
proc checkModuleName*(n: PNode; doLocalError=true): int32 =
|
||||
# This returns the full canonical path for a given module import
|
||||
let modulename = n.getModuleName
|
||||
let fullPath = findModule(modulename, n.info.toFullPath)
|
||||
if fullPath.len == 0:
|
||||
if doLocalError:
|
||||
localError(n.info, errCannotOpenFile, modulename)
|
||||
result = InvalidFileIDX
|
||||
else:
|
||||
result = fullPath.fileInfoIdx
|
||||
|
||||
proc importPureEnumField*(c: PContext; s: PSym) =
|
||||
var check = strTableGet(c.importTable.symbols, s.name)
|
||||
if check == nil:
|
||||
|
||||
@@ -1363,7 +1363,7 @@ proc genPatternCall(p: PProc; n: PNode; pat: string; typ: PType;
|
||||
case pat[i]
|
||||
of '@':
|
||||
var generated = 0
|
||||
for k in j .. < n.len:
|
||||
for k in j ..< n.len:
|
||||
if generated > 0: add(r.res, ", ")
|
||||
genOtherArg(p, n, k, typ, generated, r)
|
||||
inc i
|
||||
@@ -1528,7 +1528,7 @@ proc createVar(p: PProc, typ: PType, indirect: bool): Rope =
|
||||
of tyTuple:
|
||||
if p.target == targetJS:
|
||||
result = rope("{")
|
||||
for i in 0.. <t.sonsLen:
|
||||
for i in 0..<t.sonsLen:
|
||||
if i > 0: add(result, ", ")
|
||||
addf(result, "Field$1: $2", [i.rope,
|
||||
createVar(p, t.sons[i], false)])
|
||||
@@ -1536,7 +1536,7 @@ proc createVar(p: PProc, typ: PType, indirect: bool): Rope =
|
||||
if indirect: result = "[$1]" % [result]
|
||||
else:
|
||||
result = rope("array(")
|
||||
for i in 0.. <t.sonsLen:
|
||||
for i in 0..<t.sonsLen:
|
||||
if i > 0: add(result, ", ")
|
||||
add(result, createVar(p, t.sons[i], false))
|
||||
add(result, ")")
|
||||
|
||||
@@ -84,7 +84,7 @@ proc genObjectInfo(p: PProc, typ: PType, name: Rope) =
|
||||
|
||||
proc genTupleFields(p: PProc, typ: PType): Rope =
|
||||
var s: Rope = nil
|
||||
for i in 0 .. <typ.len:
|
||||
for i in 0 ..< typ.len:
|
||||
if i > 0: add(s, ", " & tnl)
|
||||
s.addf("{kind: 1, offset: \"Field$1\", len: 0, " &
|
||||
"typ: $2, name: \"Field$1\", sons: null}",
|
||||
|
||||
@@ -126,6 +126,10 @@ type
|
||||
literal*: string # the parsed (string) literal; and
|
||||
# documentation comments are here too
|
||||
line*, col*: int
|
||||
when defined(nimpretty):
|
||||
offsetA*, offsetB*: int # used for pretty printing so that literals
|
||||
# like 0b01 or r"\L" are unaffected
|
||||
commentOffsetA*, commentOffsetB*: int
|
||||
|
||||
TErrorHandler* = proc (info: TLineInfo; msg: TMsgKind; arg: string)
|
||||
TLexer* = object of TBaseLexer
|
||||
@@ -141,10 +145,19 @@ type
|
||||
when defined(nimsuggest):
|
||||
previousToken: TLineInfo
|
||||
|
||||
when defined(nimpretty):
|
||||
var
|
||||
gIndentationWidth*: int
|
||||
|
||||
var gLinesCompiled*: int # all lines that have been compiled
|
||||
|
||||
proc getLineInfo*(L: TLexer, tok: TToken): TLineInfo {.inline.} =
|
||||
newLineInfo(L.fileIdx, tok.line, tok.col)
|
||||
result = newLineInfo(L.fileIdx, tok.line, tok.col)
|
||||
when defined(nimpretty):
|
||||
result.offsetA = tok.offsetA
|
||||
result.offsetB = tok.offsetB
|
||||
result.commentOffsetA = tok.commentOffsetA
|
||||
result.commentOffsetB = tok.commentOffsetB
|
||||
|
||||
proc isKeyword*(kind: TTokType): bool =
|
||||
result = (kind >= tokKeywordLow) and (kind <= tokKeywordHigh)
|
||||
@@ -192,6 +205,9 @@ proc initToken*(L: var TToken) =
|
||||
L.fNumber = 0.0
|
||||
L.base = base10
|
||||
L.ident = nil
|
||||
when defined(nimpretty):
|
||||
L.commentOffsetA = 0
|
||||
L.commentOffsetB = 0
|
||||
|
||||
proc fillToken(L: var TToken) =
|
||||
L.tokType = tkInvalid
|
||||
@@ -202,6 +218,9 @@ proc fillToken(L: var TToken) =
|
||||
L.fNumber = 0.0
|
||||
L.base = base10
|
||||
L.ident = nil
|
||||
when defined(nimpretty):
|
||||
L.commentOffsetA = 0
|
||||
L.commentOffsetB = 0
|
||||
|
||||
proc openLexer*(lex: var TLexer, fileIdx: int32, inputstream: PLLStream;
|
||||
cache: IdentCache) =
|
||||
@@ -245,11 +264,13 @@ proc lexMessagePos(L: var TLexer, msg: TMsgKind, pos: int, arg = "") =
|
||||
proc matchTwoChars(L: TLexer, first: char, second: set[char]): bool =
|
||||
result = (L.buf[L.bufpos] == first) and (L.buf[L.bufpos + 1] in second)
|
||||
|
||||
template tokenBegin(pos) {.dirty.} =
|
||||
template tokenBegin(tok, pos) {.dirty.} =
|
||||
when defined(nimsuggest):
|
||||
var colA = getColNumber(L, pos)
|
||||
when defined(nimpretty):
|
||||
tok.offsetA = L.offsetBase + pos
|
||||
|
||||
template tokenEnd(pos) {.dirty.} =
|
||||
template tokenEnd(tok, pos) {.dirty.} =
|
||||
when defined(nimsuggest):
|
||||
let colB = getColNumber(L, pos)+1
|
||||
if L.fileIdx == gTrackPos.fileIndex and gTrackPos.col in colA..colB and
|
||||
@@ -257,8 +278,10 @@ template tokenEnd(pos) {.dirty.} =
|
||||
L.cursor = CursorPosition.InToken
|
||||
gTrackPos.col = colA.int16
|
||||
colA = 0
|
||||
when defined(nimpretty):
|
||||
tok.offsetB = L.offsetBase + pos
|
||||
|
||||
template tokenEndIgnore(pos) =
|
||||
template tokenEndIgnore(tok, pos) =
|
||||
when defined(nimsuggest):
|
||||
let colB = getColNumber(L, pos)
|
||||
if L.fileIdx == gTrackPos.fileIndex and gTrackPos.col in colA..colB and
|
||||
@@ -266,8 +289,10 @@ template tokenEndIgnore(pos) =
|
||||
gTrackPos.fileIndex = trackPosInvalidFileIdx
|
||||
gTrackPos.line = -1
|
||||
colA = 0
|
||||
when defined(nimpretty):
|
||||
tok.offsetB = L.offsetBase + pos
|
||||
|
||||
template tokenEndPrevious(pos) =
|
||||
template tokenEndPrevious(tok, pos) =
|
||||
when defined(nimsuggest):
|
||||
# when we detect the cursor in whitespace, we attach the track position
|
||||
# to the token that came before that, but only if we haven't detected
|
||||
@@ -279,6 +304,8 @@ template tokenEndPrevious(pos) =
|
||||
gTrackPos = L.previousToken
|
||||
gTrackPosAttached = true
|
||||
colA = 0
|
||||
when defined(nimpretty):
|
||||
tok.offsetB = L.offsetBase + pos
|
||||
|
||||
{.push overflowChecks: off.}
|
||||
# We need to parse the largest uint literal without overflow checks
|
||||
@@ -363,7 +390,7 @@ proc getNumber(L: var TLexer, result: var TToken) =
|
||||
result.literal = ""
|
||||
result.base = base10
|
||||
startpos = L.bufpos
|
||||
tokenBegin(startPos)
|
||||
tokenBegin(result, startPos)
|
||||
|
||||
# First stage: find out base, make verifications, build token literal string
|
||||
if L.buf[L.bufpos] == '0' and L.buf[L.bufpos + 1] in baseCodeChars + {'O'}:
|
||||
@@ -573,7 +600,7 @@ proc getNumber(L: var TLexer, result: var TToken) =
|
||||
lexMessageLitNum(L, errInvalidNumber, startpos)
|
||||
except OverflowError, RangeError:
|
||||
lexMessageLitNum(L, errNumberOutOfRange, startpos)
|
||||
tokenEnd(postPos-1)
|
||||
tokenEnd(result, postPos-1)
|
||||
L.bufpos = postPos
|
||||
|
||||
proc handleHexChar(L: var TLexer, xi: var int) =
|
||||
@@ -666,7 +693,7 @@ proc getEscapedChar(L: var TLexer, tok: var TToken) =
|
||||
proc newString(s: cstring, len: int): string =
|
||||
## XXX, how come there is no support for this?
|
||||
result = newString(len)
|
||||
for i in 0 .. <len:
|
||||
for i in 0 ..< len:
|
||||
result[i] = s[i]
|
||||
|
||||
proc handleCRLF(L: var TLexer, pos: int): int =
|
||||
@@ -691,10 +718,11 @@ proc handleCRLF(L: var TLexer, pos: int): int =
|
||||
else: result = pos
|
||||
|
||||
proc getString(L: var TLexer, tok: var TToken, rawMode: bool) =
|
||||
var pos = L.bufpos + 1 # skip "
|
||||
var pos = L.bufpos
|
||||
var buf = L.buf # put `buf` in a register
|
||||
var line = L.lineNumber # save linenumber for better error message
|
||||
tokenBegin(pos)
|
||||
tokenBegin(tok, pos)
|
||||
inc pos # skip "
|
||||
if buf[pos] == '\"' and buf[pos+1] == '\"':
|
||||
tok.tokType = tkTripleStrLit # long string literal:
|
||||
inc(pos, 2) # skip ""
|
||||
@@ -710,18 +738,18 @@ proc getString(L: var TLexer, tok: var TToken, rawMode: bool) =
|
||||
of '\"':
|
||||
if buf[pos+1] == '\"' and buf[pos+2] == '\"' and
|
||||
buf[pos+3] != '\"':
|
||||
tokenEndIgnore(pos+2)
|
||||
tokenEndIgnore(tok, pos+2)
|
||||
L.bufpos = pos + 3 # skip the three """
|
||||
break
|
||||
add(tok.literal, '\"')
|
||||
inc(pos)
|
||||
of CR, LF:
|
||||
tokenEndIgnore(pos)
|
||||
tokenEndIgnore(tok, pos)
|
||||
pos = handleCRLF(L, pos)
|
||||
buf = L.buf
|
||||
add(tok.literal, tnl)
|
||||
of nimlexbase.EndOfFile:
|
||||
tokenEndIgnore(pos)
|
||||
tokenEndIgnore(tok, pos)
|
||||
var line2 = L.lineNumber
|
||||
L.lineNumber = line
|
||||
lexMessagePos(L, errClosingTripleQuoteExpected, L.lineStart)
|
||||
@@ -742,11 +770,11 @@ proc getString(L: var TLexer, tok: var TToken, rawMode: bool) =
|
||||
inc(pos, 2)
|
||||
add(tok.literal, '"')
|
||||
else:
|
||||
tokenEndIgnore(pos)
|
||||
tokenEndIgnore(tok, pos)
|
||||
inc(pos) # skip '"'
|
||||
break
|
||||
elif c in {CR, LF, nimlexbase.EndOfFile}:
|
||||
tokenEndIgnore(pos)
|
||||
tokenEndIgnore(tok, pos)
|
||||
lexMessage(L, errClosingQuoteExpected)
|
||||
break
|
||||
elif (c == '\\') and not rawMode:
|
||||
@@ -759,7 +787,7 @@ proc getString(L: var TLexer, tok: var TToken, rawMode: bool) =
|
||||
L.bufpos = pos
|
||||
|
||||
proc getCharacter(L: var TLexer, tok: var TToken) =
|
||||
tokenBegin(L.bufpos)
|
||||
tokenBegin(tok, L.bufpos)
|
||||
inc(L.bufpos) # skip '
|
||||
var c = L.buf[L.bufpos]
|
||||
case c
|
||||
@@ -769,14 +797,14 @@ proc getCharacter(L: var TLexer, tok: var TToken) =
|
||||
tok.literal = $c
|
||||
inc(L.bufpos)
|
||||
if L.buf[L.bufpos] != '\'': lexMessage(L, errMissingFinalQuote)
|
||||
tokenEndIgnore(L.bufpos)
|
||||
tokenEndIgnore(tok, L.bufpos)
|
||||
inc(L.bufpos) # skip '
|
||||
|
||||
proc getSymbol(L: var TLexer, tok: var TToken) =
|
||||
var h: Hash = 0
|
||||
var pos = L.bufpos
|
||||
var buf = L.buf
|
||||
tokenBegin(pos)
|
||||
tokenBegin(tok, pos)
|
||||
while true:
|
||||
var c = buf[pos]
|
||||
case c
|
||||
@@ -793,7 +821,7 @@ proc getSymbol(L: var TLexer, tok: var TToken) =
|
||||
break
|
||||
inc(pos)
|
||||
else: break
|
||||
tokenEnd(pos-1)
|
||||
tokenEnd(tok, pos-1)
|
||||
h = !$h
|
||||
tok.ident = L.cache.getIdent(addr(L.buf[L.bufpos]), pos - L.bufpos, h)
|
||||
L.bufpos = pos
|
||||
@@ -814,7 +842,7 @@ proc endOperator(L: var TLexer, tok: var TToken, pos: int,
|
||||
proc getOperator(L: var TLexer, tok: var TToken) =
|
||||
var pos = L.bufpos
|
||||
var buf = L.buf
|
||||
tokenBegin(pos)
|
||||
tokenBegin(tok, pos)
|
||||
var h: Hash = 0
|
||||
while true:
|
||||
var c = buf[pos]
|
||||
@@ -822,7 +850,7 @@ proc getOperator(L: var TLexer, tok: var TToken) =
|
||||
h = h !& ord(c)
|
||||
inc(pos)
|
||||
endOperator(L, tok, pos, h)
|
||||
tokenEnd(pos-1)
|
||||
tokenEnd(tok, pos-1)
|
||||
# advance pos but don't store it in L.bufpos so the next token (which might
|
||||
# be an operator too) gets the preceding spaces:
|
||||
tok.strongSpaceB = 0
|
||||
@@ -837,7 +865,7 @@ proc skipMultiLineComment(L: var TLexer; tok: var TToken; start: int;
|
||||
var pos = start
|
||||
var buf = L.buf
|
||||
var toStrip = 0
|
||||
tokenBegin(pos)
|
||||
tokenBegin(tok, pos)
|
||||
# detect the amount of indentation:
|
||||
if isDoc:
|
||||
toStrip = getColNumber(L, pos)
|
||||
@@ -864,36 +892,37 @@ proc skipMultiLineComment(L: var TLexer; tok: var TToken; start: int;
|
||||
if isDoc:
|
||||
if buf[pos+1] == '#' and buf[pos+2] == '#':
|
||||
if nesting == 0:
|
||||
tokenEndIgnore(pos+2)
|
||||
tokenEndIgnore(tok, pos+2)
|
||||
inc(pos, 3)
|
||||
break
|
||||
dec nesting
|
||||
tok.literal.add ']'
|
||||
elif buf[pos+1] == '#':
|
||||
if nesting == 0:
|
||||
tokenEndIgnore(pos+1)
|
||||
tokenEndIgnore(tok, pos+1)
|
||||
inc(pos, 2)
|
||||
break
|
||||
dec nesting
|
||||
inc pos
|
||||
of CR, LF:
|
||||
tokenEndIgnore(pos)
|
||||
tokenEndIgnore(tok, pos)
|
||||
pos = handleCRLF(L, pos)
|
||||
buf = L.buf
|
||||
# strip leading whitespace:
|
||||
when defined(nimpretty): tok.literal.add "\L"
|
||||
if isDoc:
|
||||
tok.literal.add "\n"
|
||||
when not defined(nimpretty): tok.literal.add "\n"
|
||||
inc tok.iNumber
|
||||
var c = toStrip
|
||||
while buf[pos] == ' ' and c > 0:
|
||||
inc pos
|
||||
dec c
|
||||
of nimlexbase.EndOfFile:
|
||||
tokenEndIgnore(pos)
|
||||
tokenEndIgnore(tok, pos)
|
||||
lexMessagePos(L, errGenerated, pos, "end of multiline comment expected")
|
||||
break
|
||||
else:
|
||||
if isDoc: tok.literal.add buf[pos]
|
||||
if isDoc or defined(nimpretty): tok.literal.add buf[pos]
|
||||
inc(pos)
|
||||
L.bufpos = pos
|
||||
|
||||
@@ -907,7 +936,7 @@ proc scanComment(L: var TLexer, tok: var TToken) =
|
||||
if buf[pos+2] == '[':
|
||||
skipMultiLineComment(L, tok, pos+3, true)
|
||||
return
|
||||
tokenBegin(pos)
|
||||
tokenBegin(tok, pos)
|
||||
inc(pos, 2)
|
||||
|
||||
var toStrip = 0
|
||||
@@ -921,7 +950,7 @@ proc scanComment(L: var TLexer, tok: var TToken) =
|
||||
if buf[pos] == '\\': lastBackslash = pos+1
|
||||
add(tok.literal, buf[pos])
|
||||
inc(pos)
|
||||
tokenEndIgnore(pos)
|
||||
tokenEndIgnore(tok, pos)
|
||||
pos = handleCRLF(L, pos)
|
||||
buf = L.buf
|
||||
var indent = 0
|
||||
@@ -940,14 +969,14 @@ proc scanComment(L: var TLexer, tok: var TToken) =
|
||||
else:
|
||||
if buf[pos] > ' ':
|
||||
L.indentAhead = indent
|
||||
tokenEndIgnore(pos)
|
||||
tokenEndIgnore(tok, pos)
|
||||
break
|
||||
L.bufpos = pos
|
||||
|
||||
proc skip(L: var TLexer, tok: var TToken) =
|
||||
var pos = L.bufpos
|
||||
var buf = L.buf
|
||||
tokenBegin(pos)
|
||||
tokenBegin(tok, pos)
|
||||
tok.strongSpaceA = 0
|
||||
while true:
|
||||
case buf[pos]
|
||||
@@ -958,7 +987,7 @@ proc skip(L: var TLexer, tok: var TToken) =
|
||||
if not L.allowTabs: lexMessagePos(L, errTabulatorsAreNotAllowed, pos)
|
||||
inc(pos)
|
||||
of CR, LF:
|
||||
tokenEndPrevious(pos)
|
||||
tokenEndPrevious(tok, pos)
|
||||
pos = handleCRLF(L, pos)
|
||||
buf = L.buf
|
||||
var indent = 0
|
||||
@@ -980,18 +1009,27 @@ proc skip(L: var TLexer, tok: var TToken) =
|
||||
of '#':
|
||||
# do not skip documentation comment:
|
||||
if buf[pos+1] == '#': break
|
||||
when defined(nimpretty):
|
||||
tok.commentOffsetA = L.offsetBase + pos
|
||||
if buf[pos+1] == '[':
|
||||
skipMultiLineComment(L, tok, pos+2, false)
|
||||
pos = L.bufpos
|
||||
buf = L.buf
|
||||
when defined(nimpretty):
|
||||
tok.commentOffsetB = L.offsetBase + pos
|
||||
else:
|
||||
tokenBegin(pos)
|
||||
tokenBegin(tok, pos)
|
||||
while buf[pos] notin {CR, LF, nimlexbase.EndOfFile}: inc(pos)
|
||||
tokenEndIgnore(pos+1)
|
||||
tokenEndIgnore(tok, pos+1)
|
||||
when defined(nimpretty):
|
||||
tok.commentOffsetB = L.offsetBase + pos + 1
|
||||
else:
|
||||
break # EndOfFile also leaves the loop
|
||||
tokenEndPrevious(pos-1)
|
||||
tokenEndPrevious(tok, pos-1)
|
||||
L.bufpos = pos
|
||||
when defined(nimpretty):
|
||||
if gIndentationWidth <= 0:
|
||||
gIndentationWidth = tok.indent
|
||||
|
||||
proc rawGetTok*(L: var TLexer, tok: var TToken) =
|
||||
template atTokenEnd() {.dirty.} =
|
||||
@@ -1014,7 +1052,7 @@ proc rawGetTok*(L: var TLexer, tok: var TToken) =
|
||||
var c = L.buf[L.bufpos]
|
||||
tok.line = L.lineNumber
|
||||
tok.col = getColNumber(L, L.bufpos)
|
||||
if c in SymStartChars - {'r', 'R', 'l'}:
|
||||
if c in SymStartChars - {'r', 'R'}:
|
||||
getSymbol(L, tok)
|
||||
else:
|
||||
case c
|
||||
@@ -1031,12 +1069,6 @@ proc rawGetTok*(L: var TLexer, tok: var TToken) =
|
||||
of ',':
|
||||
tok.tokType = tkComma
|
||||
inc(L.bufpos)
|
||||
of 'l':
|
||||
# if we parsed exactly one character and its a small L (l), this
|
||||
# is treated as a warning because it may be confused with the number 1
|
||||
if L.buf[L.bufpos+1] notin (SymChars + {'_'}):
|
||||
lexMessage(L, warnSmallLshouldNotBeUsed)
|
||||
getSymbol(L, tok)
|
||||
of 'r', 'R':
|
||||
if L.buf[L.bufpos + 1] == '\"':
|
||||
inc(L.bufpos)
|
||||
|
||||
@@ -84,7 +84,6 @@ const
|
||||
AdditionalLineContinuationOprs = {'#', ':', '='}
|
||||
|
||||
proc endsWithOpr*(x: string): bool =
|
||||
# also used by the standard template filter:
|
||||
result = x.endsWith(LineContinuationOprs)
|
||||
|
||||
proc continueLine(line: string, inTripleString: bool): bool {.inline.} =
|
||||
|
||||
@@ -39,14 +39,18 @@ proc considerQuotedIdent*(n: PNode, origin: PNode = nil): PIdent =
|
||||
of 1: result = considerQuotedIdent(n.sons[0], origin)
|
||||
else:
|
||||
var id = ""
|
||||
for i in 0.. <n.len:
|
||||
for i in 0..<n.len:
|
||||
let x = n.sons[i]
|
||||
case x.kind
|
||||
of nkIdent: id.add(x.ident.s)
|
||||
of nkSym: id.add(x.sym.name.s)
|
||||
else: handleError(n, origin)
|
||||
result = getIdent(id)
|
||||
of nkOpenSymChoice, nkClosedSymChoice: result = n.sons[0].sym.name
|
||||
of nkOpenSymChoice, nkClosedSymChoice:
|
||||
if n[0].kind == nkSym:
|
||||
result = n.sons[0].sym.name
|
||||
else:
|
||||
handleError(n, origin)
|
||||
else:
|
||||
handleError(n, origin)
|
||||
|
||||
@@ -155,7 +159,7 @@ proc ensureNoMissingOrUnusedSymbols(scope: PScope) =
|
||||
var s = initTabIter(it, scope.symbols)
|
||||
var missingImpls = 0
|
||||
while s != nil:
|
||||
if sfForward in s.flags:
|
||||
if sfForward in s.flags and s.kind != skType:
|
||||
# too many 'implementation of X' errors are annoying
|
||||
# and slow 'suggest' down:
|
||||
if missingImpls == 0:
|
||||
@@ -379,7 +383,11 @@ proc initOverloadIter*(o: var TOverloadIter, c: PContext, n: PNode): PSym =
|
||||
result = errorSym(c, n.sons[1])
|
||||
of nkClosedSymChoice, nkOpenSymChoice:
|
||||
o.mode = oimSymChoice
|
||||
result = n.sons[0].sym
|
||||
if n[0].kind == nkSym:
|
||||
result = n.sons[0].sym
|
||||
else:
|
||||
o.mode = oimDone
|
||||
return nil
|
||||
o.symChoiceIndex = 1
|
||||
o.inSymChoice = initIntSet()
|
||||
incl(o.inSymChoice, result.id)
|
||||
|
||||
@@ -70,6 +70,9 @@ proc newTupleAccessRaw*(tup: PNode, i: int): PNode =
|
||||
lit.intVal = i
|
||||
addSon(result, lit)
|
||||
|
||||
proc newTryFinally*(body, final: PNode): PNode =
|
||||
result = newTree(nkTryStmt, body, newTree(nkFinally, final))
|
||||
|
||||
proc lowerTupleUnpackingForAsgn*(n: PNode; owner: PSym): PNode =
|
||||
let value = n.lastSon
|
||||
result = newNodeI(nkStmtList, n.info)
|
||||
@@ -139,6 +142,14 @@ proc rawIndirectAccess*(a: PNode; field: PSym; info: TLineInfo): PNode =
|
||||
addSon(result, newSymNode(field))
|
||||
result.typ = field.typ
|
||||
|
||||
proc rawDirectAccess*(obj, field: PSym): PNode =
|
||||
# returns obj.field as a node
|
||||
assert field.kind == skField
|
||||
result = newNodeI(nkDotExpr, field.info)
|
||||
addSon(result, newSymNode obj)
|
||||
addSon(result, newSymNode field)
|
||||
result.typ = field.typ
|
||||
|
||||
proc lookupInRecord(n: PNode, id: int): PSym =
|
||||
result = nil
|
||||
case n.kind
|
||||
@@ -171,8 +182,9 @@ proc addField*(obj: PType; s: PSym) =
|
||||
field.position = sonsLen(obj.n)
|
||||
addSon(obj.n, newSymNode(field))
|
||||
|
||||
proc addUniqueField*(obj: PType; s: PSym) =
|
||||
if lookupInRecord(obj.n, s.id) == nil:
|
||||
proc addUniqueField*(obj: PType; s: PSym): PSym {.discardable.} =
|
||||
result = lookupInRecord(obj.n, s.id)
|
||||
if result == nil:
|
||||
var field = newSym(skField, getIdent(s.name.s & $obj.n.len), s.owner, s.info)
|
||||
field.id = -s.id
|
||||
let t = skipIntLit(s.typ)
|
||||
@@ -180,6 +192,7 @@ proc addUniqueField*(obj: PType; s: PSym) =
|
||||
assert t.kind != tyStmt
|
||||
field.position = sonsLen(obj.n)
|
||||
addSon(obj.n, newSymNode(field))
|
||||
result = field
|
||||
|
||||
proc newDotExpr(obj, b: PSym): PNode =
|
||||
result = newNodeI(nkDotExpr, obj.info)
|
||||
@@ -452,7 +465,7 @@ proc setupArgsForConcurrency(n: PNode; objType: PType; scratchObj: PSym,
|
||||
varSection, varInit, result: PNode) =
|
||||
let formals = n[0].typ.n
|
||||
let tmpName = getIdent(genPrefix)
|
||||
for i in 1 .. <n.len:
|
||||
for i in 1 ..< n.len:
|
||||
# we pick n's type here, which hopefully is 'tyArray' and not
|
||||
# 'tyOpenArray':
|
||||
var argType = n[i].typ.skipTypes(abstractInst)
|
||||
@@ -508,7 +521,7 @@ proc setupArgsForParallelism(n: PNode; objType: PType; scratchObj: PSym;
|
||||
let tmpName = getIdent(genPrefix)
|
||||
# we need to copy the foreign scratch object fields into local variables
|
||||
# for correctness: These are called 'threadLocal' here.
|
||||
for i in 1 .. <n.len:
|
||||
for i in 1 ..< n.len:
|
||||
let n = n[i]
|
||||
let argType = skipTypes(if i < formals.len: formals[i].typ else: n.typ,
|
||||
abstractInst)
|
||||
|
||||
172
compiler/modulepaths.nim
Normal file
@@ -0,0 +1,172 @@
|
||||
#
|
||||
#
|
||||
# The Nim Compiler
|
||||
# (c) Copyright 2017 Contributors
|
||||
#
|
||||
# See the file "copying.txt", included in this
|
||||
# distribution, for details about the copyright.
|
||||
#
|
||||
|
||||
import ast, renderer, strutils, msgs, options, idents, os
|
||||
|
||||
import nimblecmd
|
||||
|
||||
const
|
||||
considerParentDirs = not defined(noParentProjects)
|
||||
considerNimbleDirs = not defined(noNimbleDirs)
|
||||
|
||||
proc findInNimbleDir(pkg, subdir, dir: string): string =
|
||||
var best = ""
|
||||
var bestv = ""
|
||||
for k, p in os.walkDir(dir, relative=true):
|
||||
if k == pcDir and p.len > pkg.len+1 and
|
||||
p[pkg.len] == '-' and p.startsWith(pkg):
|
||||
let (_, a) = getPathVersion(p)
|
||||
if bestv.len == 0 or bestv < a:
|
||||
bestv = a
|
||||
best = dir / p
|
||||
|
||||
if best.len > 0:
|
||||
var f: File
|
||||
if open(f, best / changeFileExt(pkg, ".nimble-link")):
|
||||
# the second line contains what we're interested in, see:
|
||||
# https://github.com/nim-lang/nimble#nimble-link
|
||||
var override = ""
|
||||
discard readLine(f, override)
|
||||
discard readLine(f, override)
|
||||
close(f)
|
||||
if not override.isAbsolute():
|
||||
best = best / override
|
||||
else:
|
||||
best = override
|
||||
let f = if subdir.len == 0: pkg else: subdir
|
||||
let res = addFileExt(best / f, "nim")
|
||||
if best.len > 0 and fileExists(res):
|
||||
result = res
|
||||
|
||||
const stdlibDirs = [
|
||||
"pure", "core", "arch",
|
||||
"pure/collections",
|
||||
"pure/concurrency", "impure",
|
||||
"wrappers", "wrappers/linenoise",
|
||||
"windows", "posix", "js"]
|
||||
|
||||
proc resolveDollar(project, source, pkg, subdir: string; info: TLineInfo): string =
|
||||
template attempt(a) =
|
||||
let x = addFileExt(a, "nim")
|
||||
if fileExists(x): return x
|
||||
|
||||
case pkg
|
||||
of "stdlib":
|
||||
if subdir.len == 0:
|
||||
return options.libpath
|
||||
else:
|
||||
for candidate in stdlibDirs:
|
||||
attempt(options.libpath / candidate / subdir)
|
||||
of "root":
|
||||
let root = project.splitFile.dir
|
||||
if subdir.len == 0:
|
||||
return root
|
||||
else:
|
||||
attempt(root / subdir)
|
||||
else:
|
||||
when considerParentDirs:
|
||||
var p = parentDir(source.splitFile.dir)
|
||||
# support 'import $karax':
|
||||
let f = if subdir.len == 0: pkg else: subdir
|
||||
|
||||
while p.len > 0:
|
||||
let dir = p / pkg
|
||||
if dirExists(dir):
|
||||
attempt(dir / f)
|
||||
# 2nd attempt: try to use 'karax/karax'
|
||||
attempt(dir / pkg / f)
|
||||
# 3rd attempt: try to use 'karax/src/karax'
|
||||
attempt(dir / "src" / f)
|
||||
attempt(dir / "src" / pkg / f)
|
||||
p = parentDir(p)
|
||||
|
||||
when considerNimbleDirs:
|
||||
if not options.gNoNimblePath:
|
||||
var nimbleDir = getEnv("NIMBLE_DIR")
|
||||
if nimbleDir.len == 0: nimbleDir = getHomeDir() / ".nimble"
|
||||
result = findInNimbleDir(pkg, subdir, nimbleDir / "pkgs")
|
||||
if result.len > 0: return result
|
||||
when not defined(windows):
|
||||
result = findInNimbleDir(pkg, subdir, "/opt/nimble/pkgs")
|
||||
if result.len > 0: return result
|
||||
|
||||
proc scriptableImport(pkg, sub: string; info: TLineInfo): string =
|
||||
result = resolveDollar(gProjectFull, info.toFullPath(), pkg, sub, info)
|
||||
|
||||
proc lookupPackage(pkg, subdir: PNode): string =
|
||||
let sub = if subdir != nil: renderTree(subdir, {renderNoComments}).replace(" ") else: ""
|
||||
case pkg.kind
|
||||
of nkStrLit, nkRStrLit, nkTripleStrLit:
|
||||
result = scriptableImport(pkg.strVal, sub, pkg.info)
|
||||
of nkIdent:
|
||||
result = scriptableImport(pkg.ident.s, sub, pkg.info)
|
||||
else:
|
||||
localError(pkg.info, "package name must be an identifier or string literal")
|
||||
result = ""
|
||||
|
||||
proc getModuleName*(n: PNode): string =
|
||||
# This returns a short relative module name without the nim extension
|
||||
# e.g. like "system", "importer" or "somepath/module"
|
||||
# The proc won't perform any checks that the path is actually valid
|
||||
case n.kind
|
||||
of nkStrLit, nkRStrLit, nkTripleStrLit:
|
||||
try:
|
||||
result = pathSubs(n.strVal, n.info.toFullPath().splitFile().dir)
|
||||
except ValueError:
|
||||
localError(n.info, "invalid path: " & n.strVal)
|
||||
result = n.strVal
|
||||
of nkIdent:
|
||||
result = n.ident.s
|
||||
of nkSym:
|
||||
result = n.sym.name.s
|
||||
of nkInfix:
|
||||
let n0 = n[0]
|
||||
let n1 = n[1]
|
||||
if n0.kind == nkIdent and n0.ident.id == getIdent("as").id:
|
||||
# XXX hack ahead:
|
||||
n.kind = nkImportAs
|
||||
n.sons[0] = n.sons[1]
|
||||
n.sons[1] = n.sons[2]
|
||||
n.sons.setLen(2)
|
||||
return getModuleName(n.sons[0])
|
||||
if n1.kind == nkPrefix and n1[0].kind == nkIdent and n1[0].ident.s == "$":
|
||||
if n0.kind == nkIdent and n0.ident.s == "/":
|
||||
result = lookupPackage(n1[1], n[2])
|
||||
else:
|
||||
localError(n.info, "only '/' supported with $package notation")
|
||||
result = ""
|
||||
else:
|
||||
# hacky way to implement 'x / y /../ z':
|
||||
result = getModuleName(n1)
|
||||
result.add renderTree(n0, {renderNoComments})
|
||||
result.add getModuleName(n[2])
|
||||
of nkPrefix:
|
||||
if n.sons[0].kind == nkIdent and n.sons[0].ident.s == "$":
|
||||
result = lookupPackage(n[1], nil)
|
||||
else:
|
||||
# hacky way to implement 'x / y /../ z':
|
||||
result = renderTree(n, {renderNoComments}).replace(" ")
|
||||
of nkDotExpr:
|
||||
result = renderTree(n, {renderNoComments}).replace(".", "/")
|
||||
of nkImportAs:
|
||||
result = getModuleName(n.sons[0])
|
||||
else:
|
||||
localError(n.info, errGenerated, "invalid module name: '$1'" % n.renderTree)
|
||||
result = ""
|
||||
|
||||
proc checkModuleName*(n: PNode; doLocalError=true): int32 =
|
||||
# This returns the file index of the full canonical path for a given module import
|
||||
let modulename = n.getModuleName
|
||||
let fullPath = findModule(modulename, n.info.toFullPath)
|
||||
if fullPath.len == 0:
|
||||
if doLocalError:
|
||||
localError(n.info, errCannotOpenFile, modulename)
|
||||
result = InvalidFileIDX
|
||||
else:
|
||||
result = fullPath.fileInfoIdx
|
||||
@@ -498,6 +498,9 @@ type
|
||||
# only 8 bytes.
|
||||
line*, col*: int16
|
||||
fileIndex*: int32
|
||||
when defined(nimpretty):
|
||||
offsetA*, offsetB*: int
|
||||
commentOffsetA*, commentOffsetB*: int
|
||||
|
||||
TErrorOutput* = enum
|
||||
eStdOut
|
||||
|
||||
@@ -46,6 +46,7 @@ type
|
||||
# private data:
|
||||
sentinel*: int
|
||||
lineStart*: int # index of last line start in buffer
|
||||
offsetBase*: int # use ``offsetBase + bufpos`` to get the offset
|
||||
|
||||
|
||||
proc openBaseLexer*(L: var TBaseLexer, inputstream: PLLStream,
|
||||
@@ -122,7 +123,8 @@ proc fillBaseLexer(L: var TBaseLexer, pos: int): int =
|
||||
result = pos + 1 # nothing to do
|
||||
else:
|
||||
fillBuffer(L)
|
||||
L.bufpos = 0 # XXX: is this really correct?
|
||||
L.offsetBase += pos + 1
|
||||
L.bufpos = 0
|
||||
result = 0
|
||||
L.lineStart = result
|
||||
|
||||
@@ -146,6 +148,7 @@ proc skipUTF8BOM(L: var TBaseLexer) =
|
||||
proc openBaseLexer(L: var TBaseLexer, inputstream: PLLStream, bufLen = 8192) =
|
||||
assert(bufLen > 0)
|
||||
L.bufpos = 0
|
||||
L.offsetBase = 0
|
||||
L.bufLen = bufLen
|
||||
L.buf = cast[cstring](alloc(bufLen * chrSize))
|
||||
L.sentinel = bufLen - 1
|
||||
|
||||
@@ -13,5 +13,5 @@
|
||||
const
|
||||
MaxSetElements* = 1 shl 16 # (2^16) to support unicode character sets?
|
||||
VersionAsString* = system.NimVersion
|
||||
RodFileVersion* = "1222" # modify this if the rod-format changes!
|
||||
RodFileVersion* = "1223" # modify this if the rod-format changes!
|
||||
|
||||
|
||||
@@ -144,6 +144,7 @@ var
|
||||
gPreciseStack*: bool = false
|
||||
gNoNimblePath* = false
|
||||
gExperimentalMode*: bool
|
||||
newDestructors*: bool
|
||||
|
||||
proc importantComments*(): bool {.inline.} = gCmd in {cmdDoc, cmdIdeTools}
|
||||
proc usesNativeGC*(): bool {.inline.} = gSelectedGC >= gcRefc
|
||||
|
||||
@@ -122,7 +122,7 @@ proc semNodeKindConstraints*(p: PNode): PNode =
|
||||
result.strVal = newStringOfCap(10)
|
||||
result.strVal.add(chr(aqNone.ord))
|
||||
if p.len >= 2:
|
||||
for i in 1.. <p.len:
|
||||
for i in 1..<p.len:
|
||||
compileConstraints(p.sons[i], result.strVal)
|
||||
if result.strVal.len > MaxStackSize-1:
|
||||
internalError(p.info, "parameter pattern too complex")
|
||||
@@ -152,7 +152,7 @@ proc checkForSideEffects*(n: PNode): TSideEffectAnalysis =
|
||||
# indirect call: assume side effect:
|
||||
return seSideEffect
|
||||
# we need to check n[0] too: (FwithSideEffectButReturnsProcWithout)(args)
|
||||
for i in 0 .. <n.len:
|
||||
for i in 0 ..< n.len:
|
||||
let ret = checkForSideEffects(n.sons[i])
|
||||
if ret == seSideEffect: return ret
|
||||
elif ret == seUnknown and result == seNoSideEffect:
|
||||
@@ -163,7 +163,7 @@ proc checkForSideEffects*(n: PNode): TSideEffectAnalysis =
|
||||
else:
|
||||
# assume no side effect:
|
||||
result = seNoSideEffect
|
||||
for i in 0 .. <n.len:
|
||||
for i in 0 ..< n.len:
|
||||
let ret = checkForSideEffects(n.sons[i])
|
||||
if ret == seSideEffect: return ret
|
||||
elif ret == seUnknown and result == seNoSideEffect:
|
||||
|
||||
@@ -28,7 +28,7 @@ import
|
||||
llstream, lexer, idents, strutils, ast, astalgo, msgs
|
||||
|
||||
type
|
||||
TParser*{.final.} = object # A TParser object represents a file that
|
||||
TParser* = object # A TParser object represents a file that
|
||||
# is being parsed
|
||||
currInd: int # current indentation level
|
||||
firstTok, strongSpaces: bool # Has the first token been read?
|
||||
@@ -1905,7 +1905,7 @@ proc parseVariable(p: var TParser): PNode =
|
||||
#| variable = (varTuple / identColonEquals) colonBody? indAndComment
|
||||
if p.tok.tokType == tkParLe: result = parseVarTuple(p)
|
||||
else: result = parseIdentColonEquals(p, {withPragma, withDot})
|
||||
result{-1} = postExprBlocks(p, result{-1})
|
||||
result[^1] = postExprBlocks(p, result[^1])
|
||||
indAndComment(p, result)
|
||||
|
||||
proc parseBind(p: var TParser, k: TNodeKind): PNode =
|
||||
|
||||
@@ -15,6 +15,7 @@ import
|
||||
condsyms, idents, renderer, types, extccomp, math, magicsys, nversion,
|
||||
nimsets, syntaxes, times, rodread, idgen, modulegraphs, reorder
|
||||
|
||||
|
||||
type
|
||||
TPassContext* = object of RootObj # the pass's context
|
||||
fromCache*: bool # true if created by "openCached"
|
||||
@@ -211,7 +212,7 @@ proc processModule*(graph: ModuleGraph; module: PSym, stream: PLLStream,
|
||||
if n.kind == nkEmpty: break
|
||||
sl.add n
|
||||
if sfReorder in module.flags:
|
||||
sl = reorder sl
|
||||
sl = reorder(graph, sl, module, cache)
|
||||
discard processTopLevelStmt(sl, a)
|
||||
break
|
||||
elif not processTopLevelStmt(n, a): break
|
||||
|
||||
@@ -63,7 +63,7 @@ proc sameTrees(a, b: PNode): bool =
|
||||
|
||||
proc inSymChoice(sc, x: PNode): bool =
|
||||
if sc.kind == nkClosedSymChoice:
|
||||
for i in 0.. <sc.len:
|
||||
for i in 0..<sc.len:
|
||||
if sc.sons[i].sym == x.sym: return true
|
||||
elif sc.kind == nkOpenSymChoice:
|
||||
# same name suffices for open sym choices!
|
||||
@@ -83,7 +83,7 @@ proc isPatternParam(c: PPatternContext, p: PNode): bool {.inline.} =
|
||||
result = p.kind == nkSym and p.sym.kind == skParam and p.sym.owner == c.owner
|
||||
|
||||
proc matchChoice(c: PPatternContext, p, n: PNode): bool =
|
||||
for i in 1 .. <p.len:
|
||||
for i in 1 ..< p.len:
|
||||
if matches(c, p.sons[i], n): return true
|
||||
|
||||
proc bindOrCheck(c: PPatternContext, param: PSym, n: PNode): bool =
|
||||
@@ -115,7 +115,7 @@ proc matchNested(c: PPatternContext, p, n: PNode, rpn: bool): bool =
|
||||
if rpn: arglist.add(n.sons[0])
|
||||
elif n.kind == nkHiddenStdConv and n.sons[1].kind == nkBracket:
|
||||
let n = n.sons[1]
|
||||
for i in 0.. <n.len:
|
||||
for i in 0..<n.len:
|
||||
if not matchStarAux(c, op, n[i], arglist, rpn): return false
|
||||
elif checkTypes(c, p.sons[2].sym, n):
|
||||
add(arglist, n)
|
||||
@@ -186,7 +186,7 @@ proc matches(c: PPatternContext, p, n: PNode): bool =
|
||||
# unpack varargs:
|
||||
let n = lastSon(n).sons[1]
|
||||
arglist = newNodeI(nkArgList, n.info, n.len)
|
||||
for i in 0.. <n.len: arglist.sons[i] = n.sons[i]
|
||||
for i in 0..<n.len: arglist.sons[i] = n.sons[i]
|
||||
else:
|
||||
arglist = newNodeI(nkArgList, n.info, sonsLen(n) - plen + 1)
|
||||
# f(1, 2, 3)
|
||||
@@ -206,7 +206,7 @@ proc matches(c: PPatternContext, p, n: PNode): bool =
|
||||
|
||||
proc matchStmtList(c: PPatternContext, p, n: PNode): PNode =
|
||||
proc matchRange(c: PPatternContext, p, n: PNode, i: int): bool =
|
||||
for j in 0 .. <p.len:
|
||||
for j in 0 ..< p.len:
|
||||
if not matches(c, p.sons[j], n.sons[i+j]):
|
||||
# we need to undo any bindings:
|
||||
if not isNil(c.mapping): c.mapping = nil
|
||||
@@ -229,7 +229,7 @@ proc matchStmtList(c: PPatternContext, p, n: PNode): PNode =
|
||||
|
||||
proc aliasAnalysisRequested(params: PNode): bool =
|
||||
if params.len >= 2:
|
||||
for i in 1 .. < params.len:
|
||||
for i in 1 ..< params.len:
|
||||
let param = params.sons[i].sym
|
||||
if whichAlias(param) != aqNone: return true
|
||||
|
||||
@@ -237,7 +237,7 @@ proc addToArgList(result, n: PNode) =
|
||||
if n.typ != nil and n.typ.kind != tyStmt:
|
||||
if n.kind != nkArgList: result.add(n)
|
||||
else:
|
||||
for i in 0 .. <n.len: result.add(n.sons[i])
|
||||
for i in 0 ..< n.len: result.add(n.sons[i])
|
||||
|
||||
proc applyRule*(c: PContext, s: PSym, n: PNode): PNode =
|
||||
## returns a tree to semcheck if the rule triggered; nil otherwise
|
||||
@@ -256,7 +256,7 @@ proc applyRule*(c: PContext, s: PSym, n: PNode): PNode =
|
||||
var args: PNode
|
||||
if requiresAA:
|
||||
args = newNodeI(nkArgList, n.info)
|
||||
for i in 1 .. < params.len:
|
||||
for i in 1 ..< params.len:
|
||||
let param = params.sons[i].sym
|
||||
let x = getLazy(ctx, param)
|
||||
# couldn't bind parameter:
|
||||
@@ -265,7 +265,7 @@ proc applyRule*(c: PContext, s: PSym, n: PNode): PNode =
|
||||
if requiresAA: addToArgList(args, x)
|
||||
# perform alias analysis here:
|
||||
if requiresAA:
|
||||
for i in 1 .. < params.len:
|
||||
for i in 1 ..< params.len:
|
||||
var rs = result.sons[i]
|
||||
let param = params.sons[i].sym
|
||||
case whichAlias(param)
|
||||
|
||||
@@ -25,7 +25,7 @@ const
|
||||
wBorrow, wExtern, wImportCompilerProc, wThread, wImportCpp, wImportObjC,
|
||||
wAsmNoStackFrame, wError, wDiscardable, wNoInit, wDestructor, wCodegenDecl,
|
||||
wGensym, wInject, wRaises, wTags, wLocks, wDelegator, wGcSafe,
|
||||
wOverride, wConstructor, wExportNims, wUsed}
|
||||
wOverride, wConstructor, wExportNims, wUsed, wLiftLocals}
|
||||
converterPragmas* = procPragmas
|
||||
methodPragmas* = procPragmas+{wBase}-{wImportCpp}
|
||||
templatePragmas* = {wImmediate, wDeprecated, wError, wGensym, wInject, wDirty,
|
||||
@@ -55,7 +55,7 @@ const
|
||||
wPure, wHeader, wCompilerproc, wFinal, wSize, wExtern, wShallow,
|
||||
wImportCpp, wImportObjC, wError, wIncompleteStruct, wByCopy, wByRef,
|
||||
wInheritable, wGensym, wInject, wRequiresInit, wUnchecked, wUnion, wPacked,
|
||||
wBorrow, wGcSafe, wExportNims, wPartial, wUsed, wExplain}
|
||||
wBorrow, wGcSafe, wExportNims, wPartial, wUsed, wExplain, wPackage}
|
||||
fieldPragmas* = {wImportc, wExportc, wDeprecated, wExtern,
|
||||
wImportCpp, wImportObjC, wError, wGuard, wBitsize, wUsed}
|
||||
varPragmas* = {wImportc, wExportc, wVolatile, wRegister, wThreadVar, wNodecl,
|
||||
@@ -70,6 +70,14 @@ const
|
||||
wThread, wRaises, wLocks, wTags, wGcSafe}
|
||||
allRoutinePragmas* = methodPragmas + iteratorPragmas + lambdaPragmas
|
||||
|
||||
proc getPragmaVal*(procAst: PNode; name: TSpecialWord): PNode =
|
||||
let p = procAst[pragmasPos]
|
||||
if p.kind == nkEmpty: return nil
|
||||
for it in p:
|
||||
if it.kind == nkExprColonExpr and it[0].kind == nkIdent and
|
||||
it[0].ident.id == ord(name):
|
||||
return it[1]
|
||||
|
||||
proc pragma*(c: PContext, sym: PSym, n: PNode, validPragmas: TSpecialWords)
|
||||
# implementation
|
||||
|
||||
@@ -575,7 +583,7 @@ proc pragmaLockStmt(c: PContext; it: PNode) =
|
||||
if n.kind != nkBracket:
|
||||
localError(n.info, errGenerated, "locks pragma takes a list of expressions")
|
||||
else:
|
||||
for i in 0 .. <n.len:
|
||||
for i in 0 ..< n.len:
|
||||
n.sons[i] = c.semExpr(c, n.sons[i])
|
||||
|
||||
proc pragmaLocks(c: PContext, it: PNode): TLockLevel =
|
||||
@@ -799,6 +807,10 @@ proc singlePragma(c: PContext, sym: PSym, n: PNode, i: int,
|
||||
noVal(it)
|
||||
if sym.typ == nil or tfFinal in sym.typ.flags: invalidPragma(it)
|
||||
else: incl(sym.typ.flags, tfInheritable)
|
||||
of wPackage:
|
||||
noVal(it)
|
||||
if sym.typ == nil: invalidPragma(it)
|
||||
else: incl(sym.flags, sfForward)
|
||||
of wAcyclic:
|
||||
noVal(it)
|
||||
if sym.typ == nil: invalidPragma(it)
|
||||
@@ -974,6 +986,7 @@ proc singlePragma(c: PContext, sym: PSym, n: PNode, i: int,
|
||||
noVal(it)
|
||||
if sym == nil: invalidPragma(it)
|
||||
else: sym.flags.incl sfUsed
|
||||
of wLiftLocals: discard
|
||||
else: invalidPragma(it)
|
||||
else: invalidPragma(it)
|
||||
|
||||
|
||||
@@ -17,12 +17,12 @@ type
|
||||
renderNone, renderNoBody, renderNoComments, renderDocComments,
|
||||
renderNoPragmas, renderIds, renderNoProcDefs
|
||||
TRenderFlags* = set[TRenderFlag]
|
||||
TRenderTok*{.final.} = object
|
||||
TRenderTok* = object
|
||||
kind*: TTokType
|
||||
length*: int16
|
||||
|
||||
TRenderTokSeq* = seq[TRenderTok]
|
||||
TSrcGen*{.final.} = object
|
||||
TSrcGen* = object
|
||||
indent*: int
|
||||
lineLen*: int
|
||||
pos*: int # current position for iteration over the buffer
|
||||
@@ -37,15 +37,11 @@ type
|
||||
inGenericParams: bool
|
||||
checkAnon: bool # we're in a context that can contain sfAnon
|
||||
inPragma: int
|
||||
when defined(nimpretty):
|
||||
pendingNewlineCount: int
|
||||
origContent: string
|
||||
|
||||
|
||||
proc renderModule*(n: PNode, filename: string, renderFlags: TRenderFlags = {})
|
||||
proc renderTree*(n: PNode, renderFlags: TRenderFlags = {}): string
|
||||
proc initTokRender*(r: var TSrcGen, n: PNode, renderFlags: TRenderFlags = {})
|
||||
proc getNextTok*(r: var TSrcGen, kind: var TTokType, literal: var string)
|
||||
|
||||
proc `$`*(n: PNode): string = n.renderTree
|
||||
# implementation
|
||||
# We render the source code in two phases: the first
|
||||
# determines how long the subtree will likely be, the second
|
||||
# phase appends to a buffer that will be the output.
|
||||
@@ -67,9 +63,31 @@ proc renderDefinitionName*(s: PSym, noQuotes = false): string =
|
||||
else:
|
||||
result = '`' & x & '`'
|
||||
|
||||
when not defined(nimpretty):
|
||||
const
|
||||
IndentWidth = 2
|
||||
longIndentWid = IndentWidth * 2
|
||||
else:
|
||||
template IndentWidth: untyped = lexer.gIndentationWidth
|
||||
template longIndentWid: untyped = IndentWidth() * 2
|
||||
|
||||
proc minmaxLine(n: PNode): (int, int) =
|
||||
case n.kind
|
||||
of nkTripleStrLit:
|
||||
result = (n.info.line.int, n.info.line.int + countLines(n.strVal))
|
||||
of nkCommentStmt:
|
||||
result = (n.info.line.int, n.info.line.int + countLines(n.comment))
|
||||
else:
|
||||
result = (n.info.line.int, n.info.line.int)
|
||||
for i in 0 ..< safeLen(n):
|
||||
let (currMin, currMax) = minmaxLine(n[i])
|
||||
if currMin < result[0]: result[0] = currMin
|
||||
if currMax > result[1]: result[1] = currMax
|
||||
|
||||
proc lineDiff(a, b: PNode): int =
|
||||
result = minmaxLine(b)[0] - minmaxLine(a)[1]
|
||||
|
||||
const
|
||||
IndentWidth = 2
|
||||
longIndentWid = 4
|
||||
MaxLineLen = 80
|
||||
LineCommentColumn = 30
|
||||
|
||||
@@ -95,7 +113,11 @@ proc addTok(g: var TSrcGen, kind: TTokType, s: string) =
|
||||
|
||||
proc addPendingNL(g: var TSrcGen) =
|
||||
if g.pendingNL >= 0:
|
||||
addTok(g, tkSpaces, "\n" & spaces(g.pendingNL))
|
||||
when defined(nimpretty):
|
||||
let newlines = repeat("\n", clamp(g.pendingNewlineCount, 1, 3))
|
||||
else:
|
||||
const newlines = "\n"
|
||||
addTok(g, tkSpaces, newlines & spaces(g.pendingNL))
|
||||
g.lineLen = g.pendingNL
|
||||
g.pendingNL = - 1
|
||||
g.pendingWhitespace = -1
|
||||
@@ -119,11 +141,17 @@ proc putNL(g: var TSrcGen) =
|
||||
|
||||
proc optNL(g: var TSrcGen, indent: int) =
|
||||
g.pendingNL = indent
|
||||
g.lineLen = indent # BUGFIX
|
||||
g.lineLen = indent
|
||||
when defined(nimpretty): g.pendingNewlineCount = 0
|
||||
|
||||
proc optNL(g: var TSrcGen) =
|
||||
optNL(g, g.indent)
|
||||
|
||||
proc optNL(g: var TSrcGen; a, b: PNode) =
|
||||
g.pendingNL = g.indent
|
||||
g.lineLen = g.indent
|
||||
when defined(nimpretty): g.pendingNewlineCount = lineDiff(a, b)
|
||||
|
||||
proc indentNL(g: var TSrcGen) =
|
||||
inc(g.indent, IndentWidth)
|
||||
g.pendingNL = g.indent
|
||||
@@ -284,8 +312,8 @@ proc gcoms(g: var TSrcGen) =
|
||||
for i in countup(0, high(g.comStack)): gcom(g, g.comStack[i])
|
||||
popAllComs(g)
|
||||
|
||||
proc lsub(n: PNode): int
|
||||
proc litAux(n: PNode, x: BiggestInt, size: int): string =
|
||||
proc lsub(g: TSrcGen; n: PNode): int
|
||||
proc litAux(g: TSrcGen; n: PNode, x: BiggestInt, size: int): string =
|
||||
proc skip(t: PType): PType =
|
||||
result = t
|
||||
while result.kind in {tyGenericInst, tyRange, tyVar, tyDistinct,
|
||||
@@ -302,14 +330,23 @@ proc litAux(n: PNode, x: BiggestInt, size: int): string =
|
||||
elif nfBase16 in n.flags: result = "0x" & toHex(x, size * 2)
|
||||
else: result = $x
|
||||
|
||||
proc ulitAux(n: PNode, x: BiggestInt, size: int): string =
|
||||
proc ulitAux(g: TSrcGen; n: PNode, x: BiggestInt, size: int): string =
|
||||
if nfBase2 in n.flags: result = "0b" & toBin(x, size * 8)
|
||||
elif nfBase8 in n.flags: result = "0o" & toOct(x, size * 3)
|
||||
elif nfBase16 in n.flags: result = "0x" & toHex(x, size * 2)
|
||||
else: result = $x
|
||||
# XXX proper unsigned output!
|
||||
|
||||
proc atom(n: PNode): string =
|
||||
proc atom(g: TSrcGen; n: PNode): string =
|
||||
when defined(nimpretty):
|
||||
let comment = if n.info.commentOffsetA < n.info.commentOffsetB:
|
||||
" " & substr(g.origContent, n.info.commentOffsetA, n.info.commentOffsetB)
|
||||
else:
|
||||
""
|
||||
if n.info.offsetA <= n.info.offsetB:
|
||||
# for some constructed tokens this cannot be the case and we're better
# off not messing with the offset then.
|
||||
return substr(g.origContent, n.info.offsetA, n.info.offsetB) & comment
|
||||
var f: float32
|
||||
case n.kind
|
||||
of nkEmpty: result = ""
|
||||
@@ -319,30 +356,30 @@ proc atom(n: PNode): string =
|
||||
of nkRStrLit: result = "r\"" & replace(n.strVal, "\"", "\"\"") & '\"'
|
||||
of nkTripleStrLit: result = "\"\"\"" & n.strVal & "\"\"\""
|
||||
of nkCharLit: result = '\'' & toNimChar(chr(int(n.intVal))) & '\''
|
||||
of nkIntLit: result = litAux(n, n.intVal, 4)
|
||||
of nkInt8Lit: result = litAux(n, n.intVal, 1) & "\'i8"
|
||||
of nkInt16Lit: result = litAux(n, n.intVal, 2) & "\'i16"
|
||||
of nkInt32Lit: result = litAux(n, n.intVal, 4) & "\'i32"
|
||||
of nkInt64Lit: result = litAux(n, n.intVal, 8) & "\'i64"
|
||||
of nkUIntLit: result = ulitAux(n, n.intVal, 4) & "\'u"
|
||||
of nkUInt8Lit: result = ulitAux(n, n.intVal, 1) & "\'u8"
|
||||
of nkUInt16Lit: result = ulitAux(n, n.intVal, 2) & "\'u16"
|
||||
of nkUInt32Lit: result = ulitAux(n, n.intVal, 4) & "\'u32"
|
||||
of nkUInt64Lit: result = ulitAux(n, n.intVal, 8) & "\'u64"
|
||||
of nkIntLit: result = litAux(g, n, n.intVal, 4)
|
||||
of nkInt8Lit: result = litAux(g, n, n.intVal, 1) & "\'i8"
|
||||
of nkInt16Lit: result = litAux(g, n, n.intVal, 2) & "\'i16"
|
||||
of nkInt32Lit: result = litAux(g, n, n.intVal, 4) & "\'i32"
|
||||
of nkInt64Lit: result = litAux(g, n, n.intVal, 8) & "\'i64"
|
||||
of nkUIntLit: result = ulitAux(g, n, n.intVal, 4) & "\'u"
|
||||
of nkUInt8Lit: result = ulitAux(g, n, n.intVal, 1) & "\'u8"
|
||||
of nkUInt16Lit: result = ulitAux(g, n, n.intVal, 2) & "\'u16"
|
||||
of nkUInt32Lit: result = ulitAux(g, n, n.intVal, 4) & "\'u32"
|
||||
of nkUInt64Lit: result = ulitAux(g, n, n.intVal, 8) & "\'u64"
|
||||
of nkFloatLit:
|
||||
if n.flags * {nfBase2, nfBase8, nfBase16} == {}: result = $(n.floatVal)
|
||||
else: result = litAux(n, (cast[PInt64](addr(n.floatVal)))[] , 8)
|
||||
else: result = litAux(g, n, (cast[PInt64](addr(n.floatVal)))[] , 8)
|
||||
of nkFloat32Lit:
|
||||
if n.flags * {nfBase2, nfBase8, nfBase16} == {}:
|
||||
result = $n.floatVal & "\'f32"
|
||||
else:
|
||||
f = n.floatVal.float32
|
||||
result = litAux(n, (cast[PInt32](addr(f)))[], 4) & "\'f32"
|
||||
result = litAux(g, n, (cast[PInt32](addr(f)))[], 4) & "\'f32"
|
||||
of nkFloat64Lit:
|
||||
if n.flags * {nfBase2, nfBase8, nfBase16} == {}:
|
||||
result = $n.floatVal & "\'f64"
|
||||
else:
|
||||
result = litAux(n, (cast[PInt64](addr(n.floatVal)))[], 8) & "\'f64"
|
||||
result = litAux(g, n, (cast[PInt64](addr(n.floatVal)))[], 8) & "\'f64"
|
||||
of nkNilLit: result = "nil"
|
||||
of nkType:
|
||||
if (n.typ != nil) and (n.typ.sym != nil): result = n.typ.sym.name.s
|
||||
@@ -351,21 +388,21 @@ proc atom(n: PNode): string =
|
||||
internalError("rnimsyn.atom " & $n.kind)
|
||||
result = ""
|
||||
|
||||
proc lcomma(n: PNode, start: int = 0, theEnd: int = - 1): int =
|
||||
proc lcomma(g: TSrcGen; n: PNode, start: int = 0, theEnd: int = - 1): int =
|
||||
assert(theEnd < 0)
|
||||
result = 0
|
||||
for i in countup(start, sonsLen(n) + theEnd):
|
||||
inc(result, lsub(n.sons[i]))
|
||||
inc(result, lsub(g, n.sons[i]))
|
||||
inc(result, 2) # for ``, ``
|
||||
if result > 0:
|
||||
dec(result, 2) # last does not get a comma!
|
||||
|
||||
proc lsons(n: PNode, start: int = 0, theEnd: int = - 1): int =
|
||||
proc lsons(g: TSrcGen; n: PNode, start: int = 0, theEnd: int = - 1): int =
|
||||
assert(theEnd < 0)
|
||||
result = 0
|
||||
for i in countup(start, sonsLen(n) + theEnd): inc(result, lsub(n.sons[i]))
|
||||
for i in countup(start, sonsLen(n) + theEnd): inc(result, lsub(g, n.sons[i]))
|
||||
|
||||
proc lsub(n: PNode): int =
|
||||
proc lsub(g: TSrcGen; n: PNode): int =
|
||||
# computes the length of a tree
|
||||
if isNil(n): return 0
|
||||
if n.comment != nil: return MaxLineLen + 1
|
||||
@@ -373,108 +410,108 @@ proc lsub(n: PNode): int =
|
||||
of nkEmpty: result = 0
|
||||
of nkTripleStrLit:
|
||||
if containsNL(n.strVal): result = MaxLineLen + 1
|
||||
else: result = len(atom(n))
|
||||
else: result = len(atom(g, n))
|
||||
of succ(nkEmpty)..pred(nkTripleStrLit), succ(nkTripleStrLit)..nkNilLit:
|
||||
result = len(atom(n))
|
||||
result = len(atom(g, n))
|
||||
of nkCall, nkBracketExpr, nkCurlyExpr, nkConv, nkPattern, nkObjConstr:
|
||||
result = lsub(n.sons[0]) + lcomma(n, 1) + 2
|
||||
of nkHiddenStdConv, nkHiddenSubConv, nkHiddenCallConv: result = lsub(n[1])
|
||||
of nkCast: result = lsub(n.sons[0]) + lsub(n.sons[1]) + len("cast[]()")
|
||||
of nkAddr: result = (if n.len>0: lsub(n.sons[0]) + len("addr()") else: 4)
|
||||
of nkStaticExpr: result = lsub(n.sons[0]) + len("static_")
|
||||
of nkHiddenAddr, nkHiddenDeref: result = lsub(n.sons[0])
|
||||
of nkCommand: result = lsub(n.sons[0]) + lcomma(n, 1) + 1
|
||||
of nkExprEqExpr, nkAsgn, nkFastAsgn: result = lsons(n) + 3
|
||||
of nkPar, nkCurly, nkBracket, nkClosure: result = lcomma(n) + 2
|
||||
of nkArgList: result = lcomma(n)
|
||||
result = lsub(g, n.sons[0]) + lcomma(g, n, 1) + 2
|
||||
of nkHiddenStdConv, nkHiddenSubConv, nkHiddenCallConv: result = lsub(g, n[1])
|
||||
of nkCast: result = lsub(g, n.sons[0]) + lsub(g, n.sons[1]) + len("cast[]()")
|
||||
of nkAddr: result = (if n.len>0: lsub(g, n.sons[0]) + len("addr()") else: 4)
|
||||
of nkStaticExpr: result = lsub(g, n.sons[0]) + len("static_")
|
||||
of nkHiddenAddr, nkHiddenDeref: result = lsub(g, n.sons[0])
|
||||
of nkCommand: result = lsub(g, n.sons[0]) + lcomma(g, n, 1) + 1
|
||||
of nkExprEqExpr, nkAsgn, nkFastAsgn: result = lsons(g, n) + 3
|
||||
of nkPar, nkCurly, nkBracket, nkClosure: result = lcomma(g, n) + 2
|
||||
of nkArgList: result = lcomma(g, n)
|
||||
of nkTableConstr:
|
||||
result = if n.len > 0: lcomma(n) + 2 else: len("{:}")
|
||||
result = if n.len > 0: lcomma(g, n) + 2 else: len("{:}")
|
||||
of nkClosedSymChoice, nkOpenSymChoice:
|
||||
result = lsons(n) + len("()") + sonsLen(n) - 1
|
||||
of nkTupleTy: result = lcomma(n) + len("tuple[]")
|
||||
result = lsons(g, n) + len("()") + sonsLen(n) - 1
|
||||
of nkTupleTy: result = lcomma(g, n) + len("tuple[]")
|
||||
of nkTupleClassTy: result = len("tuple")
|
||||
of nkDotExpr: result = lsons(n) + 1
|
||||
of nkBind: result = lsons(n) + len("bind_")
|
||||
of nkBindStmt: result = lcomma(n) + len("bind_")
|
||||
of nkMixinStmt: result = lcomma(n) + len("mixin_")
|
||||
of nkCheckedFieldExpr: result = lsub(n.sons[0])
|
||||
of nkLambda: result = lsons(n) + len("proc__=_")
|
||||
of nkDo: result = lsons(n) + len("do__:_")
|
||||
of nkDotExpr: result = lsons(g, n) + 1
|
||||
of nkBind: result = lsons(g, n) + len("bind_")
|
||||
of nkBindStmt: result = lcomma(g, n) + len("bind_")
|
||||
of nkMixinStmt: result = lcomma(g, n) + len("mixin_")
|
||||
of nkCheckedFieldExpr: result = lsub(g, n.sons[0])
|
||||
of nkLambda: result = lsons(g, n) + len("proc__=_")
|
||||
of nkDo: result = lsons(g, n) + len("do__:_")
|
||||
of nkConstDef, nkIdentDefs:
|
||||
result = lcomma(n, 0, - 3)
|
||||
result = lcomma(g, n, 0, - 3)
|
||||
var L = sonsLen(n)
|
||||
if n.sons[L - 2].kind != nkEmpty: result = result + lsub(n.sons[L - 2]) + 2
|
||||
if n.sons[L - 1].kind != nkEmpty: result = result + lsub(n.sons[L - 1]) + 3
|
||||
of nkVarTuple: result = lcomma(n, 0, - 3) + len("() = ") + lsub(lastSon(n))
|
||||
of nkChckRangeF: result = len("chckRangeF") + 2 + lcomma(n)
|
||||
of nkChckRange64: result = len("chckRange64") + 2 + lcomma(n)
|
||||
of nkChckRange: result = len("chckRange") + 2 + lcomma(n)
|
||||
if n.sons[L - 2].kind != nkEmpty: result = result + lsub(g, n.sons[L - 2]) + 2
|
||||
if n.sons[L - 1].kind != nkEmpty: result = result + lsub(g, n.sons[L - 1]) + 3
|
||||
of nkVarTuple: result = lcomma(g, n, 0, - 3) + len("() = ") + lsub(g, lastSon(n))
|
||||
of nkChckRangeF: result = len("chckRangeF") + 2 + lcomma(g, n)
|
||||
of nkChckRange64: result = len("chckRange64") + 2 + lcomma(g, n)
|
||||
of nkChckRange: result = len("chckRange") + 2 + lcomma(g, n)
|
||||
of nkObjDownConv, nkObjUpConv, nkStringToCString, nkCStringToString:
|
||||
result = 2
|
||||
if sonsLen(n) >= 1: result = result + lsub(n.sons[0])
|
||||
result = result + lcomma(n, 1)
|
||||
of nkExprColonExpr: result = lsons(n) + 2
|
||||
of nkInfix: result = lsons(n) + 2
|
||||
if sonsLen(n) >= 1: result = result + lsub(g, n.sons[0])
|
||||
result = result + lcomma(g, n, 1)
|
||||
of nkExprColonExpr: result = lsons(g, n) + 2
|
||||
of nkInfix: result = lsons(g, n) + 2
|
||||
of nkPrefix:
|
||||
result = lsons(n)+1+(if n.len > 0 and n.sons[1].kind == nkInfix: 2 else: 0)
|
||||
of nkPostfix: result = lsons(n)
|
||||
of nkCallStrLit: result = lsons(n)
|
||||
of nkPragmaExpr: result = lsub(n.sons[0]) + lcomma(n, 1)
|
||||
of nkRange: result = lsons(n) + 2
|
||||
of nkDerefExpr: result = lsub(n.sons[0]) + 2
|
||||
of nkAccQuoted: result = lsons(n) + 2
|
||||
result = lsons(g, n)+1+(if n.len > 0 and n.sons[1].kind == nkInfix: 2 else: 0)
|
||||
of nkPostfix: result = lsons(g, n)
|
||||
of nkCallStrLit: result = lsons(g, n)
|
||||
of nkPragmaExpr: result = lsub(g, n.sons[0]) + lcomma(g, n, 1)
|
||||
of nkRange: result = lsons(g, n) + 2
|
||||
of nkDerefExpr: result = lsub(g, n.sons[0]) + 2
|
||||
of nkAccQuoted: result = lsons(g, n) + 2
|
||||
of nkIfExpr:
|
||||
result = lsub(n.sons[0].sons[0]) + lsub(n.sons[0].sons[1]) + lsons(n, 1) +
|
||||
result = lsub(g, n.sons[0].sons[0]) + lsub(g, n.sons[0].sons[1]) + lsons(g, n, 1) +
|
||||
len("if_:_")
|
||||
of nkElifExpr: result = lsons(n) + len("_elif_:_")
|
||||
of nkElseExpr: result = lsub(n.sons[0]) + len("_else:_") # type descriptions
|
||||
of nkTypeOfExpr: result = (if n.len > 0: lsub(n.sons[0]) else: 0)+len("type()")
|
||||
of nkRefTy: result = (if n.len > 0: lsub(n.sons[0])+1 else: 0) + len("ref")
|
||||
of nkPtrTy: result = (if n.len > 0: lsub(n.sons[0])+1 else: 0) + len("ptr")
|
||||
of nkVarTy: result = (if n.len > 0: lsub(n.sons[0])+1 else: 0) + len("var")
|
||||
of nkElifExpr: result = lsons(g, n) + len("_elif_:_")
|
||||
of nkElseExpr: result = lsub(g, n.sons[0]) + len("_else:_") # type descriptions
|
||||
of nkTypeOfExpr: result = (if n.len > 0: lsub(g, n.sons[0]) else: 0)+len("type()")
|
||||
of nkRefTy: result = (if n.len > 0: lsub(g, n.sons[0])+1 else: 0) + len("ref")
|
||||
of nkPtrTy: result = (if n.len > 0: lsub(g, n.sons[0])+1 else: 0) + len("ptr")
|
||||
of nkVarTy: result = (if n.len > 0: lsub(g, n.sons[0])+1 else: 0) + len("var")
|
||||
of nkDistinctTy:
|
||||
result = len("distinct") + (if n.len > 0: lsub(n.sons[0])+1 else: 0)
|
||||
result = len("distinct") + (if n.len > 0: lsub(g, n.sons[0])+1 else: 0)
|
||||
if n.len > 1:
|
||||
result += (if n[1].kind == nkWith: len("_with_") else: len("_without_"))
|
||||
result += lcomma(n[1])
|
||||
of nkStaticTy: result = (if n.len > 0: lsub(n.sons[0]) else: 0) +
|
||||
result += lcomma(g, n[1])
|
||||
of nkStaticTy: result = (if n.len > 0: lsub(g, n.sons[0]) else: 0) +
|
||||
len("static[]")
|
||||
of nkTypeDef: result = lsons(n) + 3
|
||||
of nkOfInherit: result = lsub(n.sons[0]) + len("of_")
|
||||
of nkProcTy: result = lsons(n) + len("proc_")
|
||||
of nkIteratorTy: result = lsons(n) + len("iterator_")
|
||||
of nkSharedTy: result = lsons(n) + len("shared_")
|
||||
of nkTypeDef: result = lsons(g, n) + 3
|
||||
of nkOfInherit: result = lsub(g, n.sons[0]) + len("of_")
|
||||
of nkProcTy: result = lsons(g, n) + len("proc_")
|
||||
of nkIteratorTy: result = lsons(g, n) + len("iterator_")
|
||||
of nkSharedTy: result = lsons(g, n) + len("shared_")
|
||||
of nkEnumTy:
|
||||
if sonsLen(n) > 0:
|
||||
result = lsub(n.sons[0]) + lcomma(n, 1) + len("enum_")
|
||||
result = lsub(g, n.sons[0]) + lcomma(g, n, 1) + len("enum_")
|
||||
else:
|
||||
result = len("enum")
|
||||
of nkEnumFieldDef: result = lsons(n) + 3
|
||||
of nkEnumFieldDef: result = lsons(g, n) + 3
|
||||
of nkVarSection, nkLetSection:
|
||||
if sonsLen(n) > 1: result = MaxLineLen + 1
|
||||
else: result = lsons(n) + len("var_")
|
||||
else: result = lsons(g, n) + len("var_")
|
||||
of nkUsingStmt:
|
||||
if sonsLen(n) > 1: result = MaxLineLen + 1
|
||||
else: result = lsons(n) + len("using_")
|
||||
of nkReturnStmt: result = lsub(n.sons[0]) + len("return_")
|
||||
of nkRaiseStmt: result = lsub(n.sons[0]) + len("raise_")
|
||||
of nkYieldStmt: result = lsub(n.sons[0]) + len("yield_")
|
||||
of nkDiscardStmt: result = lsub(n.sons[0]) + len("discard_")
|
||||
of nkBreakStmt: result = lsub(n.sons[0]) + len("break_")
|
||||
of nkContinueStmt: result = lsub(n.sons[0]) + len("continue_")
|
||||
of nkPragma: result = lcomma(n) + 4
|
||||
else: result = lsons(g, n) + len("using_")
|
||||
of nkReturnStmt: result = lsub(g, n.sons[0]) + len("return_")
|
||||
of nkRaiseStmt: result = lsub(g, n.sons[0]) + len("raise_")
|
||||
of nkYieldStmt: result = lsub(g, n.sons[0]) + len("yield_")
|
||||
of nkDiscardStmt: result = lsub(g, n.sons[0]) + len("discard_")
|
||||
of nkBreakStmt: result = lsub(g, n.sons[0]) + len("break_")
|
||||
of nkContinueStmt: result = lsub(g, n.sons[0]) + len("continue_")
|
||||
of nkPragma: result = lcomma(g, n) + 4
|
||||
of nkCommentStmt: result = if n.comment.isNil: 0 else: len(n.comment)
|
||||
of nkOfBranch: result = lcomma(n, 0, - 2) + lsub(lastSon(n)) + len("of_:_")
|
||||
of nkImportAs: result = lsub(n.sons[0]) + len("_as_") + lsub(n.sons[1])
|
||||
of nkElifBranch: result = lsons(n) + len("elif_:_")
|
||||
of nkElse: result = lsub(n.sons[0]) + len("else:_")
|
||||
of nkFinally: result = lsub(n.sons[0]) + len("finally:_")
|
||||
of nkGenericParams: result = lcomma(n) + 2
|
||||
of nkOfBranch: result = lcomma(g, n, 0, - 2) + lsub(g, lastSon(n)) + len("of_:_")
|
||||
of nkImportAs: result = lsub(g, n.sons[0]) + len("_as_") + lsub(g, n.sons[1])
|
||||
of nkElifBranch: result = lsons(g, n) + len("elif_:_")
|
||||
of nkElse: result = lsub(g, n.sons[0]) + len("else:_")
|
||||
of nkFinally: result = lsub(g, n.sons[0]) + len("finally:_")
|
||||
of nkGenericParams: result = lcomma(g, n) + 2
|
||||
of nkFormalParams:
|
||||
result = lcomma(n, 1) + 2
|
||||
if n.sons[0].kind != nkEmpty: result = result + lsub(n.sons[0]) + 2
|
||||
result = lcomma(g, n, 1) + 2
|
||||
if n.sons[0].kind != nkEmpty: result = result + lsub(g, n.sons[0]) + 2
|
||||
of nkExceptBranch:
|
||||
result = lcomma(n, 0, -2) + lsub(lastSon(n)) + len("except_:_")
|
||||
result = lcomma(g, n, 0, -2) + lsub(g, lastSon(n)) + len("except_:_")
|
||||
else: result = MaxLineLen + 1
|
||||
|
||||
proc fits(g: TSrcGen, x: int): bool =
|
||||
@@ -517,7 +554,7 @@ proc gcommaAux(g: var TSrcGen, n: PNode, ind: int, start: int = 0,
|
||||
theEnd: int = - 1, separator = tkComma) =
|
||||
for i in countup(start, sonsLen(n) + theEnd):
|
||||
var c = i < sonsLen(n) + theEnd
|
||||
var sublen = lsub(n.sons[i]) + ord(c)
|
||||
var sublen = lsub(g, n.sons[i]) + ord(c)
|
||||
if not fits(g, sublen) and (ind + sublen < MaxLineLen): optNL(g, ind)
|
||||
let oldLen = g.tokens.len
|
||||
gsub(g, n.sons[i])
|
||||
@@ -564,12 +601,12 @@ proc gsection(g: var TSrcGen, n: PNode, c: TContext, kind: TTokType,
|
||||
gcoms(g)
|
||||
dedent(g)
|
||||
|
||||
proc longMode(n: PNode, start: int = 0, theEnd: int = - 1): bool =
|
||||
proc longMode(g: TSrcGen; n: PNode, start: int = 0, theEnd: int = - 1): bool =
|
||||
result = n.comment != nil
|
||||
if not result:
|
||||
# check further
|
||||
for i in countup(start, sonsLen(n) + theEnd):
|
||||
if (lsub(n.sons[i]) > MaxLineLen):
|
||||
if (lsub(g, n.sons[i]) > MaxLineLen):
|
||||
result = true
|
||||
break
|
||||
|
||||
@@ -577,12 +614,16 @@ proc gstmts(g: var TSrcGen, n: PNode, c: TContext, doIndent=true) =
|
||||
if n.kind == nkEmpty: return
|
||||
if n.kind in {nkStmtList, nkStmtListExpr, nkStmtListType}:
|
||||
if doIndent: indentNL(g)
|
||||
for i in countup(0, sonsLen(n) - 1):
|
||||
optNL(g)
|
||||
if n.sons[i].kind in {nkStmtList, nkStmtListExpr, nkStmtListType}:
|
||||
gstmts(g, n.sons[i], c, doIndent=false)
|
||||
let L = n.len
|
||||
for i in 0 .. L-1:
|
||||
if i > 0:
|
||||
optNL(g, n[i-1], n[i])
|
||||
else:
|
||||
gsub(g, n.sons[i])
|
||||
optNL(g)
|
||||
if n[i].kind in {nkStmtList, nkStmtListExpr, nkStmtListType}:
|
||||
gstmts(g, n[i], c, doIndent=false)
|
||||
else:
|
||||
gsub(g, n[i])
|
||||
gcoms(g)
|
||||
if doIndent: dedent(g)
|
||||
else:
|
||||
@@ -597,7 +638,7 @@ proc gif(g: var TSrcGen, n: PNode) =
|
||||
gsub(g, n.sons[0].sons[0])
|
||||
initContext(c)
|
||||
putWithSpace(g, tkColon, ":")
|
||||
if longMode(n) or (lsub(n.sons[0].sons[1]) + g.lineLen > MaxLineLen):
|
||||
if longMode(g, n) or (lsub(g, n.sons[0].sons[1]) + g.lineLen > MaxLineLen):
|
||||
incl(c.flags, rfLongMode)
|
||||
gcoms(g) # a good place for comments
|
||||
gstmts(g, n.sons[0].sons[1], c)
|
||||
@@ -612,7 +653,7 @@ proc gwhile(g: var TSrcGen, n: PNode) =
|
||||
gsub(g, n.sons[0])
|
||||
putWithSpace(g, tkColon, ":")
|
||||
initContext(c)
|
||||
if longMode(n) or (lsub(n.sons[1]) + g.lineLen > MaxLineLen):
|
||||
if longMode(g, n) or (lsub(g, n.sons[1]) + g.lineLen > MaxLineLen):
|
||||
incl(c.flags, rfLongMode)
|
||||
gcoms(g) # a good place for comments
|
||||
gstmts(g, n.sons[1], c)
|
||||
@@ -621,7 +662,7 @@ proc gpattern(g: var TSrcGen, n: PNode) =
|
||||
var c: TContext
|
||||
put(g, tkCurlyLe, "{")
|
||||
initContext(c)
|
||||
if longMode(n) or (lsub(n.sons[0]) + g.lineLen > MaxLineLen):
|
||||
if longMode(g, n) or (lsub(g, n.sons[0]) + g.lineLen > MaxLineLen):
|
||||
incl(c.flags, rfLongMode)
|
||||
gcoms(g) # a good place for comments
|
||||
gstmts(g, n, c)
|
||||
@@ -632,7 +673,7 @@ proc gpragmaBlock(g: var TSrcGen, n: PNode) =
|
||||
gsub(g, n.sons[0])
|
||||
putWithSpace(g, tkColon, ":")
|
||||
initContext(c)
|
||||
if longMode(n) or (lsub(n.sons[1]) + g.lineLen > MaxLineLen):
|
||||
if longMode(g, n) or (lsub(g, n.sons[1]) + g.lineLen > MaxLineLen):
|
||||
incl(c.flags, rfLongMode)
|
||||
gcoms(g) # a good place for comments
|
||||
gstmts(g, n.sons[1], c)
|
||||
@@ -642,7 +683,7 @@ proc gtry(g: var TSrcGen, n: PNode) =
|
||||
put(g, tkTry, "try")
|
||||
putWithSpace(g, tkColon, ":")
|
||||
initContext(c)
|
||||
if longMode(n) or (lsub(n.sons[0]) + g.lineLen > MaxLineLen):
|
||||
if longMode(g, n) or (lsub(g, n.sons[0]) + g.lineLen > MaxLineLen):
|
||||
incl(c.flags, rfLongMode)
|
||||
gcoms(g) # a good place for comments
|
||||
gstmts(g, n.sons[0], c)
|
||||
@@ -653,8 +694,8 @@ proc gfor(g: var TSrcGen, n: PNode) =
|
||||
var length = sonsLen(n)
|
||||
putWithSpace(g, tkFor, "for")
|
||||
initContext(c)
|
||||
if longMode(n) or
|
||||
(lsub(n.sons[length - 1]) + lsub(n.sons[length - 2]) + 6 + g.lineLen >
|
||||
if longMode(g, n) or
|
||||
(lsub(g, n.sons[length - 1]) + lsub(g, n.sons[length - 2]) + 6 + g.lineLen >
|
||||
MaxLineLen):
|
||||
incl(c.flags, rfLongMode)
|
||||
gcomma(g, n, c, 0, - 3)
|
||||
@@ -670,7 +711,7 @@ proc gcase(g: var TSrcGen, n: PNode) =
|
||||
initContext(c)
|
||||
var length = sonsLen(n)
|
||||
var last = if n.sons[length-1].kind == nkElse: -2 else: -1
|
||||
if longMode(n, 0, last): incl(c.flags, rfLongMode)
|
||||
if longMode(g, n, 0, last): incl(c.flags, rfLongMode)
|
||||
putWithSpace(g, tkCase, "case")
|
||||
gsub(g, n.sons[0])
|
||||
gcoms(g)
|
||||
@@ -678,7 +719,7 @@ proc gcase(g: var TSrcGen, n: PNode) =
|
||||
gsons(g, n, c, 1, last)
|
||||
if last == - 2:
|
||||
initContext(c)
|
||||
if longMode(n.sons[length - 1]): incl(c.flags, rfLongMode)
|
||||
if longMode(g, n.sons[length - 1]): incl(c.flags, rfLongMode)
|
||||
gsub(g, n.sons[length - 1], c)
|
||||
|
||||
proc gproc(g: var TSrcGen, n: PNode) =
|
||||
@@ -740,7 +781,7 @@ proc gblock(g: var TSrcGen, n: PNode) =
|
||||
else:
|
||||
put(g, tkBlock, "block")
|
||||
putWithSpace(g, tkColon, ":")
|
||||
if longMode(n) or (lsub(n.sons[1]) + g.lineLen > MaxLineLen):
|
||||
if longMode(g, n) or (lsub(g, n.sons[1]) + g.lineLen > MaxLineLen):
|
||||
incl(c.flags, rfLongMode)
|
||||
gcoms(g)
|
||||
# XXX I don't get why this is needed here! gstmts should already handle this!
|
||||
@@ -753,7 +794,7 @@ proc gstaticStmt(g: var TSrcGen, n: PNode) =
|
||||
putWithSpace(g, tkStatic, "static")
|
||||
putWithSpace(g, tkColon, ":")
|
||||
initContext(c)
|
||||
if longMode(n) or (lsub(n.sons[0]) + g.lineLen > MaxLineLen):
|
||||
if longMode(g, n) or (lsub(g, n.sons[0]) + g.lineLen > MaxLineLen):
|
||||
incl(c.flags, rfLongMode)
|
||||
gcoms(g) # a good place for comments
|
||||
gstmts(g, n.sons[0], c)
|
||||
@@ -771,7 +812,7 @@ proc gident(g: var TSrcGen, n: PNode) =
|
||||
(n.typ != nil and tfImplicitTypeParam in n.typ.flags): return
|
||||
|
||||
var t: TTokType
|
||||
var s = atom(n)
|
||||
var s = atom(g, n)
|
||||
if (s[0] in lexer.SymChars):
|
||||
if (n.kind == nkIdent):
|
||||
if (n.ident.id < ord(tokKeywordLow) - ord(tkSymbol)) or
|
||||
@@ -818,26 +859,26 @@ proc gsub(g: var TSrcGen, n: PNode, c: TContext) =
|
||||
case n.kind # atoms:
|
||||
of nkTripleStrLit: putRawStr(g, tkTripleStrLit, n.strVal)
|
||||
of nkEmpty: discard
|
||||
of nkType: put(g, tkInvalid, atom(n))
|
||||
of nkType: put(g, tkInvalid, atom(g, n))
|
||||
of nkSym, nkIdent: gident(g, n)
|
||||
of nkIntLit: put(g, tkIntLit, atom(n))
|
||||
of nkInt8Lit: put(g, tkInt8Lit, atom(n))
|
||||
of nkInt16Lit: put(g, tkInt16Lit, atom(n))
|
||||
of nkInt32Lit: put(g, tkInt32Lit, atom(n))
|
||||
of nkInt64Lit: put(g, tkInt64Lit, atom(n))
|
||||
of nkUIntLit: put(g, tkUIntLit, atom(n))
|
||||
of nkUInt8Lit: put(g, tkUInt8Lit, atom(n))
|
||||
of nkUInt16Lit: put(g, tkUInt16Lit, atom(n))
|
||||
of nkUInt32Lit: put(g, tkUInt32Lit, atom(n))
|
||||
of nkUInt64Lit: put(g, tkUInt64Lit, atom(n))
|
||||
of nkFloatLit: put(g, tkFloatLit, atom(n))
|
||||
of nkFloat32Lit: put(g, tkFloat32Lit, atom(n))
|
||||
of nkFloat64Lit: put(g, tkFloat64Lit, atom(n))
|
||||
of nkFloat128Lit: put(g, tkFloat128Lit, atom(n))
|
||||
of nkStrLit: put(g, tkStrLit, atom(n))
|
||||
of nkRStrLit: put(g, tkRStrLit, atom(n))
|
||||
of nkCharLit: put(g, tkCharLit, atom(n))
|
||||
of nkNilLit: put(g, tkNil, atom(n)) # complex expressions
|
||||
of nkIntLit: put(g, tkIntLit, atom(g, n))
|
||||
of nkInt8Lit: put(g, tkInt8Lit, atom(g, n))
|
||||
of nkInt16Lit: put(g, tkInt16Lit, atom(g, n))
|
||||
of nkInt32Lit: put(g, tkInt32Lit, atom(g, n))
|
||||
of nkInt64Lit: put(g, tkInt64Lit, atom(g, n))
|
||||
of nkUIntLit: put(g, tkUIntLit, atom(g, n))
|
||||
of nkUInt8Lit: put(g, tkUInt8Lit, atom(g, n))
|
||||
of nkUInt16Lit: put(g, tkUInt16Lit, atom(g, n))
|
||||
of nkUInt32Lit: put(g, tkUInt32Lit, atom(g, n))
|
||||
of nkUInt64Lit: put(g, tkUInt64Lit, atom(g, n))
|
||||
of nkFloatLit: put(g, tkFloatLit, atom(g, n))
|
||||
of nkFloat32Lit: put(g, tkFloat32Lit, atom(g, n))
|
||||
of nkFloat64Lit: put(g, tkFloat64Lit, atom(g, n))
|
||||
of nkFloat128Lit: put(g, tkFloat128Lit, atom(g, n))
|
||||
of nkStrLit: put(g, tkStrLit, atom(g, n))
|
||||
of nkRStrLit: put(g, tkRStrLit, atom(g, n))
|
||||
of nkCharLit: put(g, tkCharLit, atom(g, n))
|
||||
of nkNilLit: put(g, tkNil, atom(g, n)) # complex expressions
|
||||
of nkCall, nkConv, nkDotCall, nkPattern, nkObjConstr:
|
||||
if n.len > 0 and isBracket(n[0]):
|
||||
gsub(g, n, 1)
|
||||
@@ -1003,7 +1044,7 @@ proc gsub(g: var TSrcGen, n: PNode, c: TContext) =
|
||||
gsub(g, n, 1)
|
||||
put(g, tkSpaces, Space)
|
||||
gsub(g, n, 0) # binary operator
|
||||
if not fits(g, lsub(n.sons[2]) + lsub(n.sons[0]) + 1):
|
||||
if not fits(g, lsub(g, n.sons[2]) + lsub(g, n.sons[0]) + 1):
|
||||
optNL(g, g.indent + longIndentWid)
|
||||
else:
|
||||
put(g, tkSpaces, Space)
|
||||
@@ -1011,7 +1052,11 @@ proc gsub(g: var TSrcGen, n: PNode, c: TContext) =
|
||||
of nkPrefix:
|
||||
gsub(g, n, 0)
|
||||
if n.len > 1:
|
||||
put(g, tkSpaces, Space)
|
||||
let opr = if n[0].kind == nkIdent: n[0].ident
|
||||
elif n[0].kind == nkSym: n[0].sym.name
|
||||
else: nil
|
||||
if n[1].kind == nkPrefix or (opr != nil and renderer.isKeyword(opr)):
|
||||
put(g, tkSpaces, Space)
|
||||
if n.sons[1].kind == nkInfix:
|
||||
put(g, tkParLe, "(")
|
||||
gsub(g, n.sons[1])
|
||||
@@ -1031,7 +1076,7 @@ proc gsub(g: var TSrcGen, n: PNode, c: TContext) =
|
||||
of nkAccQuoted:
|
||||
put(g, tkAccent, "`")
|
||||
if n.len > 0: gsub(g, n.sons[0])
|
||||
for i in 1 .. <n.len:
|
||||
for i in 1 ..< n.len:
|
||||
put(g, tkSpaces, Space)
|
||||
gsub(g, n.sons[i])
|
||||
put(g, tkAccent, "`")
|
||||
@@ -1316,8 +1361,11 @@ proc gsub(g: var TSrcGen, n: PNode, c: TContext) =
|
||||
gstmts(g, n.sons[0], c)
|
||||
of nkExceptBranch:
|
||||
optNL(g)
|
||||
putWithSpace(g, tkExcept, "except")
|
||||
gcomma(g, n, 0, - 2)
|
||||
if n.len != 1:
|
||||
putWithSpace(g, tkExcept, "except")
|
||||
else:
|
||||
put(g, tkExcept, "except")
|
||||
gcomma(g, n, 0, -2)
|
||||
putWithSpace(g, tkColon, ":")
|
||||
gcoms(g)
|
||||
gstmts(g, lastSon(n), c)
|
||||
@@ -1363,7 +1411,7 @@ proc gsub(g: var TSrcGen, n: PNode, c: TContext) =
|
||||
#nkNone, nkExplicitTypeListCall:
|
||||
internalError(n.info, "rnimsyn.gsub(" & $n.kind & ')')
|
||||
|
||||
proc renderTree(n: PNode, renderFlags: TRenderFlags = {}): string =
|
||||
proc renderTree*(n: PNode, renderFlags: TRenderFlags = {}): string =
|
||||
var g: TSrcGen
|
||||
initSrcGen(g, renderFlags)
|
||||
# do not indent the initial statement list so that
|
||||
@@ -1375,12 +1423,20 @@ proc renderTree(n: PNode, renderFlags: TRenderFlags = {}): string =
|
||||
gsub(g, n)
|
||||
result = g.buf
|
||||
|
||||
proc renderModule(n: PNode, filename: string,
|
||||
renderFlags: TRenderFlags = {}) =
|
||||
proc `$`*(n: PNode): string = n.renderTree
|
||||
|
||||
proc renderModule*(n: PNode, infile, outfile: string,
|
||||
renderFlags: TRenderFlags = {}) =
|
||||
var
|
||||
f: File
|
||||
g: TSrcGen
|
||||
initSrcGen(g, renderFlags)
|
||||
when defined(nimpretty):
|
||||
try:
|
||||
g.origContent = readFile(infile)
|
||||
except IOError:
|
||||
rawMessage(errCannotOpenFile, infile)
|
||||
|
||||
for i in countup(0, sonsLen(n) - 1):
|
||||
gsub(g, n.sons[i])
|
||||
optNL(g)
|
||||
@@ -1391,17 +1447,17 @@ proc renderModule(n: PNode, filename: string,
|
||||
gcoms(g)
|
||||
if optStdout in gGlobalOptions:
|
||||
write(stdout, g.buf)
|
||||
elif open(f, filename, fmWrite):
|
||||
elif open(f, outfile, fmWrite):
|
||||
write(f, g.buf)
|
||||
close(f)
|
||||
else:
|
||||
rawMessage(errCannotOpenFile, filename)
|
||||
rawMessage(errCannotOpenFile, outfile)
|
||||
|
||||
proc initTokRender(r: var TSrcGen, n: PNode, renderFlags: TRenderFlags = {}) =
|
||||
proc initTokRender*(r: var TSrcGen, n: PNode, renderFlags: TRenderFlags = {}) =
|
||||
initSrcGen(r, renderFlags)
|
||||
gsub(r, n)
|
||||
|
||||
proc getNextTok(r: var TSrcGen, kind: var TTokType, literal: var string) =
|
||||
proc getNextTok*(r: var TSrcGen, kind: var TTokType, literal: var string) =
|
||||
if r.idx < len(r.tokens):
|
||||
kind = r.tokens[r.idx].kind
|
||||
var length = r.tokens[r.idx].length.int
|
||||
|
||||
@@ -1,13 +1,40 @@
|
||||
|
||||
import intsets, tables, ast, idents, renderer
|
||||
import
|
||||
intsets, ast, idents, algorithm, renderer, parser, ospaths, strutils,
|
||||
sequtils, msgs, modulegraphs, syntaxes, options, modulepaths, tables
|
||||
|
||||
const
|
||||
nfTempMark = nfTransf
|
||||
nfPermMark = nfNoRewrite
|
||||
type
|
||||
DepN = ref object
|
||||
pnode: PNode
|
||||
id, idx, lowLink: int
|
||||
onStack: bool
|
||||
kids: seq[DepN]
|
||||
hAQ, hIS, hB, hCmd: int
|
||||
when not defined(release):
|
||||
expls: seq[string]
|
||||
DepG = seq[DepN]
|
||||
|
||||
when not defined(release):
|
||||
var idNames = newTable[int, string]()
|
||||
|
||||
proc newDepN(id: int, pnode: PNode): DepN =
|
||||
new(result)
|
||||
result.id = id
|
||||
result.pnode = pnode
|
||||
result.idx = -1
|
||||
result.lowLink = -1
|
||||
result.onStack = false
|
||||
result.kids = @[]
|
||||
result.hAQ = -1
|
||||
result.hIS = -1
|
||||
result.hB = -1
|
||||
result.hCmd = -1
|
||||
when not defined(release):
|
||||
result.expls = @[]
|
||||
|
||||
proc accQuoted(n: PNode): PIdent =
|
||||
var id = ""
|
||||
for i in 0 .. <n.len:
|
||||
for i in 0 ..< n.len:
|
||||
let x = n[i]
|
||||
case x.kind
|
||||
of nkIdent: id.add(x.ident.s)
|
||||
@@ -21,10 +48,19 @@ proc addDecl(n: PNode; declares: var IntSet) =
|
||||
of nkPragmaExpr: addDecl(n[0], declares)
|
||||
of nkIdent:
|
||||
declares.incl n.ident.id
|
||||
when not defined(release):
|
||||
idNames[n.ident.id] = n.ident.s
|
||||
of nkSym:
|
||||
declares.incl n.sym.name.id
|
||||
when not defined(release):
|
||||
idNames[n.sym.name.id] = n.sym.name.s
|
||||
of nkAccQuoted:
|
||||
declares.incl accQuoted(n).id
|
||||
let a = accQuoted(n)
|
||||
declares.incl a.id
|
||||
when not defined(release):
|
||||
idNames[a.id] = a.s
|
||||
of nkEnumFieldDef:
|
||||
addDecl(n[0], declares)
|
||||
else: discard
|
||||
|
||||
proc computeDeps(n: PNode, declares, uses: var IntSet; topLevel: bool) =
|
||||
@@ -32,7 +68,7 @@ proc computeDeps(n: PNode, declares, uses: var IntSet; topLevel: bool) =
|
||||
template decl(n) =
|
||||
if topLevel: addDecl(n, declares)
|
||||
case n.kind
|
||||
of procDefs:
|
||||
of procDefs, nkMacroDef, nkTemplateDef:
|
||||
decl(n[0])
|
||||
for i in 1..bodyPos: deps(n[i])
|
||||
of nkLetSection, nkVarSection, nkUsingStmt:
|
||||
@@ -44,43 +80,358 @@ proc computeDeps(n: PNode, declares, uses: var IntSet; topLevel: bool) =
|
||||
for a in n:
|
||||
if a.len >= 3:
|
||||
decl(a[0])
|
||||
for i in 1..<a.len: deps(a[i])
|
||||
for i in 1..<a.len:
|
||||
if a[i].kind == nkEnumTy:
|
||||
# declare enum members
|
||||
for b in a[i]:
|
||||
decl(b)
|
||||
else:
|
||||
deps(a[i])
|
||||
of nkIdentDefs:
|
||||
for i in 1..<n.len: # avoid member identifiers in object definitions
|
||||
deps(n[i])
|
||||
of nkIdent: uses.incl n.ident.id
|
||||
of nkSym: uses.incl n.sym.name.id
|
||||
of nkAccQuoted: uses.incl accQuoted(n).id
|
||||
of nkOpenSymChoice, nkClosedSymChoice:
|
||||
uses.incl n.sons[0].sym.name.id
|
||||
of nkStmtList, nkStmtListExpr, nkWhenStmt, nkElifBranch, nkElse:
|
||||
of nkStmtList, nkStmtListExpr, nkWhenStmt, nkElifBranch, nkElse, nkStaticStmt:
|
||||
for i in 0..<len(n): computeDeps(n[i], declares, uses, topLevel)
|
||||
of nkPragma:
|
||||
let a = n.sons[0]
|
||||
if a.kind == nkExprColonExpr and a.sons[0].kind == nkIdent and
|
||||
a.sons[0].ident.s == "pragma":
|
||||
# user defined pragma
|
||||
decl(a.sons[1])
|
||||
else:
|
||||
for i in 0..<safeLen(n): deps(n[i])
|
||||
else:
|
||||
for i in 0..<safeLen(n): deps(n[i])
|
||||
|
||||
proc visit(i: int; all, res: PNode; deps: var seq[(IntSet, IntSet)]): bool =
|
||||
let n = all[i]
|
||||
if nfTempMark in n.flags:
|
||||
# not a DAG!
|
||||
proc cleanPath(s: string): string =
|
||||
# Here paths may have the form A / B or "A/B"
|
||||
result = ""
|
||||
for c in s:
|
||||
if c != ' ' and c != '\"':
|
||||
result.add c
|
||||
|
||||
proc joinPath(parts: seq[string]): string =
|
||||
let nb = parts.len
|
||||
assert nb > 0
|
||||
if nb == 1:
|
||||
return parts[0]
|
||||
result = parts[0] / parts[1]
|
||||
for i in 2..<parts.len:
|
||||
result = result / parts[i]
|
||||
|
||||
proc getIncludePath(n: PNode, modulePath: string): string =
|
||||
let istr = n.renderTree.cleanPath
|
||||
let (pdir, _) = modulePath.splitPath
|
||||
let p = istr.split('/').joinPath.addFileExt("nim")
|
||||
result = pdir / p
|
||||
|
||||
proc hasIncludes(n:PNode): bool =
|
||||
for a in n:
|
||||
if a.kind == nkIncludeStmt:
|
||||
return true
|
||||
|
||||
proc includeModule*(graph: ModuleGraph; s: PSym, fileIdx: int32;
|
||||
cache: IdentCache): PNode {.procvar.} =
|
||||
result = syntaxes.parseFile(fileIdx, cache)
|
||||
graph.addDep(s, fileIdx)
|
||||
graph.addIncludeDep(s.position.int32, fileIdx)
|
||||
|
||||
proc expandIncludes(graph: ModuleGraph, module: PSym, n: PNode,
|
||||
modulePath: string, includedFiles: var IntSet,
|
||||
cache: IdentCache): PNode =
|
||||
# Parses includes and injects them into the current tree
|
||||
if not n.hasIncludes:
|
||||
return n
|
||||
result = newNodeI(nkStmtList, n.info)
|
||||
for a in n:
|
||||
if a.kind == nkIncludeStmt:
|
||||
for i in 0..<a.len:
|
||||
var f = checkModuleName(a.sons[i])
|
||||
if f != InvalidFileIDX:
|
||||
if containsOrIncl(includedFiles, f):
|
||||
localError(a.info, errRecursiveDependencyX, f.toFilename)
|
||||
else:
|
||||
let nn = includeModule(graph, module, f, cache)
|
||||
let nnn = expandIncludes(graph, module, nn, modulePath,
|
||||
includedFiles, cache)
|
||||
excl(includedFiles, f)
|
||||
for b in nnn:
|
||||
result.add b
|
||||
else:
|
||||
result.add a
|
||||
|
||||
proc splitSections(n: PNode): PNode =
|
||||
# Split typeSections and ConstSections into
|
||||
# sections that contain only one definition
|
||||
assert n.kind == nkStmtList
|
||||
result = newNodeI(nkStmtList, n.info)
|
||||
for a in n:
|
||||
if a.kind in {nkTypeSection, nkConstSection} and a.len > 1:
|
||||
for b in a:
|
||||
var s = newNode(a.kind)
|
||||
s.info = b.info
|
||||
s.add b
|
||||
result.add s
|
||||
else:
|
||||
result.add a
|
||||
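# For intuition, a single section with two mutually recursive definitions ...
type
  A = object
    b: ref B
  B = object
    a: ref A
# ... is split by splitSections into two single-definition sections:
#
#   type
#     A = object
#       b: ref B
#   type
#     B = object
#       a: ref A
#
# On their own these would not compile; the dependency analysis detects the
# cycle and mergeSections below glues such sections back together.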
|
||||
proc haveSameKind(dns: seq[DepN]): bool =
|
||||
# Check if all the nodes in a strongly connected
|
||||
# component have the same kind
|
||||
result = true
|
||||
let kind = dns[0].pnode.kind
|
||||
for dn in dns:
|
||||
if dn.pnode.kind != kind:
|
||||
return false
|
||||
|
||||
proc mergeSections(comps: seq[seq[DepN]], res: PNode) =
|
||||
# Merges typeSections and ConstSections when they form
|
||||
# a strong component (ex: circular type definition)
|
||||
for c in comps:
|
||||
assert c.len > 0
|
||||
if c.len == 1:
|
||||
res.add c[0].pnode
|
||||
else:
|
||||
let fstn = c[0].pnode
|
||||
let kind = fstn.kind
      # always return to the original order when we have circular dependencies
let cs = c.sortedByIt(it.id)
|
||||
if kind in {nkTypeSection, nkConstSection} and haveSameKind(cs):
|
||||
# Circular dependency between type or const sections, we just
|
||||
# need to merge them
|
||||
var sn = newNode(kind)
|
||||
for dn in cs:
|
||||
sn.add dn.pnode.sons[0]
|
||||
res.add sn
|
||||
else:
|
||||
# Problematic circular dependency, we arrange the nodes into
|
||||
# their original relative order and make sure to re-merge
|
||||
# consecutive type and const sections
        var wmsg = "Circular dependency detected. reorder pragma may not be able to" &
          " reorder some nodes properly"
when not defined(release):
|
||||
wmsg &= ":\n"
|
||||
for i in 0..<cs.len-1:
|
||||
for j in i..<cs.len:
|
||||
for ci in 0..<cs[i].kids.len:
|
||||
if cs[i].kids[ci].id == cs[j].id:
|
||||
wmsg &= "line " & $cs[i].pnode.info.line &
|
||||
" depends on line " & $cs[j].pnode.info.line &
|
||||
": " & cs[i].expls[ci] & "\n"
|
||||
for j in 0..<cs.len-1:
|
||||
for ci in 0..<cs[^1].kids.len:
|
||||
if cs[^1].kids[ci].id == cs[j].id:
|
||||
wmsg &= "line " & $cs[^1].pnode.info.line &
|
||||
" depends on line " & $cs[j].pnode.info.line &
|
||||
": " & cs[^1].expls[ci] & "\n"
|
||||
message(cs[0].pnode.info, warnUser, wmsg)
|
||||
|
||||
var i = 0
|
||||
while i < cs.len:
|
||||
if cs[i].pnode.kind in {nkTypeSection, nkConstSection}:
|
||||
let ckind = cs[i].pnode.kind
|
||||
var sn = newNode(ckind)
|
||||
sn.add cs[i].pnode[0]
|
||||
inc i
|
||||
while i < cs.len and cs[i].pnode.kind == ckind :
|
||||
sn.add cs[i].pnode[0]
|
||||
inc i
|
||||
res.add sn
|
||||
else:
|
||||
res.add cs[i].pnode
|
||||
inc i
|
||||
|
||||
proc hasImportStmt(n: PNode): bool =
  # Checks if the node is an import statement or
  # if it contains one
case n.kind
|
||||
of nkImportStmt, nkFromStmt, nkImportExceptStmt:
|
||||
return true
|
||||
if nfPermMark notin n.flags:
|
||||
incl n.flags, nfTempMark
|
||||
var uses = deps[i][1]
|
||||
for j in 0..<all.len:
|
||||
if j != i:
|
||||
let declares = deps[j][0]
|
||||
of nkStmtList, nkStmtListExpr, nkWhenStmt, nkElifBranch, nkElse, nkStaticStmt:
|
||||
for a in n:
|
||||
if a.hasImportStmt:
|
||||
return true
|
||||
else:
|
||||
result = false
|
||||
|
||||
proc hasImportStmt(n: DepN): bool =
|
||||
if n.hIS < 0:
|
||||
n.hIS = ord(n.pnode.hasImportStmt)
|
||||
result = bool(n.hIS)
|
||||
|
||||
proc hasCommand(n: PNode): bool =
|
||||
# Checks if the node is a command or a call
|
||||
# or if it contains one
|
||||
case n.kind
|
||||
of nkCommand, nkCall:
|
||||
result = true
|
||||
of nkStmtList, nkStmtListExpr, nkWhenStmt, nkElifBranch, nkElse,
|
||||
nkStaticStmt, nkLetSection, nkConstSection, nkVarSection,
|
||||
nkIdentDefs:
|
||||
for a in n:
|
||||
if a.hasCommand:
|
||||
return true
|
||||
else:
|
||||
return false
|
||||
|
||||
proc hasCommand(n: DepN): bool =
|
||||
if n.hCmd < 0:
|
||||
n.hCmd = ord(n.pnode.hasCommand)
|
||||
result = bool(n.hCmd)
|
||||
|
||||
proc hasAccQuoted(n: PNode): bool =
|
||||
if n.kind == nkAccQuoted:
|
||||
return true
|
||||
for a in n:
|
||||
if hasAccQuoted(a):
|
||||
return true
|
||||
|
||||
const extandedProcDefs = procDefs + {nkMacroDef, nkTemplateDef}
|
||||
|
||||
proc hasAccQuotedDef(n: PNode): bool =
|
||||
# Checks if the node is a function, macro, template ...
|
||||
# with a quoted name or if it contains one
|
||||
case n.kind
|
||||
of extandedProcDefs:
|
||||
result = n[0].hasAccQuoted
|
||||
of nkStmtList, nkStmtListExpr, nkWhenStmt, nkElifBranch, nkElse, nkStaticStmt:
|
||||
for a in n:
|
||||
if a.hasAccQuotedDef:
|
||||
return true
|
||||
else:
|
||||
result = false
|
||||
|
||||
proc hasAccQuotedDef(n: DepN): bool =
|
||||
if n.hAQ < 0:
|
||||
n.hAQ = ord(n.pnode.hasAccQuotedDef)
|
||||
result = bool(n.hAQ)
|
||||
|
||||
proc hasBody(n: PNode): bool =
|
||||
# Checks if the node is a function, macro, template ...
|
||||
# with a body or if it contains one
|
||||
case n.kind
|
||||
of nkCommand, nkCall:
|
||||
result = true
|
||||
of extandedProcDefs:
|
||||
result = n[^1].kind == nkStmtList
|
||||
of nkStmtList, nkStmtListExpr, nkWhenStmt, nkElifBranch, nkElse, nkStaticStmt:
|
||||
for a in n:
|
||||
if a.hasBody:
|
||||
return true
|
||||
else:
|
||||
result = false
|
||||
|
||||
proc hasBody(n: DepN): bool =
|
||||
if n.hB < 0:
|
||||
n.hB = ord(n.pnode.hasBody)
|
||||
result = bool(n.hB)
|
||||
|
||||
proc intersects(s1, s2: IntSet): bool =
|
||||
for a in s1:
|
||||
if s2.contains(a):
|
||||
return true
|
||||
|
||||
proc buildGraph(n: PNode, deps: seq[(IntSet, IntSet)]): DepG =
|
||||
# Build a dependency graph
|
||||
result = newSeqOfCap[DepN](deps.len)
|
||||
for i in 0..<deps.len:
|
||||
result.add newDepN(i, n.sons[i])
|
||||
for i in 0..<deps.len:
|
||||
var ni = result[i]
|
||||
let uses = deps[i][1]
|
||||
let niHasBody = ni.hasBody
|
||||
let niHasCmd = ni.hasCommand
|
||||
for j in 0..<deps.len:
|
||||
if i == j: continue
|
||||
var nj = result[j]
|
||||
let declares = deps[j][0]
|
||||
if j < i and nj.hasCommand and niHasCmd:
|
||||
# Preserve order for commands and calls
|
||||
ni.kids.add nj
|
||||
when not defined(release):
|
||||
ni.expls.add "both have commands and one comes after the other"
|
||||
elif j < i and nj.hasImportStmt:
|
||||
# Every node that comes after an import statement must
|
||||
# depend on that import
|
||||
ni.kids.add nj
|
||||
when not defined(release):
|
||||
ni.expls.add "parent is, or contains, an import statement and child comes after it"
|
||||
elif j < i and niHasBody and nj.hasAccQuotedDef:
        # Every function, macro, template... with a body depends
        # on preceding function declarations that have quoted names.
# That's because it is hard to detect the use of functions
|
||||
# like "[]=", "[]", "or" ... in their bodies.
|
||||
ni.kids.add nj
|
||||
when not defined(release):
|
||||
ni.expls.add "one declares a quoted identifier and the other has a body and comes after it"
|
||||
elif j < i and niHasBody and not nj.hasBody and
|
||||
intersects(deps[i][0], declares):
|
||||
# Keep function declaration before function definition
|
||||
ni.kids.add nj
|
||||
when not defined(release):
|
||||
for dep in deps[i][0]:
|
||||
if dep in declares:
|
||||
ni.expls.add "one declares \"" & idNames[dep] & "\" and the other defines it"
|
||||
else:
|
||||
for d in declares:
|
||||
if uses.contains(d):
|
||||
let oldLen = res.len
|
||||
if visit(j, all, res, deps):
|
||||
result = true
|
||||
# rollback what we did, it turned out to be a dependency that caused
|
||||
# trouble:
|
||||
for k in oldLen..<res.len:
|
||||
res.sons[k].flags = res.sons[k].flags - {nfPermMark, nfTempMark}
|
||||
if oldLen != res.len: res.sons.setLen oldLen
|
||||
break
|
||||
n.flags = n.flags + {nfPermMark} - {nfTempMark}
|
||||
res.add n
|
||||
ni.kids.add nj
|
||||
when not defined(release):
|
||||
ni.expls.add "one declares \"" & idNames[d] & "\" and the other uses it"
|
||||
|
||||
proc reorder*(n: PNode): PNode =
|
||||
proc strongConnect(v: var DepN, idx: var int, s: var seq[DepN],
                   res: var seq[seq[DepN]]) =
  # Recursive part of Tarjan's algorithm
  v.idx = idx
  v.lowLink = idx
  inc idx
  s.add v
  v.onStack = true
  for w in v.kids.mitems:
    if w.idx < 0:
      strongConnect(w, idx, s, res)
      v.lowLink = min(v.lowLink, w.lowLink)
    elif w.onStack:
      v.lowLink = min(v.lowLink, w.idx)
  if v.lowLink == v.idx:
    var comp = newSeq[DepN]()
    while true:
      var w = s.pop
      w.onStack = false
      comp.add w
      if w.id == v.id: break
    res.add comp

proc getStrongComponents(g: var DepG): seq[seq[DepN]] =
  ## Tarjan's algorithm. Performs a topological sort
  ## and detects strongly connected components.
  result = newSeq[seq[DepN]]()
  var s = newSeq[DepN]()
  var idx = 0
  for v in g.mitems:
    if v.idx < 0:
      strongConnect(v, idx, s, result)
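# For reference, the same Tarjan scheme on a plain adjacency list
# (standalone illustrative sketch, not compiler code):
import sequtils

proc tarjanSCC(adj: seq[seq[int]]): seq[seq[int]] =
  var indices = newSeqWith(adj.len, -1)
  var lowlink = newSeqWith(adj.len, -1)
  var onStack = newSeqWith(adj.len, false)
  var stack: seq[int] = @[]
  var comps: seq[seq[int]] = @[]
  var idx = 0

  proc strongConnect(v: int) =
    indices[v] = idx
    lowlink[v] = idx
    inc idx
    stack.add v
    onStack[v] = true
    for w in adj[v]:
      if indices[w] < 0:
        strongConnect(w)
        lowlink[v] = min(lowlink[v], lowlink[w])
      elif onStack[w]:
        lowlink[v] = min(lowlink[v], indices[w])
    if lowlink[v] == indices[v]:
      var comp: seq[int] = @[]
      while true:
        let w = stack[^1]
        stack.setLen(stack.len - 1)
        onStack[w] = false
        comp.add w
        if w == v: break
      comps.add comp

  for v in 0 ..< adj.len:
    if indices[v] < 0: strongConnect(v)
  result = comps

echo tarjanSCC(@[@[1], @[0], @[]])   # @[@[1, 0], @[2]] -- nodes 0 and 1 form a cycle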
|
||||
proc hasForbiddenPragma(n: PNode): bool =
|
||||
# Checks if the tree node has some pragmas that do not
|
||||
# play well with reordering, like the push/pop pragma
|
||||
for a in n:
|
||||
if a.kind == nkPragma and a[0].kind == nkIdent and
|
||||
a[0].ident.s == "push":
|
||||
return true
|
||||
|
||||
proc reorder*(graph: ModuleGraph, n: PNode, module: PSym, cache: IdentCache): PNode =
|
||||
if n.hasForbiddenPragma:
|
||||
return n
|
||||
var includedFiles = initIntSet()
|
||||
let mpath = module.fileIdx.toFullPath
|
||||
let n = expandIncludes(graph, module, n, mpath,
|
||||
includedFiles, cache).splitSections
|
||||
result = newNodeI(nkStmtList, n.info)
|
||||
var deps = newSeq[(IntSet, IntSet)](n.len)
|
||||
for i in 0..<n.len:
|
||||
@@ -88,15 +439,6 @@ proc reorder*(n: PNode): PNode =
|
||||
deps[i][1] = initIntSet()
|
||||
computeDeps(n[i], deps[i][0], deps[i][1], true)
|
||||
|
||||
for i in 0 .. n.len-1:
|
||||
discard visit(i, n, result, deps)
|
||||
for i in 0..<result.len:
|
||||
result.sons[i].flags = result.sons[i].flags - {nfTempMark, nfPermMark}
|
||||
when false:
|
||||
# reverse the result:
|
||||
let L = result.len-1
|
||||
for i in 0 .. result.len div 2:
|
||||
result.sons[i].flags = result.sons[i].flags - {nfTempMark, nfPermMark}
|
||||
result.sons[L - i].flags = result.sons[L - i].flags - {nfTempMark, nfPermMark}
|
||||
swap(result.sons[i], result.sons[L - i])
|
||||
#echo result
|
||||
var g = buildGraph(n, deps)
|
||||
let comps = getStrongComponents(g)
|
||||
mergeSections(comps, result)
|
||||
|
||||
@@ -336,10 +336,13 @@ proc decodeType(r: PRodReader, info: TLineInfo): PType =
|
||||
if r.s[r.pos] == '\17':
|
||||
inc(r.pos)
|
||||
result.assignment = rrGetSym(r, decodeVInt(r.s, r.pos), info)
|
||||
while r.s[r.pos] == '\18':
|
||||
if r.s[r.pos] == '\18':
|
||||
inc(r.pos)
|
||||
result.sink = rrGetSym(r, decodeVInt(r.s, r.pos), info)
|
||||
while r.s[r.pos] == '\19':
|
||||
inc(r.pos)
|
||||
let x = decodeVInt(r.s, r.pos)
|
||||
doAssert r.s[r.pos] == '\19'
|
||||
doAssert r.s[r.pos] == '\20'
|
||||
inc(r.pos)
|
||||
let y = rrGetSym(r, decodeVInt(r.s, r.pos), info)
|
||||
result.methods.safeAdd((x, y))
|
||||
@@ -792,7 +795,7 @@ proc getReader(moduleId: int): PRodReader =
|
||||
# the module ID! We could introduce a mapping ID->PRodReader but I'll leave
|
||||
# this for later versions if benchmarking shows the linear search causes
|
||||
# problems:
|
||||
for i in 0 .. <gMods.len:
|
||||
for i in 0 ..< gMods.len:
|
||||
result = gMods[i].rd
|
||||
if result != nil and result.moduleID == moduleId: return result
|
||||
return nil
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
## Serialization utilities for the compiler.
|
||||
import strutils
|
||||
|
||||
proc c_sprintf(buf, frmt: cstring) {.importc: "sprintf", header: "<stdio.h>", nodecl, varargs.}
|
||||
proc c_snprintf(s: cstring; n:uint; frmt: cstring): cint {.importc: "snprintf", header: "<stdio.h>", nodecl, varargs.}
|
||||
|
||||
proc toStrMaxPrecision*(f: BiggestFloat, literalPostfix = ""): string =
|
||||
if f != f:
|
||||
@@ -21,9 +21,14 @@ proc toStrMaxPrecision*(f: BiggestFloat, literalPostfix = ""): string =
|
||||
if f > 0.0: result = "INF"
|
||||
else: result = "-INF"
|
||||
else:
|
||||
var buf: array[0..80, char]
|
||||
c_sprintf(buf, "%#.16e" & literalPostfix, f)
|
||||
result = $buf
|
||||
when defined(nimNoArrayToCstringConversion):
|
||||
result = newString(81)
|
||||
let n = c_snprintf(result.cstring, result.len.uint, "%#.16e%s", f, literalPostfix.cstring)
|
||||
setLen(result, n)
|
||||
else:
|
||||
var buf: array[0..80, char]
|
||||
discard c_snprintf(buf.cstring, buf.len.uint, "%#.16e%s", f, literalPostfix.cstring)
|
||||
result = $buf.cstring
|
||||
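# The `when` branch is needed because, with nimNoArrayToCstringConversion, a
# char array no longer converts to cstring implicitly. A reduced standalone
# sketch of the same pattern (mirrors the snprintf import used above):
proc c_snprintf(s: cstring; n: uint; frmt: cstring): cint {.importc: "snprintf", header: "<stdio.h>", nodecl, varargs.}

proc fmtFloat(f: float): string =
  result = newString(81)
  let n = c_snprintf(result.cstring, result.len.uint, "%#.16e", f)
  setLen(result, n)

echo fmtFloat(0.5)   # 5.0000000000000000e-01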
|
||||
proc encodeStr*(s: string, result: var string) =
|
||||
for i in countup(0, len(s) - 1):
|
||||
@@ -133,4 +138,3 @@ iterator decodeStrArray*(s: cstring): string =
|
||||
while s[i] != '\0':
|
||||
yield decodeStr(s, i)
|
||||
if s[i] == ' ': inc i
|
||||
|
||||
|
||||
@@ -13,8 +13,8 @@
|
||||
|
||||
import
|
||||
intsets, os, options, strutils, nversion, ast, astalgo, msgs, platform,
|
||||
condsyms, ropes, idents, securehash, rodread, passes, importer, idgen,
|
||||
rodutils
|
||||
condsyms, ropes, idents, securehash, rodread, passes, idgen,
|
||||
rodutils, modulepaths
|
||||
|
||||
from modulegraphs import ModuleGraph
|
||||
|
||||
@@ -245,10 +245,14 @@ proc encodeType(w: PRodWriter, t: PType, result: var string) =
|
||||
add(result, '\17')
|
||||
encodeVInt(t.assignment.id, result)
|
||||
pushSym(w, t.assignment)
|
||||
for i, s in items(t.methods):
|
||||
if t.sink != nil:
|
||||
add(result, '\18')
|
||||
encodeVInt(i, result)
|
||||
encodeVInt(t.sink.id, result)
|
||||
pushSym(w, t.sink)
|
||||
for i, s in items(t.methods):
|
||||
add(result, '\19')
|
||||
encodeVInt(i, result)
|
||||
add(result, '\20')
|
||||
encodeVInt(s.id, result)
|
||||
pushSym(w, s)
|
||||
encodeLoc(w, t.loc, result)
|
||||
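# The writer above and the reader in decodeType stay in sync via one-byte tags
# ('\17' assignment, '\18' sink, '\19'/'\20' method entries). A toy sketch of
# that tagged-optional-field idea (not the real rod format):
proc encodeTagged(tag: char; id: int; dest: var string) =
  if id != 0:            # only present fields are written, preceded by their tag
    dest.add tag
    dest.add $id

var buf = ""
encodeTagged('\17', 42, buf)   # '=' (assignment) op present, symbol id 42
encodeTagged('\18', 0, buf)    # no '=sink' op: nothing is emitted
# a matching reader peeks at the next byte and only decodes a field
# when it sees the corresponding tag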
|
||||
@@ -79,7 +79,7 @@ proc setupVM*(module: PSym; cache: IdentCache; scriptName: string;
|
||||
setResult(a, osproc.execCmd getString(a, 0))
|
||||
|
||||
cbconf getEnv:
|
||||
setResult(a, os.getEnv(a.getString 0))
|
||||
setResult(a, os.getEnv(a.getString 0, a.getString 1))
|
||||
cbconf existsEnv:
|
||||
setResult(a, os.existsEnv(a.getString 0))
|
||||
cbconf dirExists:
|
||||
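# With the default forwarded, a .nims configuration script can now write, e.g.
# (NIM_CC is a hypothetical environment variable):
let cc = getEnv("NIM_CC", "gcc")   # falls back to "gcc" when NIM_CC is unset
switch("cc", cc)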
|
||||
@@ -12,7 +12,7 @@
|
||||
import
|
||||
ast, strutils, hashes, options, lexer, astalgo, trees, treetab,
|
||||
wordrecg, ropes, msgs, os, condsyms, idents, renderer, types, platform, math,
|
||||
magicsys, parser, nversion, nimsets, semfold, importer,
|
||||
magicsys, parser, nversion, nimsets, semfold, modulepaths, importer,
|
||||
procfind, lookups, rodread, pragmas, passes, semdata, semtypinst, sigmatch,
|
||||
intsets, transf, vmdef, vm, idgen, aliases, cgmeth, lambdalifting,
|
||||
evaltempl, patterns, parampatterns, sempass2, nimfix.pretty, semmacrosanity,
|
||||
@@ -122,7 +122,7 @@ proc commonType*(x, y: PType): PType =
|
||||
if a.sons[idx].kind == tyEmpty: return y
|
||||
elif a.kind == tyTuple and b.kind == tyTuple and a.len == b.len:
|
||||
var nt: PType
|
||||
for i in 0.. <a.len:
|
||||
for i in 0..<a.len:
|
||||
let aEmpty = isEmptyContainer(a.sons[i])
|
||||
let bEmpty = isEmptyContainer(b.sons[i])
|
||||
if aEmpty != bEmpty:
|
||||
@@ -522,14 +522,18 @@ proc semStmtAndGenerateGenerics(c: PContext, n: PNode): PNode =
|
||||
else:
|
||||
result = n
|
||||
result = semStmt(c, result)
|
||||
# BUGFIX: process newly generated generics here, not at the end!
|
||||
if c.lastGenericIdx < c.generics.len:
|
||||
var a = newNodeI(nkStmtList, n.info)
|
||||
addCodeForGenerics(c, a)
|
||||
if sonsLen(a) > 0:
|
||||
# a generic has been added to `a`:
|
||||
if result.kind != nkEmpty: addSon(a, result)
|
||||
result = a
|
||||
when false:
|
||||
# Code generators are lazy now and can deal with undeclared procs, so these
|
||||
# steps are not required anymore and actually harmful for the upcoming
|
||||
# destructor support.
|
||||
# BUGFIX: process newly generated generics here, not at the end!
|
||||
if c.lastGenericIdx < c.generics.len:
|
||||
var a = newNodeI(nkStmtList, n.info)
|
||||
addCodeForGenerics(c, a)
|
||||
if sonsLen(a) > 0:
|
||||
# a generic has been added to `a`:
|
||||
if result.kind != nkEmpty: addSon(a, result)
|
||||
result = a
|
||||
result = hloStmt(c, result)
|
||||
if gCmd == cmdInteractive and not isEmptyType(result.typ):
|
||||
result = buildEchoStmt(c, result)
|
||||
|
||||
@@ -22,7 +22,8 @@ type
|
||||
recurse: bool
|
||||
|
||||
proc liftBodyAux(c: var TLiftCtx; t: PType; body, x, y: PNode)
|
||||
proc liftBody(c: PContext; typ: PType; info: TLineInfo): PSym
|
||||
proc liftBody(c: PContext; typ: PType; kind: TTypeAttachedOp;
|
||||
info: TLineInfo): PSym {.discardable.}
|
||||
|
||||
proc at(a, i: PNode, elemType: PType): PNode =
|
||||
result = newNodeI(nkBracketExpr, a.info, 2)
|
||||
@@ -31,7 +32,7 @@ proc at(a, i: PNode, elemType: PType): PNode =
|
||||
result.typ = elemType
|
||||
|
||||
proc liftBodyTup(c: var TLiftCtx; t: PType; body, x, y: PNode) =
|
||||
for i in 0 .. <t.len:
|
||||
for i in 0 ..< t.len:
|
||||
let lit = lowerings.newIntLit(i)
|
||||
liftBodyAux(c, t.sons[i], body, x.at(lit, t.sons[i]), y.at(lit, t.sons[i]))
|
||||
|
||||
@@ -57,7 +58,7 @@ proc liftBodyObj(c: var TLiftCtx; n, body, x, y: PNode) =
|
||||
var access = dotField(x, n[0].sym)
|
||||
caseStmt.add(access)
|
||||
# copy the branches over, but replace the fields with the for loop body:
|
||||
for i in 1 .. <n.len:
|
||||
for i in 1 ..< n.len:
|
||||
var branch = copyTree(n[i])
|
||||
let L = branch.len
|
||||
branch.sons[L-1] = newNodeI(nkStmtList, c.info)
|
||||
@@ -92,13 +93,41 @@ proc newAsgnStmt(le, ri: PNode): PNode =
|
||||
result.sons[0] = le
|
||||
result.sons[1] = ri
|
||||
|
||||
proc newDestructorCall(op: PSym; x: PNode): PNode =
|
||||
proc newOpCall(op: PSym; x: PNode): PNode =
|
||||
result = newNodeIT(nkCall, x.info, op.typ.sons[0])
|
||||
result.add(newSymNode(op))
|
||||
result.add x
|
||||
|
||||
proc destructorCall(c: PContext; op: PSym; x: PNode): PNode =
|
||||
result = newNodeIT(nkCall, x.info, op.typ.sons[0])
|
||||
result.add(newSymNode(op))
|
||||
if newDestructors:
|
||||
result.add genAddr(c, x)
|
||||
else:
|
||||
result.add x
|
||||
|
||||
proc newDeepCopyCall(op: PSym; x, y: PNode): PNode =
|
||||
result = newAsgnStmt(x, newDestructorCall(op, y))
|
||||
result = newAsgnStmt(x, newOpCall(op, y))
|
||||
|
||||
proc considerAsgnOrSink(c: var TLiftCtx; t: PType; body, x, y: PNode;
|
||||
field: PSym): bool =
|
||||
if tfHasAsgn in t.flags:
|
||||
var op: PSym
|
||||
if sameType(t, c.asgnForType):
|
||||
# generate recursive call:
|
||||
if c.recurse:
|
||||
op = c.fn
|
||||
else:
|
||||
c.recurse = true
|
||||
return false
|
||||
else:
|
||||
op = field
|
||||
if op == nil:
|
||||
op = liftBody(c.c, t, c.kind, c.info)
|
||||
markUsed(c.info, op, c.c.graph.usageSym)
|
||||
styleCheckUse(c.info, op)
|
||||
body.add newAsgnCall(c.c, op, x, y)
|
||||
result = true
|
||||
|
||||
proc considerOverloadedOp(c: var TLiftCtx; t: PType; body, x, y: PNode): bool =
|
||||
case c.kind
|
||||
@@ -107,26 +136,12 @@ proc considerOverloadedOp(c: var TLiftCtx; t: PType; body, x, y: PNode): bool =
|
||||
if op != nil:
|
||||
markUsed(c.info, op, c.c.graph.usageSym)
|
||||
styleCheckUse(c.info, op)
|
||||
body.add newDestructorCall(op, x)
|
||||
body.add destructorCall(c.c, op, x)
|
||||
result = true
|
||||
of attachedAsgn:
|
||||
if tfHasAsgn in t.flags:
|
||||
var op: PSym
|
||||
if sameType(t, c.asgnForType):
|
||||
# generate recursive call:
|
||||
if c.recurse:
|
||||
op = c.fn
|
||||
else:
|
||||
c.recurse = true
|
||||
return false
|
||||
else:
|
||||
op = t.assignment
|
||||
if op == nil:
|
||||
op = liftBody(c.c, t, c.info)
|
||||
markUsed(c.info, op, c.c.graph.usageSym)
|
||||
styleCheckUse(c.info, op)
|
||||
body.add newAsgnCall(c.c, op, x, y)
|
||||
result = true
|
||||
result = considerAsgnOrSink(c, t, body, x, y, t.assignment)
|
||||
of attachedSink:
|
||||
result = considerAsgnOrSink(c, t, body, x, y, t.sink)
|
||||
of attachedDeepCopy:
|
||||
let op = t.deepCopy
|
||||
if op != nil:
|
||||
@@ -188,7 +203,7 @@ proc liftBodyAux(c: var TLiftCtx; t: PType; body, x, y: PNode) =
|
||||
tyPtr, tyString, tyRef, tyOpt:
|
||||
defaultOp(c, t, body, x, y)
|
||||
of tyArray, tySequence:
|
||||
if tfHasAsgn in t.flags:
|
||||
if {tfHasAsgn, tfUncheckedArray} * t.flags == {tfHasAsgn}:
|
||||
if t.kind == tySequence:
|
||||
# XXX add 'nil' handling here
|
||||
body.add newSeqCall(c.c, x, y)
|
||||
@@ -245,12 +260,20 @@ proc addParam(procType: PType; param: PSym) =
|
||||
addSon(procType.n, newSymNode(param))
|
||||
rawAddSon(procType, param.typ)
|
||||
|
||||
proc liftBody(c: PContext; typ: PType; info: TLineInfo): PSym =
|
||||
proc liftBody(c: PContext; typ: PType; kind: TTypeAttachedOp;
|
||||
info: TLineInfo): PSym {.discardable.} =
|
||||
var a: TLiftCtx
|
||||
a.info = info
|
||||
a.c = c
|
||||
a.kind = kind
|
||||
let body = newNodeI(nkStmtList, info)
|
||||
result = newSym(skProc, getIdent":lifted=", typ.owner, info)
|
||||
let procname = case kind
|
||||
of attachedAsgn: getIdent"="
|
||||
of attachedSink: getIdent"=sink"
|
||||
of attachedDeepCopy: getIdent"=deepcopy"
|
||||
of attachedDestructor: getIdent"=destroy"
|
||||
|
||||
result = newSym(skProc, procname, typ.owner, info)
|
||||
a.fn = result
|
||||
a.asgnForType = typ
|
||||
|
||||
@@ -261,27 +284,48 @@ proc liftBody(c: PContext; typ: PType; info: TLineInfo): PSym =
|
||||
|
||||
result.typ = newProcType(info, typ.owner)
|
||||
result.typ.addParam dest
|
||||
result.typ.addParam src
|
||||
if kind != attachedDestructor:
|
||||
result.typ.addParam src
|
||||
|
||||
liftBodyAux(a, typ, body, newSymNode(dest).newDeref, newSymNode(src))
|
||||
# recursion is handled explicitly, do not register the type based operation
|
||||
# before 'liftBodyAux':
|
||||
case kind
|
||||
of attachedAsgn: typ.assignment = result
|
||||
of attachedSink: typ.sink = result
|
||||
of attachedDeepCopy: typ.deepCopy = result
|
||||
of attachedDestructor: typ.destructor = result
|
||||
|
||||
var n = newNodeI(nkProcDef, info, bodyPos+1)
|
||||
for i in 0 .. < n.len: n.sons[i] = emptyNode
|
||||
for i in 0 ..< n.len: n.sons[i] = emptyNode
|
||||
n.sons[namePos] = newSymNode(result)
|
||||
n.sons[paramsPos] = result.typ.n
|
||||
n.sons[bodyPos] = body
|
||||
result.ast = n
|
||||
|
||||
# register late as recursion is handled differently
|
||||
typ.assignment = result
|
||||
#echo "Produced this ", n
|
||||
|
||||
proc getAsgnOrLiftBody(c: PContext; typ: PType; info: TLineInfo): PSym =
|
||||
let t = typ.skipTypes({tyGenericInst, tyVar, tyAlias})
|
||||
result = t.assignment
|
||||
if result.isNil:
|
||||
result = liftBody(c, t, info)
|
||||
result = liftBody(c, t, attachedAsgn, info)
|
||||
|
||||
proc overloadedAsgn(c: PContext; dest, src: PNode): PNode =
|
||||
let a = getAsgnOrLiftBody(c, dest.typ, dest.info)
|
||||
result = newAsgnCall(c, a, dest, src)
|
||||
|
||||
proc liftTypeBoundOps*(c: PContext; typ: PType; info: TLineInfo) =
|
||||
## In the semantic pass this is called in strategic places
|
||||
## to ensure we lift assignment, destructors and moves properly.
|
||||
## The later 'destroyer' pass depends on it.
|
||||
if not newDestructors or not hasDestructor(typ): return
|
||||
# do not produce wrong liftings while we're still instantiating generics:
|
||||
if c.typesWithOps.len > 0: return
|
||||
let typ = typ.skipTypes({tyGenericInst, tyAlias})
|
||||
# we generate the destructor first so that other operators can depend on it:
|
||||
if typ.destructor == nil:
|
||||
liftBody(c, typ, attachedDestructor, info)
|
||||
if typ.assignment == nil:
|
||||
liftBody(c, typ, attachedAsgn, info)
|
||||
if typ.sink == nil:
|
||||
liftBody(c, typ, attachedSink, info)
|
||||
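# These lifted procs are the compiler-generated counterparts of the
# user-overridable type-bound operators; hand-written, the hooks look roughly
# like this (illustrative sketch only):
type
  Resource = object
    data: pointer

proc `=destroy`(r: var Resource) =
  if r.data != nil:
    dealloc(r.data)
    r.data = nil

proc `=`(dst: var Resource; src: Resource) =
  # user-provided copy; a real implementation would duplicate src.data
  dst.data = nil

proc `=sink`(dst: var Resource; src: Resource) =
  # move: steal the payload instead of copying it
  dst.data = src.data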
|
||||
@@ -27,7 +27,7 @@ proc sameMethodDispatcher(a, b: PSym): bool =
|
||||
# method collide[T](a: TThing, b: TUnit[T]) is instantiated and not
|
||||
# method collide[T](a: TUnit[T], b: TThing)! This means we need to
|
||||
# *instantiate* every candidate! However, we don't keep more than 2-3
|
||||
# candidated around so we cannot implement that for now. So in order
|
||||
# candidates around so we cannot implement that for now. So in order
|
||||
# to avoid subtle problems, the call remains ambiguous and needs to
|
||||
# be disambiguated by the programmer; this way the right generic is
|
||||
# instantiated.
|
||||
@@ -90,6 +90,10 @@ proc pickBestCandidate(c: PContext, headSymbol: PNode,
|
||||
if c.currentScope.symbols.counter == counterInitial or syms != nil:
|
||||
matches(c, n, orig, z)
|
||||
if z.state == csMatch:
|
||||
#if sym.name.s == "==" and (n.info ?? "temp3"):
|
||||
# echo typeToString(sym.typ)
|
||||
# writeMatches(z)
|
||||
|
||||
# little hack so that iterators are preferred over everything else:
|
||||
if sym.kind == skIterator: inc(z.exactMatches, 200)
|
||||
case best.state
|
||||
@@ -306,7 +310,7 @@ proc instGenericConvertersArg*(c: PContext, a: PNode, x: TCandidate) =
|
||||
proc instGenericConvertersSons*(c: PContext, n: PNode, x: TCandidate) =
|
||||
assert n.kind in nkCallKinds
|
||||
if x.genericConverter:
|
||||
for i in 1 .. <n.len:
|
||||
for i in 1 ..< n.len:
|
||||
instGenericConvertersArg(c, n.sons[i], x)
|
||||
|
||||
proc indexTypesMatch(c: PContext, f, a: PType, arg: PNode): PNode =
|
||||
@@ -490,7 +494,7 @@ proc searchForBorrowProc(c: PContext, startScope: PScope, fn: PSym): PSym =
|
||||
var call = newNodeI(nkCall, fn.info)
|
||||
var hasDistinct = false
|
||||
call.add(newIdentNode(fn.name, fn.info))
|
||||
for i in 1.. <fn.typ.n.len:
|
||||
for i in 1..<fn.typ.n.len:
|
||||
let param = fn.typ.n.sons[i]
|
||||
let t = skipTypes(param.typ, abstractVar-{tyTypeDesc, tyDistinct})
|
||||
if t.kind == tyDistinct or param.typ.kind == tyDistinct: hasDistinct = true
|
||||
|
||||
@@ -37,7 +37,6 @@ type
|
||||
# in standalone ``except`` and ``finally``
|
||||
next*: PProcCon # used for stacking procedure contexts
|
||||
wasForwarded*: bool # whether the current proc has a separate header
|
||||
bracketExpr*: PNode # current bracket expression (for ^ support)
|
||||
mapping*: TIdTable
|
||||
|
||||
TMatchedConcept* = object
|
||||
@@ -70,6 +69,7 @@ type
|
||||
|
||||
TTypeAttachedOp* = enum
|
||||
attachedAsgn,
|
||||
attachedSink,
|
||||
attachedDeepCopy,
|
||||
attachedDestructor
|
||||
|
||||
@@ -131,6 +131,11 @@ type
|
||||
recursiveDep*: string
|
||||
suggestionsMade*: bool
|
||||
inTypeContext*: int
|
||||
typesWithOps*: seq[(PType, PType)] #\
|
||||
# We need to instantiate the type bound ops lazily after
|
||||
# the generic type has been constructed completely. See
|
||||
# tests/destructor/topttree.nim for an example that
|
||||
# would otherwise fail.
|
||||
|
||||
proc makeInstPair*(s: PSym, inst: PInstantiation): TInstantiationPair =
|
||||
result.genericSym = s
|
||||
@@ -218,6 +223,7 @@ proc newContext*(graph: ModuleGraph; module: PSym; cache: IdentCache): PContext
|
||||
result.cache = cache
|
||||
result.graph = graph
|
||||
initStrTable(result.signatures)
|
||||
result.typesWithOps = @[]
|
||||
|
||||
|
||||
proc inclSym(sq: var TSymSeq, s: PSym) =
|
||||
@@ -333,7 +339,7 @@ proc makeNotType*(c: PContext, t1: PType): PType =
|
||||
|
||||
proc nMinusOne*(n: PNode): PNode =
|
||||
result = newNode(nkCall, n.info, @[
|
||||
newSymNode(getSysMagic("<", mUnaryLt)),
|
||||
newSymNode(getSysMagic("pred", mPred)),
|
||||
n])
|
||||
|
||||
# Remember to fix the procs below this one when you make changes!
|
||||
|
||||
@@ -30,7 +30,7 @@ proc instantiateDestructor(c: PContext, typ: PType): PType
|
||||
proc doDestructorStuff(c: PContext, s: PSym, n: PNode) =
|
||||
var t = s.typ.sons[1].skipTypes({tyVar})
|
||||
if t.kind == tyGenericInvocation:
|
||||
for i in 1 .. <t.sonsLen:
|
||||
for i in 1 ..< t.sonsLen:
|
||||
if t.sons[i].kind != tyGenericParam:
|
||||
localError(n.info, errDestructorNotGenericEnough)
|
||||
return
|
||||
@@ -184,62 +184,3 @@ proc createDestructorCall(c: PContext, s: PSym): PNode =
|
||||
useSym(destructableT.destructor, c.graph.usageSym),
|
||||
useSym(s, c.graph.usageSym)]))
|
||||
result = newNode(nkDefer, s.info, @[call])
|
||||
|
||||
proc insertDestructors(c: PContext,
|
||||
varSection: PNode): tuple[outer, inner: PNode] =
|
||||
# Accepts a var or let section.
|
||||
#
|
||||
# When a var section has variables with destructors
|
||||
# the var section is split up and finally blocks are inserted
  # immediately after all "destructible" vars
  #
  # In case there were no destructible variables, the proc returns
  # (nil, nil) and the enclosing stmt-list requires no modifications.
#
|
||||
# Otherwise, after the try blocks are created, the rest of the enclosing
|
||||
# stmt-list should be inserted in the most `inner` such block (corresponding
|
||||
# to the last variable).
|
||||
#
|
||||
# `outer` is a statement list that should replace the original var section.
|
||||
# It will include the new truncated var section followed by the outermost
|
||||
# try block.
|
||||
let totalVars = varSection.sonsLen
|
||||
for j in countup(0, totalVars - 1):
|
||||
let
|
||||
varId = varSection[j][0]
|
||||
varTyp = varId.sym.typ
|
||||
info = varId.info
|
||||
|
||||
if varTyp == nil or sfGlobal in varId.sym.flags: continue
|
||||
let destructableT = instantiateDestructor(c, varTyp)
|
||||
|
||||
if destructableT != nil:
|
||||
var tryStmt = newNodeI(nkTryStmt, info)
|
||||
|
||||
if j < totalVars - 1:
|
||||
var remainingVars = newNodeI(varSection.kind, info)
|
||||
remainingVars.sons = varSection.sons[(j+1)..varSection.len-1]
|
||||
let (outer, inner) = insertDestructors(c, remainingVars)
|
||||
if outer != nil:
|
||||
tryStmt.addSon(outer)
|
||||
result.inner = inner
|
||||
else:
|
||||
result.inner = newNodeI(nkStmtList, info)
|
||||
result.inner.addSon(remainingVars)
|
||||
tryStmt.addSon(result.inner)
|
||||
else:
|
||||
result.inner = newNodeI(nkStmtList, info)
|
||||
tryStmt.addSon(result.inner)
|
||||
|
||||
tryStmt.addSon(
|
||||
newNode(nkFinally, info, @[
|
||||
semStmt(c, newNode(nkCall, info, @[
|
||||
useSym(destructableT.destructor, c.graph.usageSym),
|
||||
useSym(varId.sym, c.graph.usageSym)]))]))
|
||||
|
||||
result.outer = newNodeI(nkStmtList, info)
|
||||
varSection.sons.setLen(j+1)
|
||||
result.outer.addSon(varSection)
|
||||
result.outer.addSon(tryStmt)
|
||||
|
||||
return
|
||||
|
||||
@@ -436,12 +436,12 @@ proc semArrayConstr(c: PContext, n: PNode, flags: TExprFlags): PNode =
|
||||
#addSon(result, fitNode(c, typ, n.sons[i]))
|
||||
inc(lastIndex)
|
||||
addSonSkipIntLit(result.typ, typ)
|
||||
for i in 0 .. <result.len:
|
||||
for i in 0 ..< result.len:
|
||||
result.sons[i] = fitNode(c, typ, result.sons[i], result.sons[i].info)
|
||||
result.typ.sons[0] = makeRangeType(c, 0, sonsLen(result) - 1, n.info)
|
||||
|
||||
proc fixAbstractType(c: PContext, n: PNode) =
|
||||
for i in 1 .. < n.len:
|
||||
for i in 1 ..< n.len:
|
||||
let it = n.sons[i]
|
||||
# do not get rid of nkHiddenSubConv for OpenArrays, the codegen needs it:
|
||||
if it.kind == nkHiddenSubConv and
|
||||
@@ -539,7 +539,7 @@ proc evalAtCompileTime(c: PContext, n: PNode): PNode =
|
||||
var call = newNodeIT(nkCall, n.info, n.typ)
|
||||
call.add(n.sons[0])
|
||||
var allConst = true
|
||||
for i in 1 .. < n.len:
|
||||
for i in 1 ..< n.len:
|
||||
var a = getConstExpr(c.module, n.sons[i])
|
||||
if a == nil:
|
||||
allConst = false
|
||||
@@ -557,7 +557,7 @@ proc evalAtCompileTime(c: PContext, n: PNode): PNode =
|
||||
# done until we have a more robust infrastructure for
|
||||
# implicit statics.
|
||||
if n.len > 1:
|
||||
for i in 1 .. <n.len:
|
||||
for i in 1 ..< n.len:
        # see bug #2113: n[i].typ may be nil for erroneous code:
if n[i].typ.isNil or n[i].typ.kind != tyStatic or
|
||||
tfUnresolved notin n[i].typ.flags:
|
||||
@@ -579,7 +579,7 @@ proc evalAtCompileTime(c: PContext, n: PNode): PNode =
|
||||
|
||||
var call = newNodeIT(nkCall, n.info, n.typ)
|
||||
call.add(n.sons[0])
|
||||
for i in 1 .. < n.len:
|
||||
for i in 1 ..< n.len:
|
||||
let a = getConstExpr(c.module, n.sons[i])
|
||||
if a == nil: return n
|
||||
call.add(a)
|
||||
@@ -653,7 +653,7 @@ proc bracketedMacro(n: PNode): PSym =
|
||||
result = nil
|
||||
|
||||
proc setGenericParams(c: PContext, n: PNode) =
|
||||
for i in 1 .. <n.len:
|
||||
for i in 1 ..< n.len:
|
||||
n[i].typ = semTypeNode(c, n[i], nil)
|
||||
|
||||
proc afterCallActions(c: PContext; n, orig: PNode, flags: TExprFlags): PNode =
|
||||
@@ -669,6 +669,7 @@ proc afterCallActions(c: PContext; n, orig: PNode, flags: TExprFlags): PNode =
|
||||
analyseIfAddressTakenInCall(c, result)
|
||||
if callee.magic != mNone:
|
||||
result = magicsAfterOverloadResolution(c, result, flags)
|
||||
if result.typ != nil: liftTypeBoundOps(c, result.typ, n.info)
|
||||
if c.matchedConcept == nil:
|
||||
result = evalAtCompileTime(c, result)
|
||||
|
||||
@@ -1187,7 +1188,6 @@ proc semSubscript(c: PContext, n: PNode, flags: TExprFlags): PNode =
|
||||
tyCString:
|
||||
if n.len != 2: return nil
|
||||
n.sons[0] = makeDeref(n.sons[0])
|
||||
c.p.bracketExpr = n.sons[0]
|
||||
for i in countup(1, sonsLen(n) - 1):
|
||||
n.sons[i] = semExprWithType(c, n.sons[i],
|
||||
flags*{efInTypeof, efDetermineType})
|
||||
@@ -1208,7 +1208,6 @@ proc semSubscript(c: PContext, n: PNode, flags: TExprFlags): PNode =
|
||||
of tyTuple:
|
||||
if n.len != 2: return nil
|
||||
n.sons[0] = makeDeref(n.sons[0])
|
||||
c.p.bracketExpr = n.sons[0]
|
||||
# [] operator for tuples requires constant expression:
|
||||
n.sons[1] = semConstExpr(c, n.sons[1])
|
||||
if skipTypes(n.sons[1].typ, {tyGenericInst, tyRange, tyOrdinal, tyAlias}).kind in
|
||||
@@ -1246,17 +1245,13 @@ proc semSubscript(c: PContext, n: PNode, flags: TExprFlags): PNode =
|
||||
of skType:
|
||||
result = symNodeFromType(c, semTypeNode(c, n, nil), n.info)
|
||||
else:
|
||||
c.p.bracketExpr = n.sons[0]
|
||||
else:
|
||||
c.p.bracketExpr = n.sons[0]
|
||||
discard
|
||||
|
||||
proc semArrayAccess(c: PContext, n: PNode, flags: TExprFlags): PNode =
|
||||
let oldBracketExpr = c.p.bracketExpr
|
||||
result = semSubscript(c, n, flags)
|
||||
if result == nil:
|
||||
# overloaded [] operator:
|
||||
result = semExpr(c, buildOverloadedSubscripts(n, getIdent"[]"))
|
||||
c.p.bracketExpr = oldBracketExpr
|
||||
|
||||
proc propertyWriteAccess(c: PContext, n, nOrig, a: PNode): PNode =
|
||||
var id = considerQuotedIdent(a[1], a)
|
||||
@@ -1295,10 +1290,13 @@ proc takeImplicitAddr(c: PContext, n: PNode): PNode =
|
||||
proc asgnToResultVar(c: PContext, n, le, ri: PNode) {.inline.} =
|
||||
if le.kind == nkHiddenDeref:
|
||||
var x = le.sons[0]
|
||||
if x.typ.kind == tyVar and x.kind == nkSym and x.sym.kind == skResult:
|
||||
n.sons[0] = x # 'result[]' --> 'result'
|
||||
n.sons[1] = takeImplicitAddr(c, ri)
|
||||
x.typ.flags.incl tfVarIsPtr
|
||||
if x.typ.kind == tyVar and x.kind == nkSym:
|
||||
if x.sym.kind == skResult:
|
||||
n.sons[0] = x # 'result[]' --> 'result'
|
||||
n.sons[1] = takeImplicitAddr(c, ri)
|
||||
if x.sym.kind != skParam:
|
||||
# XXX This is hacky. See bug #4910.
|
||||
x.typ.flags.incl tfVarIsPtr
|
||||
#echo x.info, " setting it for this type ", typeToString(x.typ), " ", n.info
|
||||
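# The 'result[]' --> 'result' rewrite above concerns routines returning var T;
# a minimal illustration of the user-level pattern (sketch):
var data = @[1, 2, 3]

proc firstItem(s: var seq[int]): var int =
  result = s[0]

firstItem(data) = 42   # writes through the returned location
echo data              # @[42, 2, 3]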
|
||||
template resultTypeIsInferrable(typ: PType): untyped =
|
||||
@@ -1325,7 +1323,6 @@ proc semAsgn(c: PContext, n: PNode; mode=asgnNormal): PNode =
|
||||
of nkBracketExpr:
|
||||
# a[i] = x
|
||||
# --> `[]=`(a, i, x)
|
||||
let oldBracketExpr = c.p.bracketExpr
|
||||
a = semSubscript(c, a, {efLValue})
|
||||
if a == nil:
|
||||
result = buildOverloadedSubscripts(n.sons[0], getIdent"[]=")
|
||||
@@ -1335,9 +1332,7 @@ proc semAsgn(c: PContext, n: PNode; mode=asgnNormal): PNode =
|
||||
return n
|
||||
else:
|
||||
result = semExprNoType(c, result)
|
||||
c.p.bracketExpr = oldBracketExpr
|
||||
return result
|
||||
c.p.bracketExpr = oldBracketExpr
|
||||
of nkCurlyExpr:
|
||||
# a{i} = x --> `{}=`(a, i, x)
|
||||
result = buildOverloadedSubscripts(n.sons[0], getIdent"{}=")
|
||||
@@ -1382,9 +1377,12 @@ proc semAsgn(c: PContext, n: PNode; mode=asgnNormal): PNode =
|
||||
typeMismatch(n.info, lhs.typ, rhs.typ)
|
||||
|
||||
n.sons[1] = fitNode(c, le, rhs, n.info)
|
||||
if tfHasAsgn in lhs.typ.flags and not lhsIsResult and
|
||||
mode != noOverloadedAsgn:
|
||||
return overloadedAsgn(c, lhs, n.sons[1])
|
||||
if not newDestructors:
|
||||
if tfHasAsgn in lhs.typ.flags and not lhsIsResult and
|
||||
mode != noOverloadedAsgn:
|
||||
return overloadedAsgn(c, lhs, n.sons[1])
|
||||
else:
|
||||
liftTypeBoundOps(c, lhs.typ, lhs.info)
|
||||
|
||||
fixAbstractType(c, n)
|
||||
asgnToResultVar(c, n, n.sons[0], n.sons[1])
|
||||
@@ -1456,7 +1454,7 @@ proc semYieldVarResult(c: PContext, n: PNode, restype: PType) =
|
||||
|
||||
n.sons[0] = takeImplicitAddr(c, n.sons[0])
|
||||
of tyTuple:
|
||||
for i in 0.. <t.sonsLen:
|
||||
for i in 0..<t.sonsLen:
|
||||
var e = skipTypes(t.sons[i], {tyGenericInst, tyAlias})
|
||||
if e.kind == tyVar:
|
||||
if n.sons[0].kind == nkPar:
|
||||
@@ -1649,7 +1647,7 @@ proc processQuotations(n: var PNode, op: string,
|
||||
elif n.kind == nkAccQuoted and op == "``":
|
||||
returnQuote n[0]
|
||||
|
||||
for i in 0 .. <n.safeLen:
|
||||
for i in 0 ..< n.safeLen:
|
||||
processQuotations(n.sons[i], op, quotes, ids)
|
||||
|
||||
proc semQuoteAst(c: PContext, n: PNode): PNode =
|
||||
@@ -1657,7 +1655,7 @@ proc semQuoteAst(c: PContext, n: PNode): PNode =
|
||||
# We transform the do block into a template with a param for
|
||||
# each interpolation. We'll pass this template to getAst.
|
||||
var
|
||||
quotedBlock = n{-1}
|
||||
quotedBlock = n[^1]
|
||||
op = if n.len == 3: expectString(c, n[1]) else: "``"
|
||||
quotes = newSeq[PNode](1)
|
||||
# the quotes will be added to a nkCall statement
|
||||
@@ -1817,7 +1815,7 @@ proc semMagic(c: PContext, n: PNode, s: PSym, flags: TExprFlags): PNode =
|
||||
dec c.inParallelStmt
|
||||
of mSpawn:
|
||||
result = setMs(n, s)
|
||||
for i in 1 .. <n.len:
|
||||
for i in 1 ..< n.len:
|
||||
result.sons[i] = semExpr(c, n.sons[i])
|
||||
let typ = result[^1].typ
|
||||
if not typ.isEmptyType:
|
||||
@@ -2068,7 +2066,7 @@ proc semBlock(c: PContext, n: PNode): PNode =
|
||||
proc semExport(c: PContext, n: PNode): PNode =
|
||||
var x = newNodeI(n.kind, n.info)
|
||||
#let L = if n.kind == nkExportExceptStmt: L = 1 else: n.len
|
||||
for i in 0.. <n.len:
|
||||
for i in 0..<n.len:
|
||||
let a = n.sons[i]
|
||||
var o: TOverloadIter
|
||||
var s = initOverloadIter(o, c, a)
|
||||
@@ -2099,7 +2097,7 @@ proc shouldBeBracketExpr(n: PNode): bool =
|
||||
let b = a[0]
|
||||
if b.kind in nkSymChoices:
|
||||
for i in 0..<b.len:
|
||||
if b[i].sym.magic == mArrGet:
|
||||
if b[i].kind == nkSym and b[i].sym.magic == mArrGet:
|
||||
let be = newNodeI(nkBracketExpr, n.info)
|
||||
for i in 1..<a.len: be.add(a[i])
|
||||
n.sons[0] = be
|
||||
@@ -2356,6 +2354,10 @@ proc semExpr(c: PContext, n: PNode, flags: TExprFlags = {}): PNode =
|
||||
if not n.sons[0].typ.isEmptyType and not implicitlyDiscardable(n.sons[0]):
|
||||
localError(n.info, errGenerated, "'defer' takes a 'void' expression")
|
||||
#localError(n.info, errGenerated, "'defer' not allowed in this context")
|
||||
of nkGotoState, nkState:
|
||||
if n.len != 1 and n.len != 2: illFormedAst(n)
|
||||
for i in 0 ..< n.len:
|
||||
n.sons[i] = semExpr(c, n.sons[i])
|
||||
else:
|
||||
localError(n.info, errInvalidExpressionX,
|
||||
renderTree(n, {renderNoComments}))
|
||||
|
||||
@@ -89,7 +89,7 @@ proc semForObjectFields(c: TFieldsCtx, typ, forLoop, father: PNode) =
|
||||
access.sons[1] = newSymNode(typ.sons[0].sym, forLoop.info)
|
||||
caseStmt.add(semExprWithType(c.c, access))
|
||||
# copy the branches over, but replace the fields with the for loop body:
|
||||
for i in 1 .. <typ.len:
|
||||
for i in 1 ..< typ.len:
|
||||
var branch = copyTree(typ[i])
|
||||
let L = branch.len
|
||||
branch.sons[L-1] = newNodeI(nkStmtList, forLoop.info)
|
||||
|
||||
@@ -121,14 +121,14 @@ proc freshGenSyms(n: PNode, owner, orig: PSym, symMap: var TIdTable) =
|
||||
idTablePut(symMap, s, x)
|
||||
n.sym = x
|
||||
else:
|
||||
for i in 0 .. <safeLen(n): freshGenSyms(n.sons[i], owner, orig, symMap)
|
||||
for i in 0 ..< safeLen(n): freshGenSyms(n.sons[i], owner, orig, symMap)
|
||||
|
||||
proc addParamOrResult(c: PContext, param: PSym, kind: TSymKind)
|
||||
|
||||
proc instantiateBody(c: PContext, n, params: PNode, result, orig: PSym) =
|
||||
if n.sons[bodyPos].kind != nkEmpty:
|
||||
let procParams = result.typ.n
|
||||
for i in 1 .. <procParams.len:
|
||||
for i in 1 ..< procParams.len:
|
||||
addDecl(c, procParams[i].sym)
|
||||
maybeAddResult(c, result, result.ast)
|
||||
|
||||
@@ -138,7 +138,7 @@ proc instantiateBody(c: PContext, n, params: PNode, result, orig: PSym) =
|
||||
var symMap: TIdTable
|
||||
initIdTable symMap
|
||||
if params != nil:
|
||||
for i in 1 .. <params.len:
|
||||
for i in 1 ..< params.len:
|
||||
let param = params[i].sym
|
||||
if sfGenSym in param.flags:
|
||||
idTablePut(symMap, params[i].sym, result.typ.n[param.position+1].sym)
|
||||
@@ -211,7 +211,7 @@ proc instantiateProcType(c: PContext, pt: TIdTable,
|
||||
let originalParams = result.n
|
||||
result.n = originalParams.shallowCopy
|
||||
|
||||
for i in 1 .. <result.len:
|
||||
for i in 1 ..< result.len:
|
||||
# twrong_field_caching requires these 'resetIdTable' calls:
|
||||
if i > 1:
|
||||
resetIdTable(cl.symMap)
|
||||
|
||||
@@ -42,7 +42,7 @@ proc annotateType*(n: PNode, t: PType) =
|
||||
of nkObjConstr:
|
||||
let x = t.skipTypes(abstractPtrs)
|
||||
n.typ = t
|
||||
for i in 1 .. <n.len:
|
||||
for i in 1 ..< n.len:
|
||||
var j = i-1
|
||||
let field = x.n.ithField(j)
|
||||
if field.isNil:
|
||||
@@ -53,7 +53,7 @@ proc annotateType*(n: PNode, t: PType) =
|
||||
of nkPar:
|
||||
if x.kind == tyTuple:
|
||||
n.typ = t
|
||||
for i in 0 .. <n.len:
|
||||
for i in 0 ..< n.len:
|
||||
if i >= x.len: globalError n.info, "invalid field at index " & $i
|
||||
else: annotateType(n.sons[i], x.sons[i])
|
||||
elif x.kind == tyProc and x.callConv == ccClosure:
|
||||
|
||||
@@ -38,9 +38,7 @@ proc skipAddr(n: PNode): PNode {.inline.} =
|
||||
proc semArrGet(c: PContext; n: PNode; flags: TExprFlags): PNode =
|
||||
result = newNodeI(nkBracketExpr, n.info)
|
||||
for i in 1..<n.len: result.add(n[i])
|
||||
let oldBracketExpr = c.p.bracketExpr
|
||||
result = semSubscript(c, result, flags)
|
||||
c.p.bracketExpr = oldBracketExpr
|
||||
if result.isNil:
|
||||
let x = copyTree(n)
|
||||
x.sons[0] = newIdentNode(getIdent"[]", n.info)
|
||||
@@ -146,8 +144,14 @@ proc evalTypeTrait(traitCall: PNode, operand: PType, context: PSym): PNode =
|
||||
result = res.base.toNode(traitCall.info)
|
||||
of "stripGenericParams":
|
||||
result = uninstantiate(operand).toNode(traitCall.info)
|
||||
of "supportsCopyMem":
|
||||
let t = operand.skipTypes({tyVar, tyGenericInst, tyAlias, tyInferred})
|
||||
let complexObj = containsGarbageCollectedRef(t) or
|
||||
hasDestructor(t)
|
||||
result = newIntNodeT(ord(not complexObj), traitCall)
|
||||
else:
|
||||
internalAssert false
|
||||
localError(traitCall.info, "unknown trait")
|
||||
result = emptyNode
|
||||
|
||||
proc semTypeTraits(c: PContext, n: PNode): PNode =
|
||||
checkMinSonsLen(n, 2)
|
||||
@@ -246,7 +250,11 @@ proc magicsAfterOverloadResolution(c: PContext, n: PNode,
|
||||
result = semTypeOf(c, n.sons[1])
|
||||
of mArrGet: result = semArrGet(c, n, flags)
|
||||
of mArrPut: result = semArrPut(c, n, flags)
|
||||
of mAsgn: result = semAsgnOpr(c, n)
|
||||
of mAsgn:
|
||||
if n[0].sym.name.s == "=":
|
||||
result = semAsgnOpr(c, n)
|
||||
else:
|
||||
result = n
|
||||
of mIsPartOf: result = semIsPartOf(c, n, flags)
|
||||
of mTypeTrait: result = semTypeTraits(c, n)
|
||||
of mAstToStr:
|
||||
@@ -264,35 +272,7 @@ proc magicsAfterOverloadResolution(c: PContext, n: PNode,
|
||||
of mDotDot:
|
||||
result = n
|
||||
of mRoof:
|
||||
let bracketExpr = if n.len == 3: n.sons[2] else: c.p.bracketExpr
|
||||
if bracketExpr.isNil:
|
||||
localError(n.info, "no surrounding array access context for '^'")
|
||||
result = n.sons[1]
|
||||
elif bracketExpr.checkForSideEffects != seNoSideEffect:
|
||||
localError(n.info, "invalid context for '^' as '$#' has side effects" %
|
||||
renderTree(bracketExpr))
|
||||
result = n.sons[1]
|
||||
elif bracketExpr.typ.isStrangeArray:
|
||||
localError(n.info, "invalid context for '^' as len!=high+1 for '$#'" %
|
||||
renderTree(bracketExpr))
|
||||
result = n.sons[1]
|
||||
else:
|
||||
# ^x is rewritten to: len(a)-x
|
||||
let lenExpr = newNodeI(nkCall, n.info)
|
||||
lenExpr.add newIdentNode(getIdent"len", n.info)
|
||||
lenExpr.add bracketExpr
|
||||
let lenExprB = semExprWithType(c, lenExpr)
|
||||
if lenExprB.typ.isNil or not isOrdinalType(lenExprB.typ):
|
||||
localError(n.info, "'$#' has to be of an ordinal type for '^'" %
|
||||
renderTree(lenExpr))
|
||||
result = n.sons[1]
|
||||
else:
|
||||
result = newNodeIT(nkCall, n.info, getSysType(tyInt))
|
||||
let subi = getSysMagic("-", mSubI)
|
||||
#echo "got ", typeToString(subi.typ)
|
||||
result.add newSymNode(subi, n.info)
|
||||
result.add lenExprB
|
||||
result.add n.sons[1]
|
||||
localError(n.info, "builtin roof operator is not supported anymore")
|
||||
of mPlugin:
|
||||
let plugin = getPlugin(n[0].sym)
|
||||
if plugin.isNil:
|
||||
|
||||
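# With the builtin roof operator rejected above, backwards indexing is expected
# to go through system.BackwardsIndex/HSlice instead; user code keeps the
# familiar form (sketch):
let xs = @[10, 20, 30, 40]
echo xs[^1]        # 40        -- system.^ produces a BackwardsIndex
echo xs[1 .. ^2]   # @[20, 30] -- HSlice with a BackwardsIndex end point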
@@ -42,7 +42,7 @@ proc mergeInitStatus(existing: var InitStatus, newStatus: InitStatus) =
|
||||
proc locateFieldInInitExpr(field: PSym, initExpr: PNode): PNode =
|
||||
# Returns the assignment nkExprColonExpr node or nil
|
||||
let fieldId = field.name.id
|
||||
for i in 1 .. <initExpr.len:
|
||||
for i in 1 ..< initExpr.len:
|
||||
let assignment = initExpr[i]
|
||||
if assignment.kind != nkExprColonExpr:
|
||||
localError(initExpr.info, "incorrect object construction syntax")
|
||||
@@ -78,13 +78,13 @@ proc caseBranchMatchesExpr(branch, matched: PNode): bool =
|
||||
|
||||
proc pickCaseBranch(caseExpr, matched: PNode): PNode =
|
||||
# XXX: Perhaps this proc already exists somewhere
|
||||
let endsWithElse = caseExpr{-1}.kind == nkElse
|
||||
let endsWithElse = caseExpr[^1].kind == nkElse
|
||||
for i in 1 .. caseExpr.len - 1 - int(endsWithElse):
|
||||
if caseExpr[i].caseBranchMatchesExpr(matched):
|
||||
return caseExpr[i]
|
||||
|
||||
if endsWithElse:
|
||||
return caseExpr{-1}
|
||||
return caseExpr[^1]
|
||||
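# For context, these checks guard construction of case objects; a minimal
# example of what is being validated (hypothetical types):
type
  Kind = enum circle, rect
  Shape = object
    case kind: Kind
    of circle:
      radius: float
    of rect:
      width, height: float

let a = Shape(kind: circle, radius: 2.0)     # ok: field from the selected branch
# let b = Shape(kind: circle, width: 3.0)    # rejected: 'width' is in the rect branch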
|
||||
iterator directFieldsInRecList(recList: PNode): PNode =
|
||||
# XXX: We can remove this case by making all nkOfBranch nodes
|
||||
@@ -136,17 +136,20 @@ proc semConstructFields(c: PContext, recNode: PNode,
|
||||
|
||||
of nkRecCase:
|
||||
template fieldsPresentInBranch(branchIdx: int): string =
|
||||
fieldsPresentInInitExpr(recNode[branchIdx]{-1}, initExpr)
|
||||
let branch = recNode[branchIdx]
|
||||
let fields = branch[branch.len - 1]
|
||||
fieldsPresentInInitExpr(fields, initExpr)
|
||||
|
||||
template checkMissingFields(branchNode: PNode) =
|
||||
checkForMissingFields(branchNode{-1}, initExpr)
|
||||
let fields = branchNode[branchNode.len - 1]
|
||||
checkForMissingFields(fields, initExpr)
|
||||
|
||||
let discriminator = recNode.sons[0];
|
||||
internalAssert discriminator.kind == nkSym
|
||||
var selectedBranch = -1
|
||||
|
||||
for i in 1 .. <recNode.len:
|
||||
let innerRecords = recNode[i]{-1}
|
||||
for i in 1 ..< recNode.len:
|
||||
let innerRecords = recNode[i][^1]
|
||||
let status = semConstructFields(c, innerRecords, initExpr, flags)
|
||||
if status notin {initNone, initUnknown}:
|
||||
mergeInitStatus(result, status)
|
||||
@@ -220,7 +223,7 @@ proc semConstructFields(c: PContext, recNode: PNode,
|
||||
else:
|
||||
# All bets are off. If any of the branches has a mandatory
|
||||
# fields we must produce an error:
|
||||
for i in 1 .. <recNode.len: checkMissingFields recNode[i]
|
||||
for i in 1 ..< recNode.len: checkMissingFields recNode[i]
|
||||
|
||||
of nkSym:
|
||||
let field = recNode.sym
|
||||
@@ -250,7 +253,7 @@ proc semObjConstr(c: PContext, n: PNode, flags: TExprFlags): PNode =
|
||||
var t = semTypeNode(c, n.sons[0], nil)
|
||||
result = newNodeIT(nkObjConstr, n.info, t)
|
||||
for child in n: result.add child
|
||||
|
||||
|
||||
t = skipTypes(t, {tyGenericInst, tyAlias})
|
||||
if t.kind == tyRef: t = skipTypes(t.sons[0], {tyGenericInst, tyAlias})
|
||||
if t.kind != tyObject:
|
||||
@@ -277,7 +280,7 @@ proc semObjConstr(c: PContext, n: PNode, flags: TExprFlags): PNode =
|
||||
# Since we were traversing the object fields, it's possible that
|
||||
# not all of the fields specified in the constructor was visited.
|
||||
# We'll check for such fields here:
|
||||
for i in 1.. <result.len:
|
||||
for i in 1..<result.len:
|
||||
let field = result[i]
|
||||
if nfSem notin field.flags:
|
||||
if field.kind != nkExprColonExpr:
|
||||
@@ -286,7 +289,7 @@ proc semObjConstr(c: PContext, n: PNode, flags: TExprFlags): PNode =
|
||||
let id = considerQuotedIdent(field[0])
|
||||
# This node was not processed. There are two possible reasons:
|
||||
# 1) It was shadowed by a field with the same name on the left
|
||||
for j in 1 .. <i:
|
||||
for j in 1 ..< i:
|
||||
let prevId = considerQuotedIdent(result[j][0])
|
||||
if prevId.id == id.id:
|
||||
localError(field.info, errFieldInitTwice, id.s)
|
||||
|
||||
@@ -81,7 +81,7 @@ proc initAnalysisCtx(): AnalysisCtx =
|
||||
result.guards = @[]
|
||||
|
||||
proc lookupSlot(c: AnalysisCtx; s: PSym): int =
|
||||
for i in 0.. <c.locals.len:
|
||||
for i in 0..<c.locals.len:
|
||||
if c.locals[i].v == s or c.locals[i].alias == s: return i
|
||||
return -1
|
||||
|
||||
@@ -94,7 +94,7 @@ proc getSlot(c: var AnalysisCtx; v: PSym): ptr MonotonicVar =
|
||||
return addr(c.locals[L])
|
||||
|
||||
proc gatherArgs(c: var AnalysisCtx; n: PNode) =
|
||||
for i in 0.. <n.safeLen:
|
||||
for i in 0..<n.safeLen:
|
||||
let root = getRoot n[i]
|
||||
if root != nil:
|
||||
block addRoot:
|
||||
@@ -119,7 +119,7 @@ proc checkLocal(c: AnalysisCtx; n: PNode) =
|
||||
if s >= 0 and c.locals[s].stride != nil:
|
||||
localError(n.info, "invalid usage of counter after increment")
|
||||
else:
|
||||
for i in 0 .. <n.safeLen: checkLocal(c, n.sons[i])
|
||||
for i in 0 ..< n.safeLen: checkLocal(c, n.sons[i])
|
||||
|
||||
template `?`(x): untyped = x.renderTree
|
||||
|
||||
@@ -180,7 +180,7 @@ proc stride(c: AnalysisCtx; n: PNode): BiggestInt =
|
||||
if s >= 0 and c.locals[s].stride != nil:
|
||||
result = c.locals[s].stride.intVal
|
||||
else:
|
||||
for i in 0 .. <n.safeLen: result += stride(c, n.sons[i])
|
||||
for i in 0 ..< n.safeLen: result += stride(c, n.sons[i])
|
||||
|
||||
proc subStride(c: AnalysisCtx; n: PNode): PNode =
|
||||
# substitute with stride:
|
||||
@@ -192,7 +192,7 @@ proc subStride(c: AnalysisCtx; n: PNode): PNode =
|
||||
result = n
|
||||
elif n.safeLen > 0:
|
||||
result = shallowCopy(n)
|
||||
for i in 0 .. <n.len: result.sons[i] = subStride(c, n.sons[i])
|
||||
for i in 0 ..< n.len: result.sons[i] = subStride(c, n.sons[i])
|
||||
else:
|
||||
result = n
|
||||
|
||||
@@ -251,7 +251,7 @@ proc checkSlicesAreDisjoint(c: var AnalysisCtx) =
|
||||
proc analyse(c: var AnalysisCtx; n: PNode)
|
||||
|
||||
proc analyseSons(c: var AnalysisCtx; n: PNode) =
|
||||
for i in 0 .. <safeLen(n): analyse(c, n[i])
|
||||
for i in 0 ..< safeLen(n): analyse(c, n[i])
|
||||
|
||||
proc min(a, b: PNode): PNode =
|
||||
if a.isNil: result = b
|
||||
@@ -293,11 +293,11 @@ proc analyseCall(c: var AnalysisCtx; n: PNode; op: PSym) =
|
||||
proc analyseCase(c: var AnalysisCtx; n: PNode) =
|
||||
analyse(c, n.sons[0])
|
||||
let oldFacts = c.guards.len
|
||||
for i in 1.. <n.len:
|
||||
for i in 1..<n.len:
|
||||
let branch = n.sons[i]
|
||||
setLen(c.guards, oldFacts)
|
||||
addCaseBranchFacts(c.guards, n, i)
|
||||
for i in 0 .. <branch.len:
|
||||
for i in 0 ..< branch.len:
|
||||
analyse(c, branch.sons[i])
|
||||
setLen(c.guards, oldFacts)
|
||||
|
||||
@@ -307,14 +307,14 @@ proc analyseIf(c: var AnalysisCtx; n: PNode) =
|
||||
addFact(c.guards, canon(n.sons[0].sons[0]))
|
||||
|
||||
analyse(c, n.sons[0].sons[1])
|
||||
for i in 1.. <n.len:
|
||||
for i in 1..<n.len:
|
||||
let branch = n.sons[i]
|
||||
setLen(c.guards, oldFacts)
|
||||
for j in 0..i-1:
|
||||
addFactNeg(c.guards, canon(n.sons[j].sons[0]))
|
||||
if branch.len > 1:
|
||||
addFact(c.guards, canon(branch.sons[0]))
|
||||
for i in 0 .. <branch.len:
|
||||
for i in 0 ..< branch.len:
|
||||
analyse(c, branch.sons[i])
|
||||
setLen(c.guards, oldFacts)
|
||||
|
||||
@@ -407,7 +407,7 @@ proc transformSlices(n: PNode): PNode =
|
||||
return result
|
||||
if n.safeLen > 0:
|
||||
result = shallowCopy(n)
|
||||
for i in 0 .. < n.len:
|
||||
for i in 0 ..< n.len:
|
||||
result.sons[i] = transformSlices(n.sons[i])
|
||||
else:
|
||||
result = n
|
||||
@@ -415,7 +415,7 @@ proc transformSlices(n: PNode): PNode =
|
||||
proc transformSpawn(owner: PSym; n, barrier: PNode): PNode
|
||||
proc transformSpawnSons(owner: PSym; n, barrier: PNode): PNode =
|
||||
result = shallowCopy(n)
|
||||
for i in 0 .. < n.len:
|
||||
for i in 0 ..< n.len:
|
||||
result.sons[i] = transformSpawn(owner, n.sons[i], barrier)
|
||||
|
||||
proc transformSpawn(owner: PSym; n, barrier: PNode): PNode =
|
||||
|
||||
@@ -11,23 +11,14 @@ import
|
||||
intsets, ast, astalgo, msgs, renderer, magicsys, types, idents, trees,
|
||||
wordrecg, strutils, options, guards, writetracking
|
||||
|
||||
when defined(useDfa):
|
||||
import dfa
|
||||
|
||||
# Second semantic checking pass over the AST. Necessary because the old
|
||||
# way had some inherent problems. Performs:
|
||||
#
|
||||
# * effect+exception tracking
|
||||
# * "usage before definition" checking
|
||||
# * checks for invalid usages of compiletime magics (not implemented)
|
||||
# * checks for invalid usages of NimNode (not implemented)
|
||||
# * later: will do an escape analysis for closures at least
|
||||
|
||||
# Predefined effects:
|
||||
# io, time (time dependent), gc (performs GC'ed allocation), exceptions,
|
||||
# side effect (accesses global), store (stores into *type*),
|
||||
# store_unknown (performs some store) --> store(any)|store(x)
|
||||
# load (loads from *type*), recursive (recursive call), unsafe,
|
||||
# endless (has endless loops), --> user effects are defined over *patterns*
|
||||
# --> a TR macro can annotate the proc with user defined annotations
|
||||
# --> the effect system can access these
|
||||
|
||||
# ------------------------ exception and tag tracking -------------------------
|
||||
|
||||
@@ -248,6 +239,7 @@ proc useVar(a: PEffects, n: PNode) =
|
||||
(tfHasGCedMem in s.typ.flags or s.typ.isGCedMem):
|
||||
#if warnGcUnsafe in gNotes: warnAboutGcUnsafe(n)
|
||||
markGcUnsafe(a, s)
|
||||
markSideEffect(a, s)
|
||||
else:
|
||||
markSideEffect(a, s)
|
||||
|
||||
@@ -256,7 +248,7 @@ type
TIntersection = seq[tuple[id, count: int]] # a simple count table

proc addToIntersection(inter: var TIntersection, s: int) =
for j in 0.. <inter.len:
for j in 0..<inter.len:
if s == inter[j].id:
inc inter[j].count
return
@@ -290,7 +282,7 @@ proc createTag(n: PNode): PNode =
proc addEffect(a: PEffects, e: PNode, useLineInfo=true) =
assert e.kind != nkRaiseStmt
var aa = a.exc
for i in a.bottom .. <aa.len:
for i in a.bottom ..< aa.len:
if sameType(aa[i].excType, e.excType):
if not useLineInfo or gCmd == cmdDoc: return
elif aa[i].info == e.info: return
@@ -298,7 +290,7 @@ proc addEffect(a: PEffects, e: PNode, useLineInfo=true) =

proc addTag(a: PEffects, e: PNode, useLineInfo=true) =
var aa = a.tags
for i in 0 .. <aa.len:
for i in 0 ..< aa.len:
if sameType(aa[i].typ.skipTypes(skipPtrs), e.typ.skipTypes(skipPtrs)):
if not useLineInfo or gCmd == cmdDoc: return
elif aa[i].info == e.info: return
@@ -353,12 +345,12 @@ proc trackTryStmt(tracked: PEffects, n: PNode) =
inc tracked.inTryStmt
track(tracked, n.sons[0])
dec tracked.inTryStmt
for i in oldState.. <tracked.init.len:
for i in oldState..<tracked.init.len:
addToIntersection(inter, tracked.init[i])

var branches = 1
var hasFinally = false
for i in 1 .. < n.len:
for i in 1 ..< n.len:
let b = n.sons[i]
let blen = sonsLen(b)
if b.kind == nkExceptBranch:
@@ -372,7 +364,7 @@ proc trackTryStmt(tracked: PEffects, n: PNode) =

setLen(tracked.init, oldState)
track(tracked, b.sons[blen-1])
for i in oldState.. <tracked.init.len:
for i in oldState..<tracked.init.len:
addToIntersection(inter, tracked.init[i])
else:
assert b.kind == nkFinally
@@ -428,7 +420,7 @@ proc documentEffect(n, x: PNode, effectType: TSpecialWord, idx: int): PNode =
|
||||
|
||||
# warning: hack ahead:
|
||||
var effects = newNodeI(nkBracket, n.info, real.len)
|
||||
for i in 0 .. <real.len:
|
||||
for i in 0 ..< real.len:
|
||||
var t = typeToString(real[i].typ)
|
||||
if t.startsWith("ref "): t = substr(t, 4)
|
||||
effects.sons[i] = newIdentNode(getIdent(t), n.info)
|
||||
@@ -590,6 +582,12 @@ proc trackOperand(tracked: PEffects, n: PNode, paramType: PType) =
|
||||
if paramType != nil and paramType.kind == tyVar:
|
||||
if n.kind == nkSym and isLocalVar(tracked, n.sym):
|
||||
makeVolatile(tracked, n.sym)
|
||||
if paramType != nil and paramType.kind == tyProc and tfGcSafe in paramType.flags:
|
||||
let argtype = skipTypes(a.typ, abstractInst)
|
||||
# XXX figure out why this can be a non tyProc here. See httpclient.nim for an
|
||||
# example that triggers it.
|
||||
if argtype.kind == tyProc and notGcSafe(argtype) and not tracked.inEnforcedGcSafe:
|
||||
localError(n.info, $n & " is not GC safe")
|
||||
notNilCheck(tracked, n, paramType)
|
||||
|
||||
proc breaksBlock(n: PNode): bool =
|
||||
@@ -615,16 +613,16 @@ proc trackCase(tracked: PEffects, n: PNode) =
|
||||
warnProveField in gNotes
|
||||
var inter: TIntersection = @[]
|
||||
var toCover = 0
|
||||
for i in 1.. <n.len:
|
||||
for i in 1..<n.len:
|
||||
let branch = n.sons[i]
|
||||
setLen(tracked.init, oldState)
|
||||
if interesting:
|
||||
setLen(tracked.guards, oldFacts)
|
||||
addCaseBranchFacts(tracked.guards, n, i)
|
||||
for i in 0 .. <branch.len:
|
||||
for i in 0 ..< branch.len:
|
||||
track(tracked, branch.sons[i])
|
||||
if not breaksBlock(branch.lastSon): inc toCover
|
||||
for i in oldState.. <tracked.init.len:
|
||||
for i in oldState..<tracked.init.len:
|
||||
addToIntersection(inter, tracked.init[i])
|
||||
|
||||
setLen(tracked.init, oldState)
|
||||
@@ -644,10 +642,10 @@ proc trackIf(tracked: PEffects, n: PNode) =
|
||||
var toCover = 0
|
||||
track(tracked, n.sons[0].sons[1])
|
||||
if not breaksBlock(n.sons[0].sons[1]): inc toCover
|
||||
for i in oldState.. <tracked.init.len:
|
||||
for i in oldState..<tracked.init.len:
|
||||
addToIntersection(inter, tracked.init[i])
|
||||
|
||||
for i in 1.. <n.len:
|
||||
for i in 1..<n.len:
|
||||
let branch = n.sons[i]
|
||||
setLen(tracked.guards, oldFacts)
|
||||
for j in 0..i-1:
|
||||
@@ -655,10 +653,10 @@ proc trackIf(tracked: PEffects, n: PNode) =
|
||||
if branch.len > 1:
|
||||
addFact(tracked.guards, branch.sons[0])
|
||||
setLen(tracked.init, oldState)
|
||||
for i in 0 .. <branch.len:
|
||||
for i in 0 ..< branch.len:
|
||||
track(tracked, branch.sons[i])
|
||||
if not breaksBlock(branch.lastSon): inc toCover
|
||||
for i in oldState.. <tracked.init.len:
|
||||
for i in oldState..<tracked.init.len:
|
||||
addToIntersection(inter, tracked.init[i])
|
||||
setLen(tracked.init, oldState)
|
||||
if lastSon(n).len == 1:
|
||||
@@ -670,7 +668,7 @@ proc trackIf(tracked: PEffects, n: PNode) =
|
||||
proc trackBlock(tracked: PEffects, n: PNode) =
|
||||
if n.kind in {nkStmtList, nkStmtListExpr}:
|
||||
var oldState = -1
|
||||
for i in 0.. <n.len:
|
||||
for i in 0..<n.len:
|
||||
if hasSubnodeWith(n.sons[i], nkBreakStmt):
|
||||
# block:
|
||||
# x = def
|
||||
@@ -703,7 +701,7 @@ proc track(tracked: PEffects, n: PNode) =
|
||||
n.sons[0].info = n.info
|
||||
#throws(tracked.exc, n.sons[0])
|
||||
addEffect(tracked, n.sons[0], useLineInfo=false)
|
||||
for i in 0 .. <safeLen(n):
|
||||
for i in 0 ..< safeLen(n):
|
||||
track(tracked, n.sons[i])
|
||||
of nkCallKinds:
|
||||
# p's effects are ours too:
|
||||
@@ -742,7 +740,7 @@ proc track(tracked: PEffects, n: PNode) =
|
||||
if not (a.kind == nkSym and a.sym == tracked.owner):
|
||||
markSideEffect(tracked, a)
|
||||
if a.kind != nkSym or a.sym.magic != mNBindSym:
|
||||
for i in 1 .. <len(n): trackOperand(tracked, n.sons[i], paramType(op, i))
|
||||
for i in 1 ..< len(n): trackOperand(tracked, n.sons[i], paramType(op, i))
|
||||
if a.kind == nkSym and a.sym.magic in {mNew, mNewFinalize, mNewSeq}:
|
||||
# may not look like an assignment, but it is:
|
||||
let arg = n.sons[1]
|
||||
@@ -754,11 +752,11 @@ proc track(tracked: PEffects, n: PNode) =
|
||||
discard
|
||||
else:
|
||||
message(arg.info, warnProveInit, $arg)
|
||||
for i in 0 .. <safeLen(n):
|
||||
for i in 0 ..< safeLen(n):
|
||||
track(tracked, n.sons[i])
|
||||
of nkDotExpr:
|
||||
guardDotAccess(tracked, n)
|
||||
for i in 0 .. <len(n): track(tracked, n.sons[i])
|
||||
for i in 0 ..< len(n): track(tracked, n.sons[i])
|
||||
of nkCheckedFieldExpr:
|
||||
track(tracked, n.sons[0])
|
||||
if warnProveField in gNotes: checkFieldAccess(tracked.guards, n)
|
||||
@@ -806,13 +804,13 @@ proc track(tracked: PEffects, n: PNode) =
|
||||
of nkForStmt, nkParForStmt:
|
||||
# we are very conservative here and assume the loop is never executed:
|
||||
let oldState = tracked.init.len
|
||||
for i in 0 .. <len(n):
|
||||
for i in 0 ..< len(n):
|
||||
track(tracked, n.sons[i])
|
||||
setLen(tracked.init, oldState)
|
||||
of nkObjConstr:
|
||||
when false: track(tracked, n.sons[0])
|
||||
let oldFacts = tracked.guards.len
|
||||
for i in 1 .. <len(n):
|
||||
for i in 1 ..< len(n):
|
||||
let x = n.sons[i]
|
||||
track(tracked, x)
|
||||
if x.sons[0].kind == nkSym and sfDiscriminant in x.sons[0].sym.flags:
|
||||
@@ -823,7 +821,7 @@ proc track(tracked: PEffects, n: PNode) =
|
||||
let oldLocked = tracked.locked.len
|
||||
let oldLockLevel = tracked.currLockLevel
|
||||
var enforcedGcSafety = false
|
||||
for i in 0 .. <pragmaList.len:
|
||||
for i in 0 ..< pragmaList.len:
|
||||
let pragma = whichPragma(pragmaList.sons[i])
|
||||
if pragma == wLocks:
|
||||
lockLocations(tracked, pragmaList.sons[i])
|
||||
@@ -842,7 +840,7 @@ proc track(tracked: PEffects, n: PNode) =
|
||||
of nkObjUpConv, nkObjDownConv, nkChckRange, nkChckRangeF, nkChckRange64:
|
||||
if n.len == 1: track(tracked, n.sons[0])
|
||||
else:
|
||||
for i in 0 .. <safeLen(n): track(tracked, n.sons[i])
|
||||
for i in 0 ..< safeLen(n): track(tracked, n.sons[i])
|
||||
|
||||
proc subtypeRelation(spec, real: PNode): bool =
|
||||
result = safeInheritanceDiff(real.excType, spec.typ) <= 0
|
||||
@@ -854,7 +852,7 @@ proc checkRaisesSpec(spec, real: PNode, msg: string, hints: bool;
|
||||
var used = initIntSet()
|
||||
for r in items(real):
|
||||
block search:
|
||||
for s in 0 .. <spec.len:
|
||||
for s in 0 ..< spec.len:
|
||||
if effectPredicate(spec[s], r):
|
||||
used.incl(s)
|
||||
break search
|
||||
@@ -864,7 +862,7 @@ proc checkRaisesSpec(spec, real: PNode, msg: string, hints: bool;
|
||||
popInfoContext()
|
||||
# hint about unnecessarily listed exception types:
|
||||
if hints:
|
||||
for s in 0 .. <spec.len:
|
||||
for s in 0 ..< spec.len:
|
||||
if not used.contains(s):
|
||||
message(spec[s].info, hintXDeclaredButNotUsed, renderTree(spec[s]))
|
||||
|
||||
@@ -979,7 +977,10 @@ proc trackProc*(s: PSym, body: PNode) =
|
||||
message(s.info, warnLockLevel,
|
||||
"declared lock level is $1, but real lock level is $2" %
|
||||
[$s.typ.lockLevel, $t.maxLockLevel])
|
||||
if s.kind == skFunc: trackWrites(s, body)
|
||||
when false:
|
||||
if s.kind == skFunc:
|
||||
when defined(dfa): dataflowAnalysis(s, body)
|
||||
trackWrites(s, body)
|
||||
|
||||
proc trackTopLevelStmt*(module: PSym; n: PNode) =
|
||||
if n.kind in {nkPragma, nkMacroDef, nkTemplateDef, nkProcDef, nkFuncDef,
|
||||
|
||||
@@ -100,15 +100,16 @@ proc semProc(c: PContext, n: PNode): PNode
include semdestruct

proc semDestructorCheck(c: PContext, n: PNode, flags: TExprFlags) {.inline.} =
if efAllowDestructor notin flags and
n.kind in nkCallKinds+{nkObjConstr,nkBracket}:
if instantiateDestructor(c, n.typ) != nil:
localError(n.info, warnDestructor)
# This still breaks too many things:
when false:
if efDetermineType notin flags and n.typ.kind == tyTypeDesc and
c.p.owner.kind notin {skTemplate, skMacro}:
localError(n.info, errGenerated, "value expected, but got a type")
if not newDestructors:
if efAllowDestructor notin flags and
n.kind in nkCallKinds+{nkObjConstr,nkBracket}:
if instantiateDestructor(c, n.typ) != nil:
localError(n.info, warnDestructor)
# This still breaks too many things:
when false:
if efDetermineType notin flags and n.typ.kind == tyTypeDesc and
c.p.owner.kind notin {skTemplate, skMacro}:
localError(n.info, errGenerated, "value expected, but got a type")

proc semExprBranch(c: PContext, n: PNode): PNode =
result = semExpr(c, n)
@@ -384,7 +385,7 @@ proc checkNilable(v: PSym) =
{tfNotNil, tfNeedsInit} * v.typ.flags != {}:
if v.ast.isNil:
message(v.info, warnProveInit, v.name.s)
elif tfNeedsInit in v.typ.flags and tfNotNil notin v.ast.typ.flags:
elif tfNotNil in v.typ.flags and tfNotNil notin v.ast.typ.flags:
message(v.info, warnProveInit, v.name.s)

include semasgn
@@ -399,7 +400,7 @@ proc addToVarSection(c: PContext; result: var PNode; orig, identDefs: PNode) =
# in order for this transformation to be correct.
let L = identDefs.len
let value = identDefs[L-1]
if value.typ != nil and tfHasAsgn in value.typ.flags:
if value.typ != nil and tfHasAsgn in value.typ.flags and not newDestructors:
# the spec says we need to rewrite 'var x = T()' to 'var x: T; x = T()':
identDefs.sons[L-1] = emptyNode
if result.kind != nkStmtList:
@@ -552,6 +553,7 @@ proc semVarOrLet(c: PContext, n: PNode, symkind: TSymKind): PNode =
# this can only happen for errornous var statements:
if typ == nil: continue
typeAllowedCheck(a.info, typ, symkind)
liftTypeBoundOps(c, typ, a.info)
var tup = skipTypes(typ, {tyGenericInst, tyAlias})
if a.kind == nkVarTuple:
if tup.kind != tyTuple:
@@ -607,7 +609,7 @@ proc semVarOrLet(c: PContext, n: PNode, symkind: TSymKind): PNode =
if def.kind == nkPar: v.ast = def[j]
setVarType(v, tup.sons[j])
b.sons[j] = newSymNode(v)
addDefer(c, result, v)
if not newDestructors: addDefer(c, result, v)
checkNilable(v)
if sfCompileTime in v.flags: hasCompileTime = true
if hasCompileTime: vm.setupCompileTimeVar(c.module, c.cache, result)
@@ -773,24 +775,55 @@ proc typeSectionLeftSidePass(c: PContext, n: PNode) =
checkSonsLen(a, 3)
let name = a.sons[0]
var s: PSym
if name.kind == nkDotExpr:
s = qualifiedLookUp(c, name, {checkUndeclared, checkModule})
if s.kind != skType or
s.typ.skipTypes(abstractPtrs).kind != tyObject or
tfPartial notin s.typ.skipTypes(abstractPtrs).flags:
localError(name.info, "only .partial objects can be extended")
if name.kind == nkDotExpr and a[2].kind == nkObjectTy:
let pkgName = considerQuotedIdent(name[0])
let typName = considerQuotedIdent(name[1])
let pkg = c.graph.packageSyms.strTableGet(pkgName)
if pkg.isNil or pkg.kind != skPackage:
localError(name.info, "unknown package name: " & pkgName.s)
else:
let typsym = pkg.tab.strTableGet(typName)
if typsym.isNil:
s = semIdentDef(c, name[1], skType)
s.typ = newTypeS(tyObject, c)
s.typ.sym = s
s.flags.incl sfForward
pkg.tab.strTableAdd s
addInterfaceDecl(c, s)
elif typsym.kind == skType and sfForward in typsym.flags:
s = typsym
addInterfaceDecl(c, s)
else:
localError(name.info, typsym.name.s & " is not a type that can be forwarded")
s = typsym
else:
s = semIdentDef(c, name, skType)
s.typ = newTypeS(tyForward, c)
s.typ.sym = s # process pragmas:
if name.kind == nkPragmaExpr:
pragma(c, s, name.sons[1], typePragmas)
if sfForward in s.flags:
# check if the symbol already exists:
let pkg = c.module.owner
if not isTopLevel(c) or pkg.isNil:
localError(name.info, "only top level types in a package can be 'package'")
else:
let typsym = pkg.tab.strTableGet(s.name)
if typsym != nil:
if sfForward notin typsym.flags or sfNoForward notin typsym.flags:
typeCompleted(typsym)
typsym.info = s.info
else:
localError(name.info, "cannot complete type '" & s.name.s & "' twice; " &
"previous type completion was here: " & $typsym.info)
s = typsym
# add it here, so that recursive types are possible:
if sfGenSym notin s.flags: addInterfaceDecl(c, s)

a.sons[0] = newSymNode(s)

proc checkCovariantParamsUsages(genericType: PType) =
var body = genericType{-1}
var body = genericType[^1]

proc traverseSubTypes(t: PType): bool =
template error(msg) = localError(genericType.sym.info, msg)
@@ -825,7 +858,7 @@ proc checkCovariantParamsUsages(genericType: PType) =
|
||||
|
||||
of tyGenericInvocation:
|
||||
let targetBody = t[0]
|
||||
for i in 1 .. <t.len:
|
||||
for i in 1 ..< t.len:
|
||||
let param = t[i]
|
||||
if param.kind == tyGenericParam:
|
||||
if tfCovariant in param.flags:
|
||||
@@ -972,7 +1005,7 @@ proc checkForMetaFields(n: PNode) =
|
||||
of tySequence, tySet, tyArray, tyOpenArray, tyVar, tyPtr, tyRef,
|
||||
tyProc, tyGenericInvocation, tyGenericInst, tyAlias:
|
||||
let start = ord(t.kind in {tyGenericInvocation, tyGenericInst})
|
||||
for i in start .. <t.sons.len:
|
||||
for i in start ..< t.sons.len:
|
||||
checkMeta(t.sons[i])
|
||||
else:
|
||||
checkMeta(t)
|
||||
@@ -1098,7 +1131,7 @@ proc addResultNode(c: PContext, n: PNode) =
|
||||
|
||||
proc copyExcept(n: PNode, i: int): PNode =
|
||||
result = copyNode(n)
|
||||
for j in 0.. <n.len:
|
||||
for j in 0..<n.len:
|
||||
if j != i: result.add(n.sons[j])
|
||||
|
||||
proc lookupMacro(c: PContext, n: PNode): PSym =
|
||||
@@ -1112,7 +1145,7 @@ proc semProcAnnotation(c: PContext, prc: PNode;
|
||||
validPragmas: TSpecialWords): PNode =
|
||||
var n = prc.sons[pragmasPos]
|
||||
if n == nil or n.kind == nkEmpty: return
|
||||
for i in countup(0, <n.len):
|
||||
for i in countup(0, n.len-1):
|
||||
var it = n.sons[i]
|
||||
var key = if it.kind == nkExprColonExpr: it.sons[0] else: it
|
||||
let m = lookupMacro(c, key)
|
||||
@@ -1263,7 +1296,7 @@ proc activate(c: PContext, n: PNode) =
|
||||
of nkLambdaKinds:
|
||||
discard semLambda(c, n, {})
|
||||
of nkCallKinds:
|
||||
for i in 1 .. <n.len: activate(c, n[i])
|
||||
for i in 1 ..< n.len: activate(c, n[i])
|
||||
else:
|
||||
discard
|
||||
|
||||
@@ -1276,9 +1309,30 @@ proc maybeAddResult(c: PContext, s: PSym, n: PNode) =
proc semOverride(c: PContext, s: PSym, n: PNode) =
case s.name.s.normalize
of "destroy", "=destroy":
doDestructorStuff(c, s, n)
if not experimentalMode(c):
localError n.info, "use the {.experimental.} pragma to enable destructors"
if newDestructors:
let t = s.typ
var noError = false
if t.len == 2 and t.sons[0] == nil and t.sons[1].kind == tyVar:
var obj = t.sons[1].sons[0]
while true:
incl(obj.flags, tfHasAsgn)
if obj.kind == tyGenericBody: obj = obj.lastSon
elif obj.kind == tyGenericInvocation: obj = obj.sons[0]
else: break
if obj.kind in {tyObject, tyDistinct}:
if obj.destructor.isNil:
obj.destructor = s
else:
localError(n.info, errGenerated,
"cannot bind another '" & s.name.s & "' to: " & typeToString(obj))
noError = true
if not noError and sfSystemModule notin s.owner.flags:
localError(n.info, errGenerated,
"signature for '" & s.name.s & "' must be proc[T: object](x: var T)")
else:
doDestructorStuff(c, s, n)
if not experimentalMode(c):
localError n.info, "use the {.experimental.} pragma to enable destructors"
incl(s.flags, sfUsed)
of "deepcopy", "=deepcopy":
if s.typ.len == 2 and
@@ -1303,7 +1357,7 @@ proc semOverride(c: PContext, s: PSym, n: PNode) =
localError(n.info, errGenerated,
"signature for 'deepCopy' must be proc[T: ptr|ref](x: T): T")
incl(s.flags, sfUsed)
of "=":
of "=", "=sink":
if s.magic == mAsgn: return
incl(s.flags, sfUsed)
let t = s.typ
@@ -1321,14 +1375,16 @@ proc semOverride(c: PContext, s: PSym, n: PNode) =
objB = objB.sons[0]
else: break
if obj.kind in {tyObject, tyDistinct} and sameType(obj, objB):
if obj.assignment.isNil:
obj.assignment = s
let opr = if s.name.s == "=": addr(obj.assignment) else: addr(obj.sink)
if opr[].isNil:
opr[] = s
else:
localError(n.info, errGenerated,
"cannot bind another '=' to: " & typeToString(obj))
"cannot bind another '" & s.name.s & "' to: " & typeToString(obj))
return
localError(n.info, errGenerated,
"signature for '=' must be proc[T: object](x: var T; y: T)")
if sfSystemModule notin s.owner.flags:
localError(n.info, errGenerated,
"signature for '" & s.name.s & "' must be proc[T: object](x: var T; y: T)")
else:
if sfOverriden in s.flags:
localError(n.info, errGenerated,
@@ -1663,7 +1719,7 @@ proc evalInclude(c: PContext, n: PNode): PNode =
|
||||
excl(c.includedFiles, f)
|
||||
|
||||
proc setLine(n: PNode, info: TLineInfo) =
|
||||
for i in 0 .. <safeLen(n): setLine(n.sons[i], info)
|
||||
for i in 0 ..< safeLen(n): setLine(n.sons[i], info)
|
||||
n.info = info
|
||||
|
||||
proc semPragmaBlock(c: PContext, n: PNode): PNode =
|
||||
@@ -1671,7 +1727,7 @@ proc semPragmaBlock(c: PContext, n: PNode): PNode =
|
||||
pragma(c, nil, pragmaList, exprPragmas)
|
||||
result = semExpr(c, n.sons[1])
|
||||
n.sons[1] = result
|
||||
for i in 0 .. <pragmaList.len:
|
||||
for i in 0 ..< pragmaList.len:
|
||||
case whichPragma(pragmaList.sons[i])
|
||||
of wLine: setLine(result, pragmaList.sons[i].info)
|
||||
of wLocks, wGcSafe:
|
||||
@@ -1808,7 +1864,8 @@ proc semStmtList(c: PContext, n: PNode, flags: TExprFlags): PNode =
|
||||
of LastBlockStmts:
|
||||
for j in countup(i + 1, length - 1):
|
||||
case n.sons[j].kind
|
||||
of nkPragma, nkCommentStmt, nkNilLit, nkEmpty: discard
|
||||
of nkPragma, nkCommentStmt, nkNilLit, nkEmpty, nkBlockExpr,
|
||||
nkBlockStmt, nkState: discard
|
||||
else: localError(n.sons[j].info, errStmtInvalidAfterReturn)
|
||||
else: discard
|
||||
|
||||
|
||||
@@ -75,7 +75,7 @@ proc symChoice(c: PContext, n: PNode, s: PSym, r: TSymChoiceRule): PNode =
|
||||
a = nextOverloadIter(o, c, n)
|
||||
|
||||
proc semBindStmt(c: PContext, n: PNode, toBind: var IntSet): PNode =
|
||||
for i in 0 .. < n.len:
|
||||
for i in 0 ..< n.len:
|
||||
var a = n.sons[i]
|
||||
# If 'a' is an overloaded symbol, we used to use the first symbol
|
||||
# as a 'witness' and use the fact that subsequent lookups will yield
|
||||
@@ -95,7 +95,7 @@ proc semBindStmt(c: PContext, n: PNode, toBind: var IntSet): PNode =
|
||||
result = newNodeI(nkEmpty, n.info)
|
||||
|
||||
proc semMixinStmt(c: PContext, n: PNode, toMixin: var IntSet): PNode =
|
||||
for i in 0 .. < n.len:
|
||||
for i in 0 ..< n.len:
|
||||
toMixin.incl(considerQuotedIdent(n.sons[i]).id)
|
||||
result = newNodeI(nkEmpty, n.info)
|
||||
|
||||
@@ -113,13 +113,9 @@ type
|
||||
owner: PSym
|
||||
cursorInBody: bool # only for nimsuggest
|
||||
scopeN: int
|
||||
bracketExpr: PNode
|
||||
|
||||
template withBracketExpr(ctx, x, body: untyped) =
|
||||
let old = ctx.bracketExpr
|
||||
ctx.bracketExpr = x
|
||||
body
|
||||
ctx.bracketExpr = old
|
||||
|
||||
proc getIdentNode(c: var TemplCtx, n: PNode): PNode =
|
||||
case n.kind
|
||||
@@ -163,7 +159,7 @@ proc onlyReplaceParams(c: var TemplCtx, n: PNode): PNode =
|
||||
result = newSymNode(s, n.info)
|
||||
styleCheckUse(n.info, s)
|
||||
else:
|
||||
for i in 0 .. <n.safeLen:
|
||||
for i in 0 ..< n.safeLen:
|
||||
result.sons[i] = onlyReplaceParams(c, n.sons[i])
|
||||
|
||||
proc newGenSym(kind: TSymKind, n: PNode, c: var TemplCtx): PSym =
|
||||
@@ -301,21 +297,9 @@ proc semPattern(c: PContext, n: PNode): PNode
|
||||
|
||||
proc semTemplBodySons(c: var TemplCtx, n: PNode): PNode =
|
||||
result = n
|
||||
for i in 0.. < n.len:
|
||||
for i in 0 ..< n.len:
|
||||
result.sons[i] = semTemplBody(c, n.sons[i])
|
||||
|
||||
proc oprIsRoof(n: PNode): bool =
|
||||
const roof = "^"
|
||||
case n.kind
|
||||
of nkIdent: result = n.ident.s == roof
|
||||
of nkSym: result = n.sym.name.s == roof
|
||||
of nkAccQuoted:
|
||||
if n.len == 1:
|
||||
result = oprIsRoof(n.sons[0])
|
||||
of nkOpenSymChoice, nkClosedSymChoice:
|
||||
result = oprIsRoof(n.sons[0])
|
||||
else: discard
|
||||
|
||||
proc semTemplBody(c: var TemplCtx, n: PNode): PNode =
|
||||
result = n
|
||||
semIdeForTemplateOrGenericCheck(n, c.cursorInBody)
|
||||
@@ -506,8 +490,6 @@ proc semTemplBody(c: var TemplCtx, n: PNode): PNode =
|
||||
result = semTemplBodySons(c, n)
|
||||
of nkCallKinds-{nkPostfix}:
|
||||
result = semTemplBodySons(c, n)
|
||||
if c.bracketExpr != nil and n.len == 2 and oprIsRoof(n.sons[0]):
|
||||
result.add c.bracketExpr
|
||||
of nkDotExpr, nkAccQuoted:
|
||||
# dotExpr is ambiguous: note that we explicitly allow 'x.TemplateParam',
|
||||
# so we use the generic code for nkDotExpr too
|
||||
|
||||
@@ -515,7 +515,7 @@ proc semCaseBranch(c: PContext, t, branch: PNode, branchIndex: int,
|
||||
# first element is special and will overwrite: branch.sons[i]:
|
||||
branch.sons[i] = semCaseBranchSetElem(c, t, r[0], covered)
|
||||
# other elements have to be added to ``branch``
|
||||
for j in 1 .. <r.len:
|
||||
for j in 1 ..< r.len:
|
||||
branch.add(semCaseBranchSetElem(c, t, r[j], covered))
|
||||
# caution! last son of branch must be the actions to execute:
|
||||
var L = branch.len
|
||||
@@ -846,7 +846,7 @@ proc liftParamType(c: PContext, procKind: TSymKind, genericParams: PNode,
|
||||
@[newTypeS(paramType.kind, c)])
|
||||
result = addImplicitGeneric(typ)
|
||||
else:
|
||||
for i in 0 .. <paramType.len:
|
||||
for i in 0 ..< paramType.len:
|
||||
if paramType.sons[i] == paramType:
|
||||
globalError(info, errIllegalRecursionInTypeX, typeToString(paramType))
|
||||
var lifted = liftingWalk(paramType.sons[i])
|
||||
@@ -897,7 +897,7 @@ proc liftParamType(c: PContext, procKind: TSymKind, genericParams: PNode,
|
||||
result.shouldHaveMeta
|
||||
|
||||
of tyGenericInvocation:
|
||||
for i in 1 .. <paramType.len:
|
||||
for i in 1 ..< paramType.len:
|
||||
let lifted = liftingWalk(paramType.sons[i])
|
||||
if lifted != nil: paramType.sons[i] = lifted
|
||||
|
||||
@@ -1146,7 +1146,7 @@ proc semGeneric(c: PContext, n: PNode, s: PSym, prev: PType): PType =
|
||||
|
||||
var isConcrete = true
|
||||
|
||||
for i in 1 .. <m.call.len:
|
||||
for i in 1 ..< m.call.len:
|
||||
var typ = m.call[i].typ
|
||||
if typ.kind == tyTypeDesc and typ.sons[0].kind == tyNone:
|
||||
isConcrete = false
|
||||
@@ -1167,7 +1167,10 @@ proc semGeneric(c: PContext, n: PNode, s: PSym, prev: PType): PType =
|
||||
|
||||
# special check for generic object with
|
||||
# generic/partial specialized parent
|
||||
let tx = result.skipTypes(abstractPtrs)
|
||||
let tx = result.skipTypes(abstractPtrs, 50)
|
||||
if tx.isNil:
|
||||
localError(n.info, "invalid recursion in type '$1'" % typeToString(result[0]))
|
||||
return errorType(c)
|
||||
if tx != result and tx.kind == tyObject and tx.sons[0] != nil:
|
||||
semObjectTypeForInheritedGenericInst(c, n, tx)
|
||||
|
||||
@@ -1295,8 +1298,7 @@ proc symFromExpectedTypeNode(c: PContext, n: PNode): PSym =
|
||||
|
||||
proc semTypeNode(c: PContext, n: PNode, prev: PType): PType =
|
||||
result = nil
|
||||
when defined(nimsuggest):
|
||||
inc c.inTypeContext
|
||||
inc c.inTypeContext
|
||||
|
||||
if gCmd == cmdIdeTools: suggestExpr(c, n)
|
||||
case n.kind
|
||||
@@ -1511,8 +1513,13 @@ proc semTypeNode(c: PContext, n: PNode, prev: PType): PType =
|
||||
localError(n.info, errTypeExpected)
|
||||
result = newOrPrevType(tyError, prev, c)
|
||||
n.typ = result
|
||||
when defined(nimsuggest):
|
||||
dec c.inTypeContext
|
||||
dec c.inTypeContext
|
||||
if c.inTypeContext == 0: instAllTypeBoundOp(c, n.info)
|
||||
|
||||
when false:
|
||||
proc semTypeNode(c: PContext, n: PNode, prev: PType): PType =
|
||||
result = semTypeNodeInner(c, n, prev)
|
||||
instAllTypeBoundOp(c, n.info)
|
||||
|
||||
proc setMagicType(m: PSym, kind: TTypeKind, size: int) =
|
||||
# source : https://en.wikipedia.org/wiki/Data_structure_alignment#x86
|
||||
@@ -1614,8 +1621,8 @@ proc semGenericParamList(c: PContext, n: PNode, father: PType = nil): PNode =
|
||||
var a = n.sons[i]
|
||||
if a.kind != nkIdentDefs: illFormedAst(n)
|
||||
let L = a.len
|
||||
var def = a{-1}
|
||||
let constraint = a{-2}
|
||||
var def = a[^1]
|
||||
let constraint = a[^2]
|
||||
var typ: PType
|
||||
|
||||
if constraint.kind != nkEmpty:
|
||||
|
||||
@@ -77,7 +77,7 @@ type
|
||||
topLayer*: TIdTable
|
||||
nextLayer*: ptr LayeredIdTable
|
||||
|
||||
TReplTypeVars* {.final.} = object
|
||||
TReplTypeVars* = object
|
||||
c*: PContext
|
||||
typeMap*: ptr LayeredIdTable # map PType to PType
|
||||
symMap*: TIdTable # map PSym to PSym
|
||||
@@ -133,7 +133,7 @@ proc prepareNode(cl: var TReplTypeVars, n: PNode): PNode =
|
||||
result.typ = t
|
||||
if result.kind == nkSym: result.sym = replaceTypeVarsS(cl, n.sym)
|
||||
let isCall = result.kind in nkCallKinds
|
||||
for i in 0 .. <n.safeLen:
|
||||
for i in 0 ..< n.safeLen:
|
||||
# XXX HACK: ``f(a, b)``, avoid to instantiate `f`
|
||||
if isCall and i == 0: result.add(n[i])
|
||||
else: result.add(prepareNode(cl, n[i]))
|
||||
@@ -151,7 +151,7 @@ proc hasGenericArguments*(n: PNode): bool =
|
||||
(n.sym.kind == skType and
|
||||
n.sym.typ.flags * {tfGenericTypeParam, tfImplicitTypeParam} != {})
|
||||
else:
|
||||
for i in 0.. <n.safeLen:
|
||||
for i in 0..<n.safeLen:
|
||||
if hasGenericArguments(n.sons[i]): return true
|
||||
return false
|
||||
|
||||
@@ -166,13 +166,13 @@ proc reResolveCallsWithTypedescParams(cl: var TReplTypeVars, n: PNode): PNode =
|
||||
# overload resolution is executed again (which may trigger generateInstance).
|
||||
if n.kind in nkCallKinds and sfFromGeneric in n[0].sym.flags:
|
||||
var needsFixing = false
|
||||
for i in 1 .. <n.safeLen:
|
||||
for i in 1 ..< n.safeLen:
|
||||
if isTypeParam(n[i]): needsFixing = true
|
||||
if needsFixing:
|
||||
n.sons[0] = newSymNode(n.sons[0].sym.owner)
|
||||
return cl.c.semOverloadedCall(cl.c, n, n, {skProc, skFunc}, {})
|
||||
|
||||
for i in 0 .. <n.safeLen:
|
||||
for i in 0 ..< n.safeLen:
|
||||
n.sons[i] = reResolveCallsWithTypedescParams(cl, n[i])
|
||||
|
||||
return n
|
||||
@@ -261,6 +261,17 @@ proc instCopyType*(cl: var TReplTypeVars, t: PType): PType =
if not (t.kind in tyMetaTypes or
(t.kind == tyStatic and t.n == nil)):
result.flags.excl tfInstClearedFlags
when false:
if newDestructors:
result.assignment = nil
#result.destructor = nil
result.sink = nil

template typeBound(c, newty, oldty, field, info) =
let opr = newty.field
if opr != nil and sfFromGeneric notin opr.flags:
# '=' needs to be instantiated for generics when the type is constructed:
newty.field = c.instTypeBoundOp(c, opr, oldty, info, attachedAsgn, 1)

proc handleGenericInvocation(cl: var TReplTypeVars, t: PType): PType =
# tyGenericInvocation[A, tyGenericInvocation[A, B]]
@@ -357,11 +368,10 @@ proc handleGenericInvocation(cl: var TReplTypeVars, t: PType): PType =
assert newbody.kind in {tyRef, tyPtr}
assert newbody.lastSon.typeInst == nil
newbody.lastSon.typeInst = result
let asgn = newbody.assignment
if asgn != nil and sfFromGeneric notin asgn.flags:
# '=' needs to be instantiated for generics when the type is constructed:
newbody.assignment = cl.c.instTypeBoundOp(cl.c, asgn, result, cl.info,
attachedAsgn, 1)
if newDestructors:
cl.c.typesWithOps.add((newbody, result))
else:
typeBound(cl.c, newbody, result, assignment, cl.info)
let methods = skipTypes(bbody, abstractPtrs).methods
for col, meth in items(methods):
# we instantiate the known methods belonging to that type, this causes
|
||||
@@ -375,11 +385,11 @@ proc eraseVoidParams*(t: PType) =
|
||||
if t.sons[0] != nil and t.sons[0].kind == tyVoid:
|
||||
t.sons[0] = nil
|
||||
|
||||
for i in 1 .. <t.sonsLen:
|
||||
for i in 1 ..< t.sonsLen:
|
||||
# don't touch any memory unless necessary
|
||||
if t.sons[i].kind == tyVoid:
|
||||
var pos = i
|
||||
for j in i+1 .. <t.sonsLen:
|
||||
for j in i+1 ..< t.sonsLen:
|
||||
if t.sons[j].kind != tyVoid:
|
||||
t.sons[pos] = t.sons[j]
|
||||
t.n.sons[pos] = t.n.sons[j]
|
||||
@@ -389,7 +399,7 @@ proc eraseVoidParams*(t: PType) =
|
||||
return
|
||||
|
||||
proc skipIntLiteralParams*(t: PType) =
|
||||
for i in 0 .. <t.sonsLen:
|
||||
for i in 0 ..< t.sonsLen:
|
||||
let p = t.sons[i]
|
||||
if p == nil: continue
|
||||
let skipped = p.skipIntLit
|
||||
@@ -490,7 +500,7 @@ proc replaceTypeVarsTAux(cl: var TReplTypeVars, t: PType): PType =
|
||||
bailout()
|
||||
result = instCopyType(cl, t)
|
||||
idTablePut(cl.localCache, t, result)
|
||||
for i in 1 .. <result.sonsLen:
|
||||
for i in 1 ..< result.sonsLen:
|
||||
result.sons[i] = replaceTypeVarsT(cl, result.sons[i])
|
||||
propagateToOwner(result, result.lastSon)
|
||||
|
||||
@@ -529,6 +539,17 @@ proc replaceTypeVarsTAux(cl: var TReplTypeVars, t: PType): PType =

else: discard

proc instAllTypeBoundOp*(c: PContext, info: TLineInfo) =
if not newDestructors: return
var i = 0
while i < c.typesWithOps.len:
let (newty, oldty) = c.typesWithOps[i]
typeBound(c, newty, oldty, destructor, info)
typeBound(c, newty, oldty, sink, info)
typeBound(c, newty, oldty, assignment, info)
inc i
setLen(c.typesWithOps, 0)

proc initTypeVars*(p: PContext, typeMap: ptr LayeredIdTable, info: TLineInfo;
owner: PSym): TReplTypeVars =
initIdTable(result.symMap)
|
||||
|
||||
@@ -136,7 +136,7 @@ proc hashTree(c: var MD5Context, n: PNode) =
|
||||
of nkStrLit..nkTripleStrLit:
|
||||
c &= n.strVal
|
||||
else:
|
||||
for i in 0.. <n.len: hashTree(c, n.sons[i])
|
||||
for i in 0..<n.len: hashTree(c, n.sons[i])
|
||||
|
||||
proc hashType(c: var MD5Context, t: PType; flags: set[ConsiderFlag]) =
|
||||
if t == nil:
|
||||
@@ -230,14 +230,14 @@ proc hashType(c: var MD5Context, t: PType; flags: set[ConsiderFlag]) =
|
||||
c &= ','
|
||||
c.hashType(t.sons[0], flags)
|
||||
else:
|
||||
for i in 0.. <t.len: c.hashType(t.sons[i], flags)
|
||||
for i in 0..<t.len: c.hashType(t.sons[i], flags)
|
||||
c &= char(t.callConv)
|
||||
if CoType notin flags:
|
||||
if tfNoSideEffect in t.flags: c &= ".noSideEffect"
|
||||
if tfThread in t.flags: c &= ".thread"
|
||||
if tfVarargs in t.flags: c &= ".varargs"
|
||||
else:
|
||||
for i in 0.. <t.len: c.hashType(t.sons[i], flags)
|
||||
for i in 0..<t.len: c.hashType(t.sons[i], flags)
|
||||
if tfNotNil in t.flags and CoType notin flags: c &= "not nil"
|
||||
|
||||
when defined(debugSigHashes):
|
||||
|
||||
@@ -55,6 +55,7 @@ type
|
||||
# a distrinct type
|
||||
typedescMatched*: bool
|
||||
isNoCall*: bool # misused for generic type instantiations C[T]
|
||||
mutabilityProblem*: uint8 # tyVar mismatch
|
||||
inferredTypes: seq[PType] # inferred types during the current signature
|
||||
# matching. they will be reset if the matching
|
||||
# is not successful. may replace the bindings
|
||||
@@ -66,7 +67,6 @@ type
|
||||
# or when the explain pragma is used. may be
|
||||
# triggered with an idetools command in the
|
||||
# future.
|
||||
mutabilityProblem*: uint8 # tyVar mismatch
|
||||
inheritancePenalty: int # to prefer closest father object type
|
||||
|
||||
TTypeRelFlag* = enum
|
||||
@@ -200,7 +200,7 @@ proc sumGeneric(t: PType): int =
|
||||
inc result
|
||||
of tyGenericInvocation, tyTuple, tyProc, tyAnd:
|
||||
result += ord(t.kind in {tyGenericInvocation, tyAnd})
|
||||
for i in 0 .. <t.len:
|
||||
for i in 0 ..< t.len:
|
||||
if t.sons[i] != nil:
|
||||
result += t.sons[i].sumGeneric
|
||||
break
|
||||
@@ -220,11 +220,12 @@ proc sumGeneric(t: PType): int =
|
||||
proc complexDisambiguation(a, b: PType): int =
|
||||
# 'a' matches better if *every* argument matches better or equal than 'b'.
|
||||
var winner = 0
|
||||
for i in 1 .. <min(a.len, b.len):
|
||||
for i in 1 ..< min(a.len, b.len):
|
||||
let x = a.sons[i].sumGeneric
|
||||
let y = b.sons[i].sumGeneric
|
||||
#if ggDebug:
|
||||
# echo "came her ", typeToString(a.sons[i]), " ", typeToString(b.sons[i])
|
||||
#echo "came herA ", typeToString(a.sons[i]), " ", x
|
||||
#echo "came herB ", typeToString(b.sons[i]), " ", y
|
||||
if x != y:
|
||||
if winner == 0:
|
||||
if x > y: winner = 1
|
||||
@@ -239,8 +240,8 @@ proc complexDisambiguation(a, b: PType): int =
|
||||
result = winner
|
||||
when false:
|
||||
var x, y: int
|
||||
for i in 1 .. <a.len: x += a.sons[i].sumGeneric
|
||||
for i in 1 .. <b.len: y += b.sons[i].sumGeneric
|
||||
for i in 1 ..< a.len: x += a.sons[i].sumGeneric
|
||||
for i in 1 ..< b.len: y += b.sons[i].sumGeneric
|
||||
result = x - y
|
||||
|
||||
proc writeMatches*(c: TCandidate) =
|
||||
@@ -275,7 +276,7 @@ proc cmpCandidates*(a, b: TCandidate): int =
|
||||
proc argTypeToString(arg: PNode; prefer: TPreferedDesc): string =
|
||||
if arg.kind in nkSymChoices:
|
||||
result = typeToString(arg[0].typ, prefer)
|
||||
for i in 1 .. <arg.len:
|
||||
for i in 1 ..< arg.len:
|
||||
result.add(" | ")
|
||||
result.add typeToString(arg[i].typ, prefer)
|
||||
elif arg.typ == nil:
|
||||
@@ -389,7 +390,16 @@ proc isConvertibleToRange(f, a: PType): bool =
|
||||
# be less picky for tyRange, as that it is used for array indexing:
|
||||
if f.kind in {tyInt..tyInt64, tyUInt..tyUInt64} and
|
||||
a.kind in {tyInt..tyInt64, tyUInt..tyUInt64}:
|
||||
result = true
|
||||
case f.kind
|
||||
of tyInt, tyInt64: result = true
|
||||
of tyInt8: result = a.kind in {tyInt8, tyInt}
|
||||
of tyInt16: result = a.kind in {tyInt8, tyInt16, tyInt}
|
||||
of tyInt32: result = a.kind in {tyInt8, tyInt16, tyInt32, tyInt}
|
||||
of tyUInt, tyUInt64: result = true
|
||||
of tyUInt8: result = a.kind in {tyUInt8, tyUInt}
|
||||
of tyUInt16: result = a.kind in {tyUInt8, tyUInt16, tyUInt}
|
||||
of tyUInt32: result = a.kind in {tyUInt8, tyUInt16, tyUInt32, tyUInt}
|
||||
else: result = false
|
||||
elif f.kind in {tyFloat..tyFloat128} and
|
||||
a.kind in {tyFloat..tyFloat128}:
|
||||
result = true
|
||||
@@ -580,7 +590,7 @@ proc procTypeRel(c: var TCandidate, f, a: PType): TTypeRelation =
|
||||
|
||||
# Note: We have to do unification for the parameters before the
|
||||
# return type!
|
||||
for i in 1 .. <f.sonsLen:
|
||||
for i in 1 ..< f.sonsLen:
|
||||
checkParam(f.sons[i], a.sons[i])
|
||||
|
||||
if f.sons[0] != nil:
|
||||
@@ -658,7 +668,7 @@ proc matchUserTypeClass*(m: var TCandidate; ff, a: PType): PType =
|
||||
var typeParams: seq[(PSym, PType)]
|
||||
|
||||
if ff.kind == tyUserTypeClassInst:
|
||||
for i in 1 .. <(ff.len - 1):
|
||||
for i in 1 ..< (ff.len - 1):
|
||||
var
|
||||
typeParamName = ff.base.sons[i-1].sym.name
|
||||
typ = ff.sons[i]
|
||||
@@ -1288,12 +1298,13 @@ proc typeRelImpl(c: var TCandidate, f, aOrig: PType,
|
||||
of tyString: result = isConvertible
|
||||
of tyPtr:
|
||||
# ptr[Tag, char] is not convertible to 'cstring' for now:
|
||||
if a.len == 1 and a.sons[0].kind == tyChar: result = isConvertible
|
||||
of tyArray:
|
||||
if (firstOrd(a.sons[0]) == 0) and
|
||||
(skipTypes(a.sons[0], {tyRange}).kind in {tyInt..tyInt64}) and
|
||||
(a.sons[1].kind == tyChar):
|
||||
result = isConvertible
|
||||
if a.len == 1:
|
||||
let pointsTo = a.sons[0].skipTypes(abstractInst)
|
||||
if pointsTo.kind == tyChar: result = isConvertible
|
||||
elif pointsTo.kind == tyArray and firstOrd(pointsTo.sons[0]) == 0 and
|
||||
skipTypes(pointsTo.sons[0], {tyRange}).kind in {tyInt..tyInt64} and
|
||||
pointsTo.sons[1].kind == tyChar:
|
||||
result = isConvertible
|
||||
else: discard
|
||||
|
||||
of tyEmpty, tyVoid:
|
||||
@@ -1450,8 +1461,13 @@ proc typeRelImpl(c: var TCandidate, f, aOrig: PType,
|
||||
of tyOr:
|
||||
considerPreviousT:
|
||||
result = isNone
|
||||
let oldInheritancePenalty = c.inheritancePenalty
|
||||
var maxInheritance = 0
|
||||
for branch in f.sons:
|
||||
c.inheritancePenalty = 0
|
||||
let x = typeRel(c, branch, aOrig)
|
||||
maxInheritance = max(maxInheritance, c.inheritancePenalty)
|
||||
|
||||
# 'or' implies maximum matching result:
|
||||
if x > result: result = x
|
||||
if result >= isSubtype:
|
||||
@@ -1459,6 +1475,7 @@ proc typeRelImpl(c: var TCandidate, f, aOrig: PType,
|
||||
bindingRet result
|
||||
else:
|
||||
result = isNone
|
||||
c.inheritancePenalty = oldInheritancePenalty + maxInheritance
|
||||
|
||||
of tyNot:
|
||||
considerPreviousT:
|
||||
@@ -1549,11 +1566,19 @@ proc typeRelImpl(c: var TCandidate, f, aOrig: PType,
result = isNone
else:
if f.sonsLen > 0 and f.sons[0].kind != tyNone:
let oldInheritancePenalty = c.inheritancePenalty
result = typeRel(c, f.lastSon, a, flags + {trDontBind})
if doBind and result notin {isNone, isGeneric}:
let concrete = concreteType(c, a)
if concrete == nil: return isNone
put(c, f, concrete)
# bug #6526
if result in {isEqual, isSubtype}:
# 'T: Class' is a *better* match than just 'T'
# but 'T: Subclass' is even better:
c.inheritancePenalty = oldInheritancePenalty - c.inheritancePenalty -
100 * ord(result == isEqual)
result = isGeneric
else:
result = isGeneric
|
||||
|
||||
@@ -2217,7 +2242,7 @@ proc matchesAux(c: PContext, n, nOrig: PNode,
|
||||
proc semFinishOperands*(c: PContext, n: PNode) =
|
||||
# this needs to be called to ensure that after overloading resolution every
|
||||
# argument has been sem'checked:
|
||||
for i in 1 .. <n.len:
|
||||
for i in 1 ..< n.len:
|
||||
n.sons[i] = prepareOperand(c, n.sons[i])
|
||||
|
||||
proc partialMatch*(c: PContext, n, nOrig: PNode, m: var TCandidate) =
|
||||
@@ -2287,7 +2312,8 @@ proc instTypeBoundOp*(c: PContext; dc: PSym; t: PType; info: TLineInfo;
|
||||
localError(info, errGenerated, "cannot instantiate '" & dc.name.s & "'")
|
||||
else:
|
||||
result = c.semGenerateInstance(c, dc, m.bindings, info)
|
||||
assert sfFromGeneric in result.flags
|
||||
if op == attachedDeepCopy:
|
||||
assert sfFromGeneric in result.flags
|
||||
|
||||
include suggest
|
||||
|
||||
|
||||
@@ -21,9 +21,7 @@
|
||||
import
|
||||
intsets, strutils, options, ast, astalgo, trees, treetab, msgs, os,
|
||||
idents, renderer, types, passes, semfold, magicsys, cgmeth, rodread,
|
||||
lambdalifting, sempass2, lowerings, lookups
|
||||
|
||||
# implementation
|
||||
lambdalifting, sempass2, lowerings, lookups, destroyer, liftlocals
|
||||
|
||||
type
|
||||
PTransNode* = distinct PNode
|
||||
@@ -45,7 +43,7 @@ type
|
||||
inlining: int # > 0 if we are in inlining context (copy vars)
|
||||
nestedProcs: int # > 0 if we are in a nested proc
|
||||
contSyms, breakSyms: seq[PSym] # to transform 'continue' and 'break'
|
||||
deferDetected, tooEarly: bool
|
||||
deferDetected, tooEarly, needsDestroyPass: bool
|
||||
PTransf = ref TTransfContext
|
||||
|
||||
proc newTransNode(a: PNode): PTransNode {.inline.} =
|
||||
@@ -233,7 +231,7 @@ proc freshLabels(c: PTransf, n: PNode; symMap: var TIdTable) =
|
||||
let x = PSym(idTableGet(symMap, n.sym))
|
||||
if x != nil: n.sym = x
|
||||
else:
|
||||
for i in 0 .. <safeLen(n): freshLabels(c, n.sons[i], symMap)
|
||||
for i in 0 ..< safeLen(n): freshLabels(c, n.sons[i], symMap)
|
||||
|
||||
proc transformBlock(c: PTransf, n: PNode): PTransNode =
|
||||
var labl: PSym
|
||||
@@ -277,7 +275,7 @@ proc transformWhile(c: PTransf; n: PNode): PTransNode =
|
||||
var body = newTransNode(n)
|
||||
for i in 0..n.len-2:
|
||||
body[i] = transform(c, n.sons[i])
|
||||
body[<n.len] = transformLoopBody(c, n.sons[<n.len])
|
||||
body[n.len-1] = transformLoopBody(c, n.sons[n.len-1])
|
||||
result[1] = body
|
||||
discard c.breakSyms.pop
|
||||
|
||||
@@ -367,16 +365,22 @@ proc transformAddrDeref(c: PTransf, n: PNode, a, b: TNodeKind): PTransNode =
|
||||
# addr ( nkConv ( deref ( x ) ) ) --> nkConv(x)
|
||||
n.sons[0].sons[0] = m.sons[0]
|
||||
result = PTransNode(n.sons[0])
|
||||
if n.typ.kind != tyOpenArray:
|
||||
PNode(result).typ = n.typ
|
||||
of nkHiddenStdConv, nkHiddenSubConv, nkConv:
|
||||
var m = n.sons[0].sons[1]
|
||||
if m.kind == a or m.kind == b:
|
||||
# addr ( nkConv ( deref ( x ) ) ) --> nkConv(x)
|
||||
n.sons[0].sons[1] = m.sons[0]
|
||||
result = PTransNode(n.sons[0])
|
||||
if n.typ.kind != tyOpenArray:
|
||||
PNode(result).typ = n.typ
|
||||
else:
|
||||
if n.sons[0].kind == a or n.sons[0].kind == b:
|
||||
# addr ( deref ( x )) --> x
|
||||
result = PTransNode(n.sons[0].sons[0])
|
||||
if n.typ.kind != tyOpenArray:
|
||||
PNode(result).typ = n.typ
|
||||
|
||||
proc generateThunk(prc: PNode, dest: PType): PNode =
|
||||
## Converts 'prc' into '(thunk, nil)' so that it's compatible with
|
||||
@@ -512,7 +516,7 @@ proc findWrongOwners(c: PTransf, n: PNode) =
|
||||
internalError(x.info, "bah " & x.sym.name.s & " " &
|
||||
x.sym.owner.name.s & " " & getCurrOwner(c).name.s)
|
||||
else:
|
||||
for i in 0 .. <safeLen(n): findWrongOwners(c, n.sons[i])
|
||||
for i in 0 ..< safeLen(n): findWrongOwners(c, n.sons[i])
|
||||
|
||||
proc transformFor(c: PTransf, n: PNode): PTransNode =
|
||||
# generate access statements for the parameters (unless they are constant)
|
||||
@@ -642,7 +646,7 @@ proc transformArrayAccess(c: PTransf, n: PNode): PTransNode =
|
||||
result = n.PTransNode
|
||||
else:
|
||||
result = newTransNode(n)
|
||||
for i in 0 .. < n.len:
|
||||
for i in 0 ..< n.len:
|
||||
result[i] = transform(c, skipConv(n.sons[i]))
|
||||
|
||||
proc getMergeOp(n: PNode): PSym =
|
||||
@@ -746,7 +750,7 @@ proc dontInlineConstant(orig, cnst: PNode): bool {.inline.} =
|
||||
|
||||
proc commonOptimizations*(c: PSym, n: PNode): PNode =
|
||||
result = n
|
||||
for i in 0 .. < n.safeLen:
|
||||
for i in 0 ..< n.safeLen:
|
||||
result.sons[i] = commonOptimizations(c, n.sons[i])
|
||||
var op = getMergeOp(n)
|
||||
if (op != nil) and (op.magic != mNone) and (sonsLen(n) >= 3):
|
||||
@@ -782,7 +786,8 @@ proc transform(c: PTransf, n: PNode): PTransNode =
nkBlockStmt, nkBlockExpr}:
oldDeferAnchor = c.deferAnchor
c.deferAnchor = n

if n.typ != nil and tfHasAsgn in n.typ.flags:
c.needsDestroyPass = true
case n.kind
of nkSym:
result = transformSym(c, n)
@@ -972,9 +977,12 @@ proc transformBody*(module: PSym, n: PNode, prc: PSym): PNode =
result = processTransf(c, result, prc)
liftDefer(c, result)
#result = liftLambdas(prc, result)
incl(result.flags, nfTransf)
when useEffectSystem: trackProc(prc, result)
#if prc.name.s == "testbody":
liftLocalsIfRequested(prc)
if c.needsDestroyPass and newDestructors:
result = injectDestructorCalls(prc, result)
incl(result.flags, nfTransf)
#if prc.name.s == "testbody":
# echo renderTree(result)

proc transformStmt*(module: PSym, n: PNode): PNode =
@@ -985,10 +993,12 @@ proc transformStmt*(module: PSym, n: PNode): PNode =
result = processTransf(c, n, module)
liftDefer(c, result)
#result = liftLambdasForTopLevel(module, result)
incl(result.flags, nfTransf)
when useEffectSystem: trackTopLevelStmt(module, result)
#if n.info ?? "temp.nim":
# echo renderTree(result, {renderIds})
if c.needsDestroyPass and newDestructors:
result = injectDestructorCalls(module, result)
incl(result.flags, nfTransf)

proc transformExpr*(module: PSym, n: PNode): PNode =
if nfTransf in n.flags:
@@ -997,4 +1007,6 @@ proc transformExpr*(module: PSym, n: PNode): PNode =
var c = openTransf(module, "")
result = processTransf(c, n, module)
liftDefer(c, result)
if c.needsDestroyPass and newDestructors:
result = injectDestructorCalls(module, result)
incl(result.flags, nfTransf)
|
||||
|
||||
@@ -98,7 +98,7 @@ proc isDeepConstExpr*(n: PNode): bool =
|
||||
of nkExprEqExpr, nkExprColonExpr, nkHiddenStdConv, nkHiddenSubConv:
|
||||
result = isDeepConstExpr(n.sons[1])
|
||||
of nkCurly, nkBracket, nkPar, nkObjConstr, nkClosure, nkRange:
|
||||
for i in ord(n.kind == nkObjConstr) .. <n.len:
|
||||
for i in ord(n.kind == nkObjConstr) ..< n.len:
|
||||
if not isDeepConstExpr(n.sons[i]): return false
|
||||
if n.typ.isNil: result = true
|
||||
else:
|
||||
|
||||
@@ -667,14 +667,6 @@ proc lengthOrd*(t: PType): BiggestInt =
|
||||
else:
|
||||
result = lastOrd(t) - firstOrd(t) + 1
|
||||
|
||||
proc isCompatibleToCString*(a: PType): bool =
|
||||
if a.kind == tyArray:
|
||||
if (firstOrd(a.sons[0]) == 0) and
|
||||
(skipTypes(a.sons[0], {tyRange, tyGenericInst, tyAlias}).kind in
|
||||
{tyInt..tyInt64, tyUInt..tyUInt64}) and
|
||||
(a.sons[1].kind == tyChar):
|
||||
result = true
|
||||
|
||||
# -------------- type equality -----------------------------------------------
|
||||
|
||||
type
|
||||
@@ -750,7 +742,7 @@ proc equalParam(a, b: PSym): TParamsEquality =
|
||||
proc sameConstraints(a, b: PNode): bool =
|
||||
if isNil(a) and isNil(b): return true
|
||||
internalAssert a.len == b.len
|
||||
for i in 1 .. <a.len:
|
||||
for i in 1 ..< a.len:
|
||||
if not exprStructuralEquivalent(a[i].sym.constraint,
|
||||
b[i].sym.constraint):
|
||||
return false
|
||||
@@ -1517,7 +1509,7 @@ proc isCompileTimeOnly*(t: PType): bool {.inline.} =
|
||||
proc containsCompileTimeOnly*(t: PType): bool =
|
||||
if isCompileTimeOnly(t): return true
|
||||
if t.sons != nil:
|
||||
for i in 0 .. <t.sonsLen:
|
||||
for i in 0 ..< t.sonsLen:
|
||||
if t.sons[i] != nil and isCompileTimeOnly(t.sons[i]):
|
||||
return true
|
||||
return false
|
||||
|
||||
@@ -20,7 +20,7 @@ proc renderPlainSymbolName*(n: PNode): string =
|
||||
result = ""
|
||||
case n.kind
|
||||
of nkPostfix, nkAccQuoted:
|
||||
result = renderPlainSymbolName(n[<n.len])
|
||||
result = renderPlainSymbolName(n[n.len-1])
|
||||
of nkIdent:
|
||||
result = n.ident.s
|
||||
of nkSym:
|
||||
@@ -58,8 +58,8 @@ proc renderType(n: PNode): string =
|
||||
assert params.kind == nkFormalParams
|
||||
assert len(params) > 0
|
||||
result = "proc("
|
||||
for i in 1 .. <len(params): result.add(renderType(params[i]) & ',')
|
||||
result[<len(result)] = ')'
|
||||
for i in 1 ..< len(params): result.add(renderType(params[i]) & ',')
|
||||
result[len(result)-1] = ')'
|
||||
else:
|
||||
result = "proc"
|
||||
of nkIdentDefs:
|
||||
@@ -67,18 +67,18 @@ proc renderType(n: PNode): string =
|
||||
let typePos = len(n) - 2
|
||||
let typeStr = renderType(n[typePos])
|
||||
result = typeStr
|
||||
for i in 1 .. <typePos:
|
||||
for i in 1 ..< typePos:
|
||||
assert n[i].kind == nkIdent
|
||||
result.add(',' & typeStr)
|
||||
of nkTupleTy:
|
||||
result = "tuple["
|
||||
for i in 0 .. <len(n): result.add(renderType(n[i]) & ',')
|
||||
result[<len(result)] = ']'
|
||||
for i in 0 ..< len(n): result.add(renderType(n[i]) & ',')
|
||||
result[len(result)-1] = ']'
|
||||
of nkBracketExpr:
|
||||
assert len(n) >= 2
|
||||
result = renderType(n[0]) & '['
|
||||
for i in 1 .. <len(n): result.add(renderType(n[i]) & ',')
|
||||
result[<len(result)] = ']'
|
||||
for i in 1 ..< len(n): result.add(renderType(n[i]) & ',')
|
||||
result[len(result)-1] = ']'
|
||||
else: result = ""
|
||||
assert(not result.isNil)
|
||||
|
||||
@@ -91,7 +91,7 @@ proc renderParamTypes(found: var seq[string], n: PNode) =
|
||||
## generator does include the information.
|
||||
case n.kind
|
||||
of nkFormalParams:
|
||||
for i in 1 .. <len(n): renderParamTypes(found, n[i])
|
||||
for i in 1 ..< len(n): renderParamTypes(found, n[i])
|
||||
of nkIdentDefs:
|
||||
# These are parameter names + type + default value node.
|
||||
let typePos = len(n) - 2
|
||||
@@ -102,7 +102,7 @@ proc renderParamTypes(found: var seq[string], n: PNode) =
|
||||
let typ = n[typePos+1].typ
|
||||
if not typ.isNil: typeStr = typeToString(typ, preferExported)
|
||||
if typeStr.len < 1: return
|
||||
for i in 0 .. <typePos:
|
||||
for i in 0 ..< typePos:
|
||||
found.add(typeStr)
|
||||
else:
|
||||
internalError(n.info, "renderParamTypes(found,n) with " & $n.kind)
|
||||
|
||||
@@ -322,7 +322,7 @@ proc opConv*(dest: var TFullReg, src: TFullReg, desttyp, srctyp: PType): bool =
|
||||
if x <% n.len and (let f = n.sons[x].sym; f.position == x):
|
||||
dest.node.strVal = if f.ast.isNil: f.name.s else: f.ast.strVal
|
||||
else:
|
||||
for i in 0.. <n.len:
|
||||
for i in 0..<n.len:
|
||||
if n.sons[i].kind != nkSym: internalError("opConv for enum")
|
||||
let f = n.sons[i].sym
|
||||
if f.position == x:
|
||||
@@ -431,7 +431,7 @@ proc setLenSeq(c: PCtx; node: PNode; newLen: int; info: TLineInfo) =
|
||||
setLen(node.sons, newLen)
|
||||
if oldLen < newLen:
|
||||
# TODO: This is still not correct for tyPtr, tyRef default value
|
||||
for i in oldLen .. <newLen:
|
||||
for i in oldLen ..< newLen:
|
||||
node.sons[i] = newNodeI(typeKind, info)
|
||||
|
||||
proc rawExecute(c: PCtx, start: int, tos: PStackFrame): TFullReg =
|
||||
@@ -1078,7 +1078,7 @@ proc rawExecute(c: PCtx, start: int, tos: PStackFrame): TFullReg =
|
||||
regs[ra].node = newNodeI(nkBracket, c.debug[pc])
|
||||
regs[ra].node.typ = typ
|
||||
newSeq(regs[ra].node.sons, count)
|
||||
for i in 0 .. <count:
|
||||
for i in 0 ..< count:
|
||||
regs[ra].node.sons[i] = getNullValue(typ.sons[0], c.debug[pc])
|
||||
of opcNewStr:
|
||||
decodeB(rkNode)
|
||||
@@ -1213,7 +1213,7 @@ proc rawExecute(c: PCtx, start: int, tos: PStackFrame): TFullReg =
|
||||
var u = regs[rb].node
|
||||
if u.kind notin {nkEmpty..nkNilLit}:
|
||||
# XXX can be optimized:
|
||||
for i in 0.. <x.len: u.add(x.sons[i])
|
||||
for i in 0..<x.len: u.add(x.sons[i])
|
||||
else:
|
||||
stackTrace(c, tos, pc, errGenerated, "cannot add to node kind: " & $u.kind)
|
||||
regs[ra].node = u
|
||||
@@ -1555,7 +1555,7 @@ proc execProc*(c: PCtx; sym: PSym; args: openArray[PNode]): PNode =
|
||||
if not isEmptyType(sym.typ.sons[0]) or sym.kind == skMacro:
|
||||
putIntoReg(tos.slots[0], getNullValue(sym.typ.sons[0], sym.info))
|
||||
# XXX We could perform some type checking here.
|
||||
for i in 1.. <sym.typ.len:
|
||||
for i in 1..<sym.typ.len:
|
||||
putIntoReg(tos.slots[i], args[i-1])
|
||||
|
||||
result = rawExecute(c, start, tos).regToNode
|
||||
@@ -1637,7 +1637,7 @@ proc evalConstExprAux(module: PSym; cache: IdentCache; prc: PSym, n: PNode,
|
||||
when debugEchoCode: c.echoCode start
|
||||
var tos = PStackFrame(prc: prc, comesFrom: 0, next: nil)
|
||||
newSeq(tos.slots, c.prc.maxSlots)
|
||||
#for i in 0 .. <c.prc.maxSlots: tos.slots[i] = newNode(nkEmpty)
|
||||
#for i in 0 ..< c.prc.maxSlots: tos.slots[i] = newNode(nkEmpty)
|
||||
result = rawExecute(c, start, tos).regToNode
|
||||
if result.info.line < 0: result.info = n.info
|
||||
|
||||
@@ -1670,7 +1670,7 @@ proc setupMacroParam(x: PNode, typ: PType): TFullReg =
|
||||
|
||||
iterator genericParamsInMacroCall*(macroSym: PSym, call: PNode): (PSym, PNode) =
|
||||
let gp = macroSym.ast[genericParamsPos]
|
||||
for i in 0 .. <gp.len:
|
||||
for i in 0 ..< gp.len:
|
||||
let genericParam = gp[i].sym
|
||||
let posInCall = macroSym.typ.len + i
|
||||
yield (genericParam, call[posInCall])
|
||||
@@ -1688,8 +1688,7 @@ proc evalMacroCall*(module: PSym; cache: IdentCache, n, nOrig: PNode,
# arity here too:
if sym.typ.len > n.safeLen and sym.typ.len > 1:
globalError(n.info, "in call '$#' got $#, but expected $# argument(s)" % [
n.renderTree,
$ <n.safeLen, $ <sym.typ.len])
n.renderTree, $(n.safeLen-1), $(sym.typ.len-1)])
|
||||
|
||||
setupGlobalCtx(module, cache)
|
||||
var c = globalCtx
|
||||
@@ -1713,11 +1712,11 @@ proc evalMacroCall*(module: PSym; cache: IdentCache, n, nOrig: PNode,
|
||||
tos.slots[0].node = newNodeI(nkEmpty, n.info)
|
||||
|
||||
# setup parameters:
|
||||
for i in 1.. <sym.typ.len:
|
||||
for i in 1..<sym.typ.len:
|
||||
tos.slots[i] = setupMacroParam(n.sons[i], sym.typ.sons[i])
|
||||
|
||||
let gp = sym.ast[genericParamsPos]
|
||||
for i in 0 .. <gp.len:
|
||||
for i in 0 ..< gp.len:
|
||||
if sfImmediate notin sym.flags:
|
||||
let idx = sym.typ.len + i
|
||||
if idx < n.len:
|
||||
@@ -1732,7 +1731,7 @@ proc evalMacroCall*(module: PSym; cache: IdentCache, n, nOrig: PNode,
c.callsite = nil
globalError(n.info, "static[T] or typedesc nor supported for .immediate macros")
# temporary storage:
#for i in L .. <maxSlots: tos.slots[i] = newNode(nkEmpty)
#for i in L ..< maxSlots: tos.slots[i] = newNode(nkEmpty)
result = rawExecute(c, start, tos).regToNode
if result.info.line < 0: result.info = n.info
if cyclicTree(result): globalError(n.info, errCyclicTree)

@@ -41,7 +41,7 @@ proc mapTypeToBracketX(name: string; m: TMagic; t: PType; info: TLineInfo;
inst=false): PNode =
result = newNodeIT(nkBracketExpr, if t.n.isNil: info else: t.n.info, t)
result.add atomicTypeX(name, m, t, info)
for i in 0 .. < t.len:
for i in 0 ..< t.len:
if t.sons[i] == nil:
let void = atomicTypeX("void", mVoid, t, info)
void.typ = newType(tyVoid, t.owner)

@@ -119,7 +119,7 @@ proc mapTypeToAstX(t: PType; info: TLineInfo;
result = atomicType("typeDesc", mTypeDesc)
of tyGenericInvocation:
result = newNodeIT(nkBracketExpr, if t.n.isNil: info else: t.n.info, t)
for i in 0 .. < t.len:
for i in 0 ..< t.len:
result.add mapTypeToAst(t.sons[i], info)
of tyGenericInst, tyAlias:
if inst:

@@ -128,7 +128,7 @@ proc mapTypeToAstX(t: PType; info: TLineInfo;
else:
result = newNodeX(nkBracketExpr)
result.add mapTypeToAst(t.lastSon, info)
for i in 1 .. < t.len-1:
for i in 1 ..< t.len-1:
result.add mapTypeToAst(t.sons[i], info)
else:
result = mapTypeToAstX(t.lastSon, info, inst, allowRecursion)

@@ -401,7 +401,7 @@ proc sameConstant*(a, b: PNode): bool =

proc genLiteral(c: PCtx; n: PNode): int =
# types do not matter here:
for i in 0 .. <c.constants.len:
for i in 0 ..< c.constants.len:
if sameConstant(c.constants[i], n): return i
result = rawGenLiteral(c, n)

@@ -430,7 +430,7 @@ proc genCase(c: PCtx; n: PNode; dest: var TDest) =
c.gen(n.sons[0], tmp)
# branch tmp, codeIdx
# fjmp elseLabel
for i in 1 .. <n.len:
for i in 1 ..< n.len:
let it = n.sons[i]
if it.len == 1:
# else stmt:

@@ -460,7 +460,7 @@ proc genTry(c: PCtx; n: PNode; dest: var TDest) =
c.gen(n.sons[0], dest)
c.clearDest(n, dest)
c.patch(elsePos)
for i in 1 .. <n.len:
for i in 1 ..< n.len:
let it = n.sons[i]
if it.kind != nkFinally:
var blen = len(it)

@@ -518,7 +518,7 @@ proc genCall(c: PCtx; n: PNode; dest: var TDest) =
let x = c.getTempRange(n.len, slotTempUnknown)
# varargs need 'opcSetType' for the FFI support:
let fntyp = skipTypes(n.sons[0].typ, abstractInst)
for i in 0.. <n.len:
for i in 0..<n.len:
#if i > 0 and i < sonsLen(fntyp):
# let paramType = fntyp.n.sons[i]
# if paramType.typ.isCompileTimeOnly: continue

@@ -995,7 +995,7 @@ proc genMagic(c: PCtx; n: PNode; dest: var TDest; m: TMagic) =
let n = n[1].skipConv
let x = c.getTempRange(n.len, slotTempUnknown)
internalAssert n.kind == nkBracket
for i in 0.. <n.len:
for i in 0..<n.len:
var r: TRegister = x+i
c.gen(n.sons[i], r)
c.gABC(n, opcEcho, x, n.len)

@@ -1645,7 +1645,7 @@ proc genObjConstr(c: PCtx, n: PNode, dest: var TDest) =
c.gABx(n, opcNew, dest, c.genType(t.sons[0]))
else:
c.gABx(n, opcLdNull, dest, c.genType(n.typ))
for i in 1.. <n.len:
for i in 1..<n.len:
let it = n.sons[i]
if it.kind == nkExprColonExpr and it.sons[0].kind == nkSym:
let idx = genField(it.sons[0])

@@ -1660,7 +1660,7 @@ proc genTupleConstr(c: PCtx, n: PNode, dest: var TDest) =
if dest < 0: dest = c.getTemp(n.typ)
c.gABx(n, opcLdNull, dest, c.genType(n.typ))
# XXX x = (x.old, 22) produces wrong code ... stupid self assignments
for i in 0.. <n.len:
for i in 0..<n.len:
let it = n.sons[i]
if it.kind == nkExprColonExpr:
let idx = genField(it.sons[0])

@@ -1796,7 +1796,7 @@ proc gen(c: PCtx; n: PNode; dest: var TDest; flags: TGenFlags = {}) =
for x in n: gen(c, x)
of nkStmtListExpr:
let L = n.len-1
for i in 0 .. <L: gen(c, n.sons[i])
for i in 0 ..< L: gen(c, n.sons[i])
gen(c, n.sons[L], dest, flags)
of nkPragmaBlock:
gen(c, n.lastSon, dest, flags)

@@ -1882,7 +1882,7 @@ proc genExpr*(c: PCtx; n: PNode, requiresValue = true): int =
proc genParams(c: PCtx; params: PNode) =
# res.sym.position is already 0
c.prc.slots[0] = (inUse: true, kind: slotFixedVar)
for i in 1.. <params.len:
for i in 1..<params.len:
c.prc.slots[i] = (inUse: true, kind: slotFixedLet)
c.prc.maxSlots = max(params.len, 1)

@@ -1895,7 +1895,7 @@ proc finalJumpTarget(c: PCtx; pc, diff: int) =

proc genGenericParams(c: PCtx; gp: PNode) =
var base = c.prc.maxSlots
for i in 0.. <gp.len:
for i in 0..<gp.len:
var param = gp.sons[i].sym
param.position = base + i # XXX: fix this earlier; make it consistent with templates
c.prc.slots[base + i] = (inUse: true, kind: slotFixedLet)

@@ -1903,7 +1903,7 @@ proc genGenericParams(c: PCtx; gp: PNode) =

proc optimizeJumps(c: PCtx; start: int) =
const maxIterations = 10
for i in start .. <c.code.len:
for i in start ..< c.code.len:
let opc = c.code[i].opcode
case opc
of opcTJmp, opcFJmp:

@@ -78,7 +78,7 @@ proc storeAny(s: var string; t: PType; a: PNode; stored: var IntSet) =
s.add("]")
of tyTuple:
s.add("{")
for i in 0.. <t.len:
for i in 0..<t.len:
if i > 0: s.add(", ")
s.add("\"Field" & $i)
s.add("\": ")

@@ -90,7 +90,7 @@ proc storeAny(s: var string; t: PType; a: PNode; stored: var IntSet) =
s.add("}")
of tySet:
s.add("[")
for i in 0.. <a.len:
for i in 0..<a.len:
if i > 0: s.add(", ")
if a[i].kind == nkRange:
var x = copyNode(a[i][0])

@@ -47,6 +47,11 @@ template wrap1s_ospaths(op) {.dirty.} =
setResult(a, op(getString(a, 0)))
ospathsop op

template wrap2s_ospaths(op) {.dirty.} =
proc `op Wrapper`(a: VmArgs) {.nimcall.} =
setResult(a, op(getString(a, 0), getString(a, 1)))
ospathsop op

template wrap1s_system(op) {.dirty.} =
proc `op Wrapper`(a: VmArgs) {.nimcall.} =
setResult(a, op(getString(a, 0)))

@@ -96,7 +101,7 @@ proc registerAdditionalOps*(c: PCtx) =
wrap1f_math(ceil)
wrap2f_math(fmod)

wrap1s_ospaths(getEnv)
wrap2s_ospaths(getEnv)
wrap1s_ospaths(existsEnv)
wrap1s_os(dirExists)
wrap1s_os(fileExists)

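A minimal compile-time sketch of what the two-argument wrapper registered above enables (this assumes ``os``/``ospaths`` export a two-parameter ``getEnv(key, default)``; the variable name is invented):

.. code-block:: nim

  import os

  static:
    # evaluated by the VM; falls back to the default when the variable is unset
    echo getEnv("SOME_UNSET_VARIABLE", "fallback")
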
@@ -55,7 +55,7 @@ type
wFloatchecks, wNanChecks, wInfChecks,
wAssertions, wPatterns, wWarnings,
wHints, wOptimization, wRaises, wWrites, wReads, wSize, wEffects, wTags,
wDeadCodeElim, wSafecode, wNoForward, wReorder, wNoRewrite,
wDeadCodeElim, wSafecode, wPackage, wNoForward, wReorder, wNoRewrite,
wPragma,
wCompileTime, wNoInit,
wPassc, wPassl, wBorrow, wDiscardable,

@@ -66,7 +66,7 @@ type
wWrite, wGensym, wInject, wDirty, wInheritable, wThreadVar, wEmit,
wAsmNoStackFrame,
wImplicitStatic, wGlobal, wCodegenDecl, wUnchecked, wGuard, wLocks,
wPartial, wExplain,
wPartial, wExplain, wLiftLocals,

wAuto, wBool, wCatch, wChar, wClass,
wConst_cast, wDefault, wDelete, wDouble, wDynamic_cast,

@@ -143,7 +143,7 @@ const

"assertions", "patterns", "warnings", "hints",
"optimization", "raises", "writes", "reads", "size", "effects", "tags",
"deadcodeelim", "safecode", "noforward", "reorder", "norewrite",
"deadcodeelim", "safecode", "package", "noforward", "reorder", "norewrite",
"pragma",
"compiletime", "noinit",
"passc", "passl", "borrow", "discardable", "fieldchecks",

@@ -152,7 +152,7 @@ const
"computedgoto", "injectstmt", "experimental",
"write", "gensym", "inject", "dirty", "inheritable", "threadvar", "emit",
"asmnostackframe", "implicitstatic", "global", "codegendecl", "unchecked",
"guard", "locks", "partial", "explain",
"guard", "locks", "partial", "explain", "liftlocals",

"auto", "bool", "catch", "char", "class",
"const_cast", "default", "delete", "double",

@@ -123,7 +123,7 @@ proc returnsNewExpr*(n: PNode): NewLocation =
of nkCurly, nkBracket, nkPar, nkObjConstr, nkClosure,
nkIfExpr, nkIfStmt, nkWhenStmt, nkCaseStmt, nkTryStmt:
result = newLit
for i in ord(n.kind == nkObjConstr) .. <n.len:
for i in ord(n.kind == nkObjConstr) ..< n.len:
let x = returnsNewExpr(n.sons[i])
case x
of newNone: return newNone

@@ -202,6 +202,11 @@ vcc.cpp.linkerexe = "vccexe.exe"

# set the options for specific platforms:
vcc.options.always = "/nologo"
@if release:
# no debug symbols in release builds
@else:
vcc.options.always %= "${vcc.options.always} /Z7" # Get VCC to output full debug symbols in the obj file
@end
vcc.cpp.options.always %= "${vcc.options.always} /EHsc"
vcc.options.linker = "/nologo /DEBUG /Zi /F33554432" # set the stack size to 32 MiB
vcc.cpp.options.linker %= "${vcc.options.linker}"

@@ -222,8 +227,8 @@ vcc.options.linker %= "--platform:arm ${vcc.options.linker}"
vcc.cpp.options.linker %= "--platform:arm ${vcc.cpp.options.linker}"
@end

vcc.options.debug = "/Zi /FS /Od"
vcc.cpp.options.debug = "/Zi /FS /Od"
vcc.options.debug = "/Od"
vcc.cpp.options.debug = "/Od"
vcc.options.speed = "/O2"
vcc.cpp.options.speed = "/O2"
vcc.options.size = "/O1"

@@ -20,10 +20,12 @@ to compile to C++, Objective-C or JavaScript. This document tries to
concentrate in a single place all the backend and interfacing options.

The Nim compiler supports mainly two backend families: the C, C++ and
Objective-C targets and the JavaScript target. `The C like targets`_ creates
source files which can be compiled into a library or a final executable. `The
JavaScript target`_ can generate a ``.js`` file which you reference from an
HTML file or create a `standalone nodejs program <http://nodejs.org>`_.
Objective-C targets and the JavaScript target. `The C like targets
<#backends-the-c-like-targets>`_ creates source files which can be compiled
into a library or a final executable. `The JavaScript target
<#backends-the-javascript-target>`_ can generate a ``.js`` file which you
reference from an HTML file or create a `standalone nodejs program
<http://nodejs.org>`_.

On top of generating libraries or standalone applications, Nim offers
bidirectional interfacing with the backend targets through generic and

@@ -29,6 +29,7 @@ Options:
--floatChecks:on|off turn all floating point (NaN/Inf) checks on|off
--nanChecks:on|off turn NaN checks on|off
--infChecks:on|off turn Inf checks on|off
--nilChecks:on|off turn nil checks on|off
--deadCodeElim:on|off whole program dead code elimination on|off
--opt:none|speed|size optimize not at all or for speed|size
Note: use -d:release for a release build!

@@ -206,7 +206,7 @@ strings, because they are precompiled.
**Note**: Passing variables to the ``dynlib`` pragma will fail at runtime
because of order of initialization problems.

**Note**: A ``dynlib`` import can be overriden with
**Note**: A ``dynlib`` import can be overridden with
the ``--dynlibOverride:name`` command line option. The Compiler User Guide
contains further information.

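As a hedged illustration of the note above (the library and symbol names are invented), a ``dynlib`` import that could later be redirected at build time with ``--dynlibOverride:libgreeter`` looks like this:

.. code-block:: nim

  # resolves "greet" at runtime from the named shared library
  proc greet(name: cstring): cint {.importc: "greet", dynlib: "libgreeter.so".}
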
@@ -2,7 +2,7 @@ Guards and locks
================

Apart from ``spawn`` and ``parallel`` Nim also provides all the common low level
concurrency mechanisms like locks, atomic intristics or condition variables.
concurrency mechanisms like locks, atomic intrinsics or condition variables.

Nim significantly improves on the safety of these features via additional
pragmas:

@@ -74,7 +74,7 @@ model low level lockfree mechanisms:

The ``locks`` pragma takes a list of lock expressions ``locks: [a, b, ...]``
in order to support *multi lock* statements. Why these are essential is
explained in the `lock levels`_ section.
explained in the `lock levels <#guards-and-locks-lock-levels>`_ section.

|
||||
|
||||
@@ -105,7 +105,7 @@ From import statement
|
||||
~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
After the ``from`` statement a module name follows followed by
|
||||
an ``import`` to list the symbols one likes to use without explict
|
||||
an ``import`` to list the symbols one likes to use without explicit
|
||||
full qualification:
|
||||
|
||||
.. code-block:: nim
|
||||
@@ -123,7 +123,7 @@ in ``module``.
|
||||
Export statement
|
||||
~~~~~~~~~~~~~~~~
|
||||
|
||||
An ``export`` statement can be used for symbol fowarding so that client
|
||||
An ``export`` statement can be used for symbol forwarding so that client
|
||||
modules don't need to import a module's dependencies:
|
||||
|
||||
.. code-block:: nim
|
||||
|
||||
@@ -70,7 +70,7 @@ compileTime pragma
|
||||
The ``compileTime`` pragma is used to mark a proc or variable to be used at
|
||||
compile time only. No code will be generated for it. Compile time procs are
|
||||
useful as helpers for macros. Since version 0.12.0 of the language, a proc
|
||||
that uses ``system.NimNode`` within its parameter types is implictly declared
|
||||
that uses ``system.NimNode`` within its parameter types is implicitly declared
|
||||
``compileTime``:
|
||||
|
||||
.. code-block:: nim
|
||||
@@ -733,7 +733,8 @@ about the ``importcpp`` pragma pattern language. It is not necessary
|
||||
to know all the details described here.
|
||||
|
||||
|
||||
Similar to the `importc pragma for C <manual.html#importc-pragma>`_, the
|
||||
Similar to the `importc pragma for C
|
||||
<#foreign-function-interface-importc-pragma>`_, the
|
||||
``importcpp`` pragma can be used to import `C++`:idx: methods or C++ symbols
|
||||
in general. The generated code then uses the C++ method calling
|
||||
syntax: ``obj->method(arg)``. In combination with the ``header`` and ``emit``
|
||||
@@ -955,10 +956,11 @@ Produces:
|
||||
|
||||
ImportObjC pragma
|
||||
-----------------
|
||||
Similar to the `importc pragma for C <manual.html#importc-pragma>`_, the
|
||||
``importobjc`` pragma can be used to import `Objective C`:idx: methods. The
|
||||
generated code then uses the Objective C method calling syntax: ``[obj method
|
||||
param1: arg]``. In addition with the ``header`` and ``emit`` pragmas this
|
||||
Similar to the `importc pragma for C
|
||||
<#foreign-function-interface-importc-pragma>`_, the ``importobjc`` pragma can
|
||||
be used to import `Objective C`:idx: methods. The generated code then uses the
|
||||
Objective C method calling syntax: ``[obj method param1: arg]``.
|
||||
In addition with the ``header`` and ``emit`` pragmas this
|
||||
allows *sloppy* interfacing with libraries written in Objective C:
|
||||
|
||||
.. code-block:: Nim
|
||||
|
||||
@@ -142,10 +142,11 @@ The method call syntax conflicts with explicit generic instantiations:
|
||||
parsed as ``(x.p)[T]``.
|
||||
|
||||
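For example (a hedged sketch):

.. code-block:: nim

  proc p[T](x: T): string = $x

  echo p[int](42)      # explicit instantiation with call syntax: fine
  # echo 42.p[int]     # parsed as (42.p)[int] and therefore rejected
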
**Future directions**: ``p[.T.]`` might be introduced as an alternative syntax
to pass explict types to a generic and then ``x.p[.T.]`` can be parsed as
to pass explicit types to a generic and then ``x.p[.T.]`` can be parsed as
``x.(p[.T.])``.

See also: `Limitations of the method call syntax`_.
See also: `Limitations of the method call syntax
<#templates-limitations-of-the-method-call-syntax>`_.

Properties

@@ -178,7 +179,7 @@ different; for this a special setter syntax is needed:
Command invocation syntax
-------------------------

Routines can be invoked without the ``()`` if the call is syntatically
Routines can be invoked without the ``()`` if the call is syntactically
a statement. This command invocation syntax also works for
expressions, but then only a single argument may follow. This restriction
means ``echo f 1, f 2`` is parsed as ``echo(f(1), f(2))`` and not as

@@ -199,7 +199,7 @@ The rules for compile-time computability are:
(possibly empty) list of compile-time computable arguments.

Constants cannot be of type ``ptr``, ``ref``, ``var`` or ``object``, nor can
Constants cannot be of type ``ptr``, ``ref`` or ``var``, nor can
they contain such a type.

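A short sketch of the rule as now stated (the type is invented for illustration):

.. code-block:: nim

  type Point = object
    x, y: int

  const origin = Point(x: 0, y: 0)   # an object constant is fine
  # const p: ref Point = nil         # a 'ref' constant is still rejected
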
@@ -293,7 +293,7 @@ Limitations of the method call syntax

The expression ``x`` in ``x.f`` needs to be semantically checked (that means
symbol lookup and type checking) before it can be decided that it needs to be
rewritten to ``f(x)``. Therefore the dot syntax has some limiations when it
rewritten to ``f(x)``. Therefore the dot syntax has some limitations when it
is used to invoke templates/macros:

.. code-block:: nim

@@ -5,7 +5,7 @@ To enable thread support the ``--threads:on`` command line switch needs to
be used. The ``system`` module then contains several threading primitives.
See the `threads <threads.html>`_ and `channels <channels.html>`_ modules
for the low level thread API. There are also high level parallelism constructs
available. See `spawn`_ for further details.
available. See `spawn <#parallel-spawn>`_ for further details.

Nim's memory model for threads is quite different than that of other common
programming languages (C, Pascal, Java): Each thread has its own (garbage

@@ -7,12 +7,12 @@ There are 3 operations that are bound to a type:
2. Destruction
3. Deep copying for communication between threads

These operations can be *overriden* instead of *overloaded*. This means the
These operations can be *overridden* instead of *overloaded*. This means the
implementation is automatically lifted to structured types. For instance if type
``T`` has an overriden assignment operator ``=`` this operator is also used
``T`` has an overridden assignment operator ``=`` this operator is also used
for assignments of the type ``seq[T]``. Since these operations are bound to a
type they have to be bound to a nominal type for reasons of simplicity of
implementation: This means an overriden ``deepCopy`` for ``ref T`` is really
implementation: This means an overridden ``deepCopy`` for ``ref T`` is really
bound to ``T`` and not to ``ref T``. This also means that one cannot override
``deepCopy`` for both ``ptr T`` and ``ref T`` at the same time; instead a
helper distinct or object type has to be used for one pointer type.

@@ -491,4 +491,4 @@ metatypes ``typed`` and ``typedesc`` are not lazy.
Varargs matching
----------------

See `Varargs`_.
See `Varargs <#types-varargs>`_.

@@ -114,7 +114,8 @@ if the literal's value fits this smaller type and such a conversion is less
expensive than other implicit conversions, so ``myInt16 + 34`` produces
an ``int16`` result.

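Concretely (a minimal sketch):

.. code-block:: nim

  var myInt16 = 5'i16
  let x = myInt16 + 34   # the literal is converted, so x is of type int16
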
For further details, see `Convertible relation`_.
For further details, see `Convertible relation
<#type-relations-convertible-relation>`_.

Subrange types

@@ -550,7 +551,7 @@ order. The *names* of the fields also have to be identical.

The assignment operator for tuples copies each component.
The default assignment operator for objects copies each component. Overloading
of the assignment operator is described in `type-bound-operations-operator`_.
of the assignment operator is described in `type-bound-operations-operator`_.

.. code-block:: nim

@@ -677,6 +678,44 @@ branch switch ``system.reset`` has to be used. Also, when the fields of a
particular branch are specified during object construction, the correct value
for the discriminator must be supplied at compile-time.

Package level objects
---------------------

Every Nim module resides in a (nimble) package. An object type can be attached
to the package it resides in. If that is done, the type can be referenced from
other modules as an `incomplete`:idx: object type. This features allows to
break up recursive type dependencies accross module boundaries. Incomplete
object types are always passed ``byref`` and can only be used in pointer like
contexts (``var/ref/ptr IncompleteObject``) in general since the compiler does
not yet know the size of the object. To complete an incomplete object
the ``package`` pragma has to be used. ``package`` implies ``byref``.

As long as a type ``T`` is incomplete ``sizeof(T)`` or "runtime type
information" for ``T`` is not available.

Example:

.. code-block:: nim

# module A (in an arbitrary package)
type
Pack.SomeObject = object ## declare as incomplete object of package 'Pack'
Triple = object
a, b, c: ref SomeObject ## pointers to incomplete objects are allowed

## Incomplete objects can be used as parameters:
proc myproc(x: SomeObject) = discard

.. code-block:: nim

# module B (in package "Pack")
type
SomeObject* {.package.} = object ## Use 'package' to complete the object
s, t: string
x, y: int

Set type
--------

@@ -885,8 +924,8 @@ not compatible to ``pointer`` to prevent the following from compiling:
Future directions:

* Memory regions might become available for ``string`` and ``seq`` too.
* Builtin regions like ``private``, ``global`` and ``local`` will
prove very useful for the upcoming OpenCL target.
* Builtin regions like ``private``, ``global`` and ``local`` might be
useful for an OpenCL target.
* Builtin "regions" can model ``lent`` and ``unique`` pointers.
* An assignment operator can be attached to a region so that proper write
barriers can be generated. This would imply that the GC can be implemented

5
koch.nim
5
koch.nim
@@ -263,7 +263,7 @@ proc buildTools(latest: bool) =
nimexec "c -o:" & nimgrepExe & " tools/nimgrep.nim"
when defined(windows): buildVccTool()

nimexec "c -o:" & ("bin/nimresolve".exe) & " tools/nimresolve.nim"
#nimexec "c -o:" & ("bin/nimresolve".exe) & " tools/nimresolve.nim"

buildNimble(latest)

@@ -472,7 +472,8 @@ proc temp(args: string) =
# 125 is the magic number to tell git bisect to skip the current
# commit.
let (bootArgs, programArgs) = splitArgs(args)
exec("nim c " & bootArgs & " compiler" / "nim", 125)
let nimexec = findNim()
exec(nimexec & " c " & bootArgs & " compiler" / "nim", 125)
copyExe(output, finalDest)
if programArgs.len > 0: exec(finalDest & " " & programArgs)

@@ -129,13 +129,6 @@ const
nnkCallKinds* = {nnkCall, nnkInfix, nnkPrefix, nnkPostfix, nnkCommand,
nnkCallStrLit}

proc `[]`*(n: NimNode, i: int): NimNode {.magic: "NChild", noSideEffect.}
## get `n`'s `i`'th child.

proc `[]=`*(n: NimNode, i: int, child: NimNode) {.magic: "NSetChild",
noSideEffect.}
## set `n`'s `i`'th child to `child`.

proc `!`*(s: string): NimIdent {.magic: "StrToIdent", noSideEffect.}
## constructs an identifier from the string `s`

@@ -162,6 +155,20 @@ proc sameType*(a, b: NimNode): bool {.magic: "SameNodeType", noSideEffect.} =
proc len*(n: NimNode): int {.magic: "NLen", noSideEffect.}
## returns the number of children of `n`.

proc `[]`*(n: NimNode, i: int): NimNode {.magic: "NChild", noSideEffect.}
## get `n`'s `i`'th child.

proc `[]`*(n: NimNode, i: BackwardsIndex): NimNode = n[n.len - i.int]
## get `n`'s `i`'th child.

proc `[]=`*(n: NimNode, i: int, child: NimNode) {.magic: "NSetChild",
noSideEffect.}
## set `n`'s `i`'th child to `child`.

proc `[]=`*(n: NimNode, i: BackwardsIndex, child: NimNode) =
## set `n`'s `i`'th child to `child`.
n[n.len - i.int] = child

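A hedged usage sketch for the ``BackwardsIndex`` accessors added above; the macro name is invented for illustration:

.. code-block:: nim

  import macros

  macro echoLast(body: untyped): untyped =
    # `body[^1]` resolves via the new `[]`(n, BackwardsIndex) overload
    echo body[^1].repr
    result = body

  echoLast:
    echo "first"
    echo "second"
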
proc add*(father, child: NimNode): NimNode {.magic: "NAdd", discardable,
noSideEffect, locks: 0.}
## Adds the `child` to the `father` node. Returns the

@@ -839,7 +846,7 @@ proc newNilLit*(): NimNode {.compileTime.} =
## New nil literal shortcut
result = newNimNode(nnkNilLit)

proc last*(node: NimNode): NimNode {.compileTime.} = node[<node.len]
proc last*(node: NimNode): NimNode {.compileTime.} = node[node.len-1]
## Return the last item in nodes children. Same as `node[^1]`

@@ -887,7 +894,7 @@ proc newIfStmt*(branches: varargs[tuple[cond, body: NimNode]]):

proc copyChildrenTo*(src, dest: NimNode) {.compileTime.}=
## Copy all children from `src` to `dest`
for i in 0 .. < src.len:
for i in 0 ..< src.len:
dest.add src[i].copyNimTree

template expectRoutine(node: NimNode) =

@@ -986,6 +993,11 @@ iterator items*(n: NimNode): NimNode {.inline.} =
for i in 0 ..< n.len:
yield n[i]

iterator pairs*(n: NimNode): (int, NimNode) {.inline.} =
## Iterates over the children of the NimNode ``n`` and its indices.
for i in 0 ..< n.len:
yield (i, n[i])

iterator children*(n: NimNode): NimNode {.inline.} =
## Iterates over the children of the NimNode ``n``.
for i in 0 ..< n.len:

@@ -1099,10 +1111,10 @@ proc eqIdent*(node: NimNode; s: string): bool {.compileTime.} =
else:
result = false

proc hasArgOfName* (params: NimNode; name: string): bool {.compiletime.}=
proc hasArgOfName*(params: NimNode; name: string): bool {.compiletime.}=
## Search nnkFormalParams for an argument.
assert params.kind == nnkFormalParams
for i in 1 .. <params.len:
for i in 1 ..< params.len:
template node: untyped = params[i]
if name.eqIdent( $ node[0]):
return true

@@ -18,7 +18,7 @@
## var
## a: ActorPool[int, void]
## createActorPool(a)
## for i in 0 .. < 300:
## for i in 0 ..< 300:
## a.spawn(i, proc (x: int) {.thread.} = echo x)
## a.join()
##

@@ -133,7 +133,7 @@ proc createActorPool*[In, Out](a: var ActorPool[In, Out], poolSize = 4) =
newSeq(a.actors, poolSize)
when Out isnot void:
open(a.outputs)
for i in 0 .. < a.actors.len:
for i in 0 ..< a.actors.len:
a.actors[i] = spawn(poolWorker[In, Out])

proc sync*[In, Out](a: var ActorPool[In, Out], polling=50) =

@@ -164,8 +164,8 @@ proc terminate*[In, Out](a: var ActorPool[In, Out]) =
## resources attached to `a`.
var t: Task[In, Out]
t.shutdown = true
for i in 0.. <a.actors.len: send(a.actors[i].i, t)
for i in 0.. <a.actors.len: join(a.actors[i])
for i in 0..<a.actors.len: send(a.actors[i].i, t)
for i in 0..<a.actors.len: join(a.actors[i])
when Out isnot void:
close(a.outputs)
a.actors = nil

@@ -227,7 +227,7 @@ when not defined(testing) and isMainModule:
var
a: ActorPool[int, void]
createActorPool(a)
for i in 0 .. < 300:
for i in 0 ..< 300:
a.spawn(i, proc (x: int) {.thread.} = echo x)

when false:

@@ -98,15 +98,18 @@ proc dbFormat(formatstr: SqlQuery, args: varargs[string]): string =
var a = 0
if args.len > 0 and not string(formatstr).contains("?"):
dbError("""parameter substitution expects "?" """)
for c in items(string(formatstr)):
if c == '?':
if args[a] == nil:
add(result, "NULL")
if args.len == 0:
return string(formatstr)
else:
for c in items(string(formatstr)):
if c == '?':
if args[a] == nil:
add(result, "NULL")
else:
add(result, dbQuote(args[a]))
inc(a)
else:
add(result, dbQuote(args[a]))
inc(a)
else:
add(result, c)
add(result, c)

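A hedged illustration of the new control flow (the literal results assume the usual ``dbQuote`` quoting):

.. code-block:: nim

  # with no arguments the query is returned untouched, so a literal '?' is fine:
  #   dbFormat(sql"SELECT '?'")        -> "SELECT '?'"
  # with arguments, each '?' is substituted with a quoted value:
  #   dbFormat(sql"SELECT ?", "42")    -> "SELECT '42'"
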
proc tryExec*(db: DbConn, query: SqlQuery,
args: varargs[string, `$`]): bool {.tags: [ReadDbEffect, WriteDbEffect].} =

@@ -155,7 +155,7 @@ type
## - ``"abc".match(re"(?<letter>\w)").captures["letter"] == "a"``
## - ``"abc".match(re"(\w)\w").captures[-1] == "ab"``
##
## ``captureBounds[]: Option[Slice[int]]``
## ``captureBounds[]: Option[HSlice[int, int]]``
## gets the bounds of the given capture according to the same rules as
## the above. If the capture is not filled, then ``None`` is returned.
## The bounds are both inclusive.

@@ -167,7 +167,7 @@ type
## ``match: string``
## the full text of the match.
##
## ``matchBounds: Slice[int]``
## ``matchBounds: HSlice[int, int]``
## the bounds of the match, as in ``captureBounds[]``
##
## ``(captureBounds|captures).toTable``

@@ -182,9 +182,9 @@ type
## Not nil.
str*: string ## The string that was matched against.
## Not nil.
pcreMatchBounds: seq[Slice[cint]] ## First item is the bounds of the match
## Other items are the captures
## `a` is inclusive start, `b` is exclusive end
pcreMatchBounds: seq[HSlice[cint, cint]] ## First item is the bounds of the match
## Other items are the captures
## `a` is inclusive start, `b` is exclusive end

Captures* = distinct RegexMatch
CaptureBounds* = distinct RegexMatch

@@ -251,13 +251,13 @@ proc captureBounds*(pattern: RegexMatch): CaptureBounds = return CaptureBounds(p

proc captures*(pattern: RegexMatch): Captures = return Captures(pattern)

proc `[]`*(pattern: CaptureBounds, i: int): Option[Slice[int]] =
proc `[]`*(pattern: CaptureBounds, i: int): Option[HSlice[int, int]] =
let pattern = RegexMatch(pattern)
if pattern.pcreMatchBounds[i + 1].a != -1:
let bounds = pattern.pcreMatchBounds[i + 1]
return some(int(bounds.a) .. int(bounds.b-1))
else:
return none(Slice[int])
return none(HSlice[int, int])

proc `[]`*(pattern: Captures, i: int): string =
let pattern = RegexMatch(pattern)

@@ -272,10 +272,10 @@ proc `[]`*(pattern: Captures, i: int): string =
proc match*(pattern: RegexMatch): string =
return pattern.captures[-1]

proc matchBounds*(pattern: RegexMatch): Slice[int] =
proc matchBounds*(pattern: RegexMatch): HSlice[int, int] =
return pattern.captureBounds[-1].get

proc `[]`*(pattern: CaptureBounds, name: string): Option[Slice[int]] =
proc `[]`*(pattern: CaptureBounds, name: string): Option[HSlice[int, int]] =
let pattern = RegexMatch(pattern)
return pattern.captureBounds[pattern.pattern.captureNameToId.fget(name)]

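A hedged usage sketch of the ``HSlice``-returning accessors above:

.. code-block:: nim

  import nre, options

  let m = "abc".match(re"(\w)\w").get
  let bounds = m.captureBounds[0].get       # Option[HSlice[int, int]] unwrapped
  assert bounds.a == 0 and bounds.b == 0    # capture "a" spans index 0, inclusive
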
@@ -295,13 +295,13 @@ proc toTable*(pattern: Captures, default: string = nil): Table[string, string] =
result = initTable[string, string]()
toTableImpl(nextVal == nil)

proc toTable*(pattern: CaptureBounds, default = none(Slice[int])):
Table[string, Option[Slice[int]]] =
result = initTable[string, Option[Slice[int]]]()
proc toTable*(pattern: CaptureBounds, default = none(HSlice[int, int])):
Table[string, Option[HSlice[int, int]]] =
result = initTable[string, Option[HSlice[int, int]]]()
toTableImpl(nextVal.isNone)

template itemsImpl(cond: untyped) {.dirty.} =
for i in 0 .. <RegexMatch(pattern).pattern.captureCount:
for i in 0 ..< RegexMatch(pattern).pattern.captureCount:
let nextVal = pattern[i]
# done in this roundabout way to avoid multiple yields (potential code
# bloat)

@@ -309,13 +309,13 @@ template itemsImpl(cond: untyped) {.dirty.} =
yield nextYieldVal

iterator items*(pattern: CaptureBounds, default = none(Slice[int])): Option[Slice[int]] =
iterator items*(pattern: CaptureBounds, default = none(HSlice[int, int])): Option[HSlice[int, int]] =
itemsImpl(nextVal.isNone)

iterator items*(pattern: Captures, default: string = nil): string =
itemsImpl(nextVal == nil)

proc toSeq*(pattern: CaptureBounds, default = none(Slice[int])): seq[Option[Slice[int]]] =
proc toSeq*(pattern: CaptureBounds, default = none(HSlice[int, int])): seq[Option[HSlice[int, int]]] =
accumulateResult(pattern.items(default))

proc toSeq*(pattern: Captures, default: string = nil): seq[string] =

@@ -396,8 +396,6 @@ proc extractOptions(pattern: string): tuple[pattern: string, flags: int, study:

# }}}

type UncheckedArray {.unchecked.}[T] = array[0 .. 0, T]

proc destroyRegex(pattern: Regex) =
pcre.free_substring(cast[cstring](pattern.pcreObj))
pattern.pcreObj = nil

@@ -412,7 +410,7 @@ proc getNameToNumberTable(pattern: Regex): Table[string, int] =

result = initTable[string, int]()

for i in 0 .. <entryCount:
for i in 0 ..< entryCount:
let pos = i * entrySize
let num = (int(table[pos]) shl 8) or int(table[pos + 1]) - 1
var name = ""

@@ -464,7 +462,7 @@ proc matchImpl(str: string, pattern: Regex, start, endpos: int, flags: int): Opt
# 1x capture count as slack space for PCRE
let vecsize = (pattern.captureCount() + 1) * 3
# div 2 because each element is 2 cints long
myResult.pcreMatchBounds = newSeq[Slice[cint]](ceil(vecsize / 2).int)
myResult.pcreMatchBounds = newSeq[HSlice[cint, cint]](ceil(vecsize / 2).int)
myResult.pcreMatchBounds.setLen(vecsize div 3)

let strlen = if endpos == int.high: str.len else: endpos+1

@@ -13,10 +13,6 @@
## We had to de-deprecate this module since too much code relies on it
## and many people prefer its API over ``nre``'s.
##
## **Note:** The 're' proc defaults to the **extended regular expression
## syntax** which lets you use whitespace freely to make your regexes readable.
## However, this means matching whitespace requires ``\s`` or something similar.
##
## This module is implemented by providing a wrapper around the
## `PRCE (Perl-Compatible Regular Expressions) <http://www.pcre.org>`_
## C library. This means that your application will depend on the PRCE

@@ -78,7 +74,7 @@ proc finalizeRegEx(x: Regex) =
if not isNil(x.e):
pcre.free_substring(cast[cstring](x.e))

proc re*(s: string, flags = {reExtended, reStudy}): Regex =
proc re*(s: string, flags = {reStudy}): Regex =
## Constructor of regular expressions.
##
## Note that Nim's

@@ -96,6 +92,13 @@ proc re*(s: string, flags = {reExtended, reStudy}): Regex =
result.e = pcre.study(result.h, options, addr msg)
if not isNil(msg): raiseInvalidRegex($msg)

proc rex*(s: string, flags = {reStudy, reExtended}): Regex =
## Constructor for extended regular expressions.
##
## The extended means that comments starting with `#` and
## whitespace are ignored.
result = re(s, flags)

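A hedged sketch of the behavioural difference: without ``reExtended`` in the default flag set, whitespace in a pattern is matched literally, while ``rex`` keeps the extended syntax:

.. code-block:: nim

  import re

  assert "hello world".match(re"hello world")   # whitespace is now significant
  assert "helloworld".match(rex"hello world")   # extended syntax ignores it
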
proc bufSubstr(b: cstring, sPos, ePos: int): string {.inline.} =
## Return a Nim string built from a slice of a cstring buffer.
## Don't assume cstring is '\0' terminated

@@ -63,16 +63,16 @@ proc recvLine*(sock: SecureSocket, line: var TaintedString): bool =
setLen(line.string, 0)
while true:
var c: array[0..0, char]
var n = BIO_read(sock.bio, c, c.len.cint)
var n = BIO_read(sock.bio, addr c, c.len.cint)
if n <= 0: return false
if c[0] == '\r':
n = BIO_read(sock.bio, c, c.len.cint)
n = BIO_read(sock.bio, addr c, c.len.cint)
if n > 0 and c[0] == '\L':
return true
elif n <= 0:
return false
elif c[0] == '\L': return true
add(line.string, c)
add(line.string, c[0])

proc send*(sock: SecureSocket, data: string) =

Some files were not shown because too many files have changed in this diff