Merge branch 'devel' into uint-range-checks
changelog.md
@@ -10,11 +10,32 @@
to UTF-8. Use the new switch `-d:nimDontSetUtf8CodePage` to disable this
feature.

- The language definition and compiler are now stricter about ``gensym``'ed
symbols in hygienic templates. See the section in the
[manual](https://nim-lang.org/docs/manual.html#templates-hygiene-in-templates)
for further details. Use the compiler switch `--useVersion:0.19` for a
transition period.
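For example, a minimal sketch of the hygiene rules (illustrative, not taken from the manual): a symbol declared inside a template body is ``gensym``'ed and invisible at the call site unless it is explicitly marked `{.inject.}`:

```nim
template declareHidden() =
  var counter = 0            # gensym'ed: not visible at the call site

template declareVisible() =
  var counter {.inject.} = 0 # injected: visible at the call site

declareVisible()
echo counter                 # compiles; with declareHidden() it would not
```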

### Breaking changes in the standard library

- We removed `unicode.Rune16` without any deprecation period as the name
was wrong (see the [RFC](https://github.com/nim-lang/RFCs/issues/151) for details)
and we didn't find any usages of it in the wild. If you still need it, add this
piece of code to your project:

```nim
type
  Rune16* = distinct int16
```

### Breaking changes in the compiler

- A bug allowing `int` to be implicitly converted to range types of smaller size (e.g `range[0'i8..10'i8]`) has been fixed.
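To illustrate (a hypothetical snippet, not from the changelog): code that relied on the accidental implicit conversion now needs an explicit, range-checked conversion:

```nim
type
  Digit = range[0'i8..9'i8]

let x: int = 5
# let d: Digit = x   # previously accepted by accident; now a type error
let d = Digit(x)     # explicit conversion with a range check
echo d
```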

## Library additions

@@ -29,7 +50,7 @@

- Added `os.delEnv` and `nimscript.delEnv`. (#11466)
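A quick sketch of the new call (the environment variable name is chosen for illustration):

```nim
import os

putEnv("NIM_DEMO", "1")
assert existsEnv("NIM_DEMO")
delEnv("NIM_DEMO")               # newly added in this release
assert not existsEnv("NIM_DEMO")
```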

- Enable Oid usage in hashtables. (#11472)
- Enabled Oid usage in hashtables. (#11472)
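For instance (a small sketch, assuming the standard `oids` and `tables` modules):

```nim
import oids, tables

var names = initTable[Oid, string]()
let id = genOid()
names[id] = "example"
assert names[id] == "example"
```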

- Added `unsafeColumnAt` procs that return an unsafe cstring from InstantRow. (#11647)

@@ -42,6 +63,11 @@

## Language additions

- Inline iterators returning `lent T` types are now supported, similarly to iterators returning `var T`:
```nim
iterator myitems[T](x: openarray[T]): lent T
iterator mypairs[T](x: openarray[T]): tuple[idx: int, val: lent T]
```
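A minimal sketch of how such an iterator could be written and used (names are illustrative):

```nim
type
  Big = object
    data: array[256, int]

iterator myitems[T](x: openArray[T]): lent T =
  for i in 0 ..< x.len:
    yield x[i]            # the element is borrowed rather than copied

var xs: array[3, Big]
for it in myitems(xs):
  echo it.data[0]
```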

## Language changes

@@ -51,6 +77,13 @@
- The Nim compiler now does not recompile the Nim project via ``nim c -r`` if
no dependent Nim file changed. This feature can be overridden by
the ``--forceBuild`` command line option.
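As a sketch, a project could opt back into full rebuilds from its NimScript configuration (the `config.nims` file and the define name are illustrative assumptions):

```nim
# config.nims (illustrative)
when defined(alwaysRebuild):
  switch("forceBuild", "on")   # same effect as passing --forceBuild on the command line
```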
- The Nim compiler now warns about unused module imports. You can use a
top level ``{.used.}`` pragma in the module that you want to be importable
without producing this warning.
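For example (module and proc names are made up for illustration):

```nim
# helpers.nim (hypothetical module)
{.used.}   # mark the module as used: importing it never triggers the warning

proc helperProc*() = discard
```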
- The "testament" testing tool's name was changed
from `tester` to `testament` and is generally available as a tool to run Nim
tests automatically.


### Compiler changes

@@ -622,8 +622,6 @@ type
|
||||
mUnaryMinusI, mUnaryMinusI64, mAbsI, mNot,
|
||||
mUnaryPlusI, mBitnotI,
|
||||
mUnaryPlusF64, mUnaryMinusF64, mAbsF64,
|
||||
mToFloat, mToBiggestFloat,
|
||||
mToInt, mToBiggestInt,
|
||||
mCharToStr, mBoolToStr, mIntToStr, mInt64ToStr, mFloatToStr, mCStrToStr,
|
||||
mStrToStr, mEnumToStr,
|
||||
mAnd, mOr,
|
||||
@@ -692,8 +690,6 @@ const
|
||||
mEqRef, mEqProc, mEqUntracedRef, mLePtr, mLtPtr, mEqCString, mXor,
|
||||
mUnaryMinusI, mUnaryMinusI64, mAbsI, mNot, mUnaryPlusI, mBitnotI,
|
||||
mUnaryPlusF64, mUnaryMinusF64, mAbsF64,
|
||||
mToFloat, mToBiggestFloat,
|
||||
mToInt, mToBiggestInt,
|
||||
mCharToStr, mBoolToStr, mIntToStr, mInt64ToStr, mFloatToStr, mCStrToStr,
|
||||
mStrToStr, mEnumToStr,
|
||||
mAnd, mOr,
|
||||
@@ -903,6 +899,7 @@ type
|
||||
size*: BiggestInt # the size of the type in bytes
|
||||
# -1 means that the size is unknown
|
||||
align*: int16 # the type's alignment requirements
|
||||
paddingAtEnd*: int16 #
|
||||
lockLevel*: TLockLevel # lock level as required for deadlock checking
|
||||
loc*: TLoc
|
||||
typeInst*: PType # for generic instantiations the tyGenericInst that led to this
|
||||
@@ -1059,7 +1056,7 @@ template `[]`*(n: Indexable, i: BackwardsIndex): Indexable = n[n.len - i.int]
|
||||
template `[]=`*(n: Indexable, i: BackwardsIndex; x: Indexable) = n[n.len - i.int] = x
|
||||
|
||||
when defined(useNodeIds):
|
||||
const nodeIdToDebug* = 2322967# 2322968
|
||||
const nodeIdToDebug* = -1 # 2322968
|
||||
var gNodeId: int
|
||||
|
||||
proc newNode*(kind: TNodeKind): PNode =
|
||||
@@ -1500,14 +1497,14 @@ proc propagateToOwner*(owner, elem: PType) =
|
||||
if tfHasAsgn in elem.flags:
|
||||
let o2 = owner.skipTypes({tyGenericInst, tyAlias, tySink})
|
||||
if o2.kind in {tyTuple, tyObject, tyArray,
|
||||
tySequence, tyOpt, tySet, tyDistinct}:
|
||||
tySequence, tyOpt, tySet, tyDistinct, tyOpenArray, tyVarargs}:
|
||||
o2.flags.incl tfHasAsgn
|
||||
owner.flags.incl tfHasAsgn
|
||||
|
||||
if tfHasOwned in elem.flags:
|
||||
let o2 = owner.skipTypes({tyGenericInst, tyAlias, tySink})
|
||||
if o2.kind in {tyTuple, tyObject, tyArray,
|
||||
tySequence, tyOpt, tySet, tyDistinct}:
|
||||
tySequence, tyOpt, tySet, tyDistinct, tyOpenArray, tyVarargs}:
|
||||
o2.flags.incl tfHasOwned
|
||||
owner.flags.incl tfHasOwned
|
||||
|
||||
|
||||
@@ -79,9 +79,19 @@ proc genBoundsCheck(p: BProc; arr, a, b: TLoc)
|
||||
proc openArrayLoc(p: BProc, n: PNode): Rope =
|
||||
var a: TLoc
|
||||
|
||||
let q = skipConv(n)
|
||||
var q = skipConv(n)
|
||||
var skipped = false
|
||||
while q.kind == nkStmtListExpr and q.len > 0:
|
||||
skipped = true
|
||||
q = q.lastSon
|
||||
if getMagic(q) == mSlice:
|
||||
# magic: pass slice to openArray:
|
||||
if skipped:
|
||||
q = skipConv(n)
|
||||
while q.kind == nkStmtListExpr and q.len > 0:
|
||||
for i in 0..q.len-2:
|
||||
genStmts(p, q[i])
|
||||
q = q.lastSon
|
||||
var b, c: TLoc
|
||||
initLocExpr(p, q[1], a)
|
||||
initLocExpr(p, q[2], b)
|
||||
@@ -90,21 +100,23 @@ proc openArrayLoc(p: BProc, n: PNode): Rope =
|
||||
if optBoundsCheck in p.options:
|
||||
genBoundsCheck(p, a, b, c)
|
||||
let ty = skipTypes(a.t, abstractVar+{tyPtr})
|
||||
let dest = getTypeDesc(p.module, n.typ.sons[0])
|
||||
case ty.kind
|
||||
of tyArray:
|
||||
let first = toInt64(firstOrd(p.config, ty))
|
||||
if first == 0:
|
||||
result = "($1)+($2), ($3)-($2)+1" % [rdLoc(a), rdLoc(b), rdLoc(c)]
|
||||
result = "($4*)(($1)+($2)), ($3)-($2)+1" % [rdLoc(a), rdLoc(b), rdLoc(c), dest]
|
||||
else:
|
||||
result = "($1)+(($2)-($4)), ($3)-($2)+1" % [rdLoc(a), rdLoc(b), rdLoc(c), intLiteral(first)]
|
||||
of tyOpenArray, tyVarargs, tyUncheckedArray:
|
||||
result = "($1)+($2), ($3)-($2)+1" % [rdLoc(a), rdLoc(b), rdLoc(c)]
|
||||
result = "($5*)($1)+(($2)-($4)), ($3)-($2)+1" %
|
||||
[rdLoc(a), rdLoc(b), rdLoc(c), intLiteral(first), dest]
|
||||
of tyOpenArray, tyVarargs, tyUncheckedArray, tyCString:
|
||||
result = "($4*)($1)+($2), ($3)-($2)+1" % [rdLoc(a), rdLoc(b), rdLoc(c), dest]
|
||||
of tyString, tySequence:
|
||||
if skipTypes(n.typ, abstractInst).kind == tyVar and
|
||||
not compileToCpp(p.module):
|
||||
result = "(*$1)$4+($2), ($3)-($2)+1" % [rdLoc(a), rdLoc(b), rdLoc(c), dataField(p)]
|
||||
result = "($5*)(*$1)$4+($2), ($3)-($2)+1" % [rdLoc(a), rdLoc(b), rdLoc(c), dataField(p), dest]
|
||||
else:
|
||||
result = "$1$4+($2), ($3)-($2)+1" % [rdLoc(a), rdLoc(b), rdLoc(c), dataField(p)]
|
||||
result = "($5*)$1$4+($2), ($3)-($2)+1" % [rdLoc(a), rdLoc(b), rdLoc(c), dataField(p), dest]
|
||||
else:
|
||||
internalError(p.config, "openArrayLoc: " & typeToString(a.t))
|
||||
else:
|
||||
@@ -184,8 +196,8 @@ template genParamLoop(params) {.dirty.} =
|
||||
if params != nil: add(params, ~", ")
|
||||
add(params, genArgNoParam(p, ri.sons[i]))
|
||||
|
||||
proc addActualPrefixForHCR(res: var Rope, module: PSym, sym: PSym) =
|
||||
if sym.flags * {sfImportc, sfNonReloadable} == {} and
|
||||
proc addActualSuffixForHCR(res: var Rope, module: PSym, sym: PSym) =
|
||||
if sym.flags * {sfImportc, sfNonReloadable} == {} and sym.loc.k == locProc and
|
||||
(sym.typ.callConv == ccInline or sym.owner.id == module.id):
|
||||
res = res & "_actual".rope
|
||||
|
||||
@@ -203,7 +215,7 @@ proc genPrefixCall(p: BProc, le, ri: PNode, d: var TLoc) =
|
||||
genParamLoop(params)
|
||||
var callee = rdLoc(op)
|
||||
if p.hcrOn and ri.sons[0].kind == nkSym:
|
||||
callee.addActualPrefixForHCR(p.module.module, ri.sons[0].sym)
|
||||
callee.addActualSuffixForHCR(p.module.module, ri.sons[0].sym)
|
||||
fixupCall(p, le, ri, d, callee, params)
|
||||
|
||||
proc genClosureCall(p: BProc, le, ri: PNode, d: var TLoc) =
|
||||
|
||||
@@ -666,14 +666,6 @@ proc unaryArith(p: BProc, e: PNode, d: var TLoc, op: TMagic) =
|
||||
of mAbsF64:
|
||||
applyFormat("($1 < 0? -($1) : ($1))")
|
||||
# BUGFIX: fabs() makes problems for Tiny C
|
||||
of mToFloat:
|
||||
applyFormat("((double) ($1))")
|
||||
of mToBiggestFloat:
|
||||
applyFormat("((double) ($1))")
|
||||
of mToInt:
|
||||
applyFormat("float64ToInt32($1)")
|
||||
of mToBiggestInt:
|
||||
applyFormat("float64ToInt64($1)")
|
||||
else:
|
||||
assert false, $op
|
||||
|
||||
@@ -2036,7 +2028,7 @@ proc genMove(p: BProc; n: PNode; d: var TLoc) =
|
||||
resetLoc(p, a)
|
||||
|
||||
proc genDestroy(p: BProc; n: PNode) =
|
||||
if optNimV2 in p.config.globalOptions:
|
||||
if p.config.selectedGC == gcDestructors:
|
||||
let arg = n[1].skipAddr
|
||||
let t = arg.typ.skipTypes(abstractInst)
|
||||
case t.kind
|
||||
@@ -2094,7 +2086,7 @@ proc genEnumToStr(p: BProc, e: PNode, d: var TLoc) =
|
||||
proc genMagicExpr(p: BProc, e: PNode, d: var TLoc, op: TMagic) =
|
||||
case op
|
||||
of mOr, mAnd: genAndOr(p, e, d, op)
|
||||
of mNot..mToBiggestInt: unaryArith(p, e, d, op)
|
||||
of mNot..mAbsF64: unaryArith(p, e, d, op)
|
||||
of mUnaryMinusI..mAbsI: unaryArithOverflow(p, e, d, op)
|
||||
of mAddF64..mDivF64: binaryFloatArith(p, e, d, op)
|
||||
of mShrI..mXor: binaryArith(p, e, d, op)
|
||||
@@ -2276,7 +2268,7 @@ proc genMagicExpr(p: BProc, e: PNode, d: var TLoc, op: TMagic) =
|
||||
of mAccessEnv: unaryExpr(p, e, d, "$1.ClE_0")
|
||||
of mSlice:
|
||||
localError(p.config, e.info, "invalid context for 'toOpenArray'; " &
|
||||
" 'toOpenArray' is only valid within a call expression")
|
||||
"'toOpenArray' is only valid within a call expression")
|
||||
else:
|
||||
when defined(debugMagics):
|
||||
echo p.prc.name.s, " ", p.prc.id, " ", p.prc.flags, " ", p.prc.ast[genericParamsPos].kind
|
||||
|
||||
@@ -199,7 +199,7 @@ proc blockLeaveActions(p: BProc, howManyTrys, howManyExcepts: int) =
|
||||
# Called by return and break stmts.
|
||||
# Deals with issues faced when jumping out of try/except/finally stmts,
|
||||
|
||||
var stack = newSeq[tuple[n: PNode, inExcept: bool]](0)
|
||||
var stack = newSeq[tuple[fin: PNode, inExcept: bool]](0)
|
||||
|
||||
for i in 1 .. howManyTrys:
|
||||
let tryStmt = p.nestedTryStmts.pop
|
||||
@@ -214,9 +214,9 @@ proc blockLeaveActions(p: BProc, howManyTrys, howManyExcepts: int) =
|
||||
|
||||
# Find finally-stmt for this try-stmt
|
||||
# and generate a copy of its sons
|
||||
var finallyStmt = lastSon(tryStmt.n)
|
||||
if finallyStmt.kind == nkFinally:
|
||||
genStmts(p, finallyStmt.sons[0])
|
||||
var finallyStmt = tryStmt.fin
|
||||
if finallyStmt != nil:
|
||||
genStmts(p, finallyStmt[0])
|
||||
|
||||
# push old elements again:
|
||||
for i in countdown(howManyTrys-1, 0):
|
||||
@@ -675,8 +675,8 @@ proc genRaiseStmt(p: BProc, t: PNode) =
|
||||
if p.nestedTryStmts.len > 0 and p.nestedTryStmts[^1].inExcept:
|
||||
# if the current try stmt has a finally block,
|
||||
# we must execute it before reraising
|
||||
var finallyBlock = p.nestedTryStmts[^1].n[^1]
|
||||
if finallyBlock.kind == nkFinally:
|
||||
let finallyBlock = p.nestedTryStmts[^1].fin
|
||||
if finallyBlock != nil:
|
||||
genSimpleBlock(p, finallyBlock[0])
|
||||
if t[0].kind != nkEmpty:
|
||||
var a: TLoc
|
||||
@@ -924,7 +924,8 @@ proc genTryCpp(p: BProc, t: PNode, d: var TLoc) =
|
||||
getTemp(p, t.typ, d)
|
||||
genLineDir(p, t)
|
||||
discard cgsym(p.module, "popCurrentExceptionEx")
|
||||
add(p.nestedTryStmts, (t, false))
|
||||
let fin = if t[^1].kind == nkFinally: t[^1] else: nil
|
||||
add(p.nestedTryStmts, (fin, false))
|
||||
startBlock(p, "try {$n")
|
||||
expr(p, t[0], d)
|
||||
endBlock(p)
|
||||
@@ -1021,8 +1022,9 @@ proc genTry(p: BProc, t: PNode, d: var TLoc) =
|
||||
else:
|
||||
linefmt(p, cpsStmts, "$1.status = setjmp($1.context);$n", [safePoint])
|
||||
startBlock(p, "if ($1.status == 0) {$n", [safePoint])
|
||||
var length = sonsLen(t)
|
||||
add(p.nestedTryStmts, (t, quirkyExceptions))
|
||||
let length = sonsLen(t)
|
||||
let fin = if t[^1].kind == nkFinally: t[^1] else: nil
|
||||
add(p.nestedTryStmts, (fin, quirkyExceptions))
|
||||
expr(p, t.sons[0], d)
|
||||
if not quirkyExceptions:
|
||||
linefmt(p, cpsStmts, "#popSafePoint();$n", [])
|
||||
@@ -1159,36 +1161,10 @@ proc genEmit(p: BProc, t: PNode) =
|
||||
genLineDir(p, t)
|
||||
line(p, cpsStmts, s)
|
||||
|
||||
proc genBreakPoint(p: BProc, t: PNode) =
|
||||
var name: string
|
||||
if optEndb in p.options:
|
||||
if t.kind == nkExprColonExpr:
|
||||
assert(t.sons[1].kind in {nkStrLit..nkTripleStrLit})
|
||||
name = normalize(t.sons[1].strVal)
|
||||
else:
|
||||
inc(p.module.g.breakPointId)
|
||||
name = "bp" & $p.module.g.breakPointId
|
||||
genLineDir(p, t) # BUGFIX
|
||||
appcg(p.module, p.module.g.breakpoints,
|
||||
"#dbgRegisterBreakpoint($1, (NCSTRING)$2, (NCSTRING)$3);$n", [
|
||||
toLinenumber(t.info), makeCString(toFilename(p.config, t.info)),
|
||||
makeCString(name)])
|
||||
|
||||
proc genWatchpoint(p: BProc, n: PNode) =
|
||||
if optEndb notin p.options: return
|
||||
var a: TLoc
|
||||
initLocExpr(p, n.sons[1], a)
|
||||
let typ = skipTypes(n.sons[1].typ, abstractVarRange)
|
||||
lineCg(p, cpsStmts, "#dbgRegisterWatchpoint($1, (NCSTRING)$2, $3);$n",
|
||||
[addrLoc(p.config, a), makeCString(renderTree(n.sons[1])),
|
||||
genTypeInfo(p.module, typ, n.info)])
|
||||
|
||||
proc genPragma(p: BProc, n: PNode) =
|
||||
for it in n.sons:
|
||||
case whichPragma(it)
|
||||
of wEmit: genEmit(p, it)
|
||||
of wBreakpoint: genBreakPoint(p, it)
|
||||
of wWatchPoint: genWatchpoint(p, it)
|
||||
of wInjectStmt:
|
||||
var p = newProc(nil, p.module)
|
||||
p.options = p.options - {optLineTrace, optStackTrace}
|
||||
|
||||
@@ -155,6 +155,7 @@ proc mapType(conf: ConfigRef; typ: PType): TCTypeKind =
|
||||
of tyNone, tyTyped: result = ctVoid
|
||||
of tyBool: result = ctBool
|
||||
of tyChar: result = ctChar
|
||||
of tyNil: result = ctPtr
|
||||
of tySet: result = mapSetType(conf, typ)
|
||||
of tyOpenArray, tyArray, tyVarargs, tyUncheckedArray: result = ctArray
|
||||
of tyObject, tyTuple: result = ctStruct
|
||||
@@ -469,7 +470,7 @@ proc genProcParams(m: BModule, t: PType, rettype, params: var Rope,
|
||||
add(params, param.loc.r)
|
||||
# declare the len field for open arrays:
|
||||
var arr = param.typ
|
||||
if arr.kind in {tyVar, tyLent}: arr = arr.lastSon
|
||||
if arr.kind in {tyVar, tyLent, tySink}: arr = arr.lastSon
|
||||
var j = 0
|
||||
while arr.kind in {tyOpenArray, tyVarargs}:
|
||||
# this fixes the 'sort' bug:
|
||||
@@ -477,7 +478,7 @@ proc genProcParams(m: BModule, t: PType, rettype, params: var Rope,
|
||||
# need to pass hidden parameter:
|
||||
addf(params, ", NI $1Len_$2", [param.loc.r, j.rope])
|
||||
inc(j)
|
||||
arr = arr.sons[0]
|
||||
arr = arr.sons[0].skipTypes({tySink})
|
||||
if t.sons[0] != nil and isInvalidReturnType(m.config, t.sons[0]):
|
||||
var arr = t.sons[0]
|
||||
if params != nil: add(params, ", ")
|
||||
@@ -956,7 +957,7 @@ proc genProcHeader(m: BModule, prc: PSym, asPtr: bool = false): Rope =
|
||||
fillLoc(prc.loc, locProc, prc.ast[namePos], mangleName(m, prc), OnUnknown)
|
||||
genProcParams(m, prc.typ, rettype, params, check)
|
||||
# handle the 2 options for hotcodereloading codegen - function pointer
|
||||
# (instead of forward declaration) or header for function budy with "_actual" postfix
|
||||
# (instead of forward declaration) or header for function body with "_actual" postfix
|
||||
let asPtrStr = rope(if asPtr: "_PTR" else: "")
|
||||
var name = prc.loc.r
|
||||
if isReloadable(m, prc) and not asPtr:
|
||||
|
||||
@@ -264,13 +264,7 @@ proc genLineDir(p: BProc, t: PNode) =
|
||||
if optEmbedOrigSrc in p.config.globalOptions:
|
||||
add(p.s(cpsStmts), ~"//" & sourceLine(p.config, t.info) & "\L")
|
||||
genCLineDir(p.s(cpsStmts), toFullPath(p.config, t.info), line, p.config)
|
||||
if ({optStackTrace, optEndb} * p.options == {optStackTrace, optEndb}) and
|
||||
(p.prc == nil or sfPure notin p.prc.flags):
|
||||
if freshLineInfo(p, t.info):
|
||||
linefmt(p, cpsStmts, "#endb($1, $2);$N",
|
||||
[line, makeCString(toFilename(p.config, t.info))])
|
||||
elif ({optLineTrace, optStackTrace} * p.options ==
|
||||
{optLineTrace, optStackTrace}) and
|
||||
if ({optLineTrace, optStackTrace} * p.options == {optLineTrace, optStackTrace}) and
|
||||
(p.prc == nil or sfPure notin p.prc.flags) and t.info.fileIndex != InvalidFileIdx:
|
||||
if freshLineInfo(p, t.info):
|
||||
linefmt(p, cpsStmts, "nimln_($1, $2);$n",
|
||||
@@ -479,19 +473,6 @@ proc deinitGCFrame(p: BProc): Rope =
|
||||
result = ropecg(p.module,
|
||||
"if (((NU)&GCFRAME_) < 4096) #nimGCFrame(&GCFRAME_);$n", [])
|
||||
|
||||
proc localDebugInfo(p: BProc, s: PSym, retType: PType) =
|
||||
if {optStackTrace, optEndb} * p.options != {optStackTrace, optEndb}: return
|
||||
# XXX work around a bug: No type information for open arrays possible:
|
||||
if skipTypes(s.typ, abstractVar).kind in {tyOpenArray, tyVarargs}: return
|
||||
var a = "&" & s.loc.r
|
||||
if s.kind == skParam and ccgIntroducedPtr(p.config, s, retType): a = s.loc.r
|
||||
lineF(p, cpsInit,
|
||||
"FR_.s[$1].address = (void*)$3; FR_.s[$1].typ = $4; FR_.s[$1].name = $2;$n",
|
||||
[p.maxFrameLen.rope, makeCString(normalize(s.name.s)), a,
|
||||
genTypeInfo(p.module, s.loc.t, s.info)])
|
||||
inc(p.maxFrameLen)
|
||||
inc p.blocks[p.blocks.len-1].frameLen
|
||||
|
||||
proc localVarDecl(p: BProc; n: PNode): Rope =
|
||||
let s = n.sym
|
||||
if s.loc.k == locNone:
|
||||
@@ -515,7 +496,6 @@ proc assignLocalVar(p: BProc, n: PNode) =
|
||||
let nl = if optLineDir in p.config.options: "" else: "\L"
|
||||
let decl = localVarDecl(p, n) & ";" & nl
|
||||
line(p, cpsLocals, decl)
|
||||
localDebugInfo(p, n.sym, nil)
|
||||
|
||||
include ccgthreadvars
|
||||
|
||||
@@ -562,17 +542,10 @@ proc assignGlobalVar(p: BProc, n: PNode) =
|
||||
if p.withinLoop > 0:
|
||||
# fixes tests/run/tzeroarray:
|
||||
resetLoc(p, s.loc)
|
||||
if p.module.module.options * {optStackTrace, optEndb} ==
|
||||
{optStackTrace, optEndb}:
|
||||
appcg(p.module, p.module.s[cfsDebugInit],
|
||||
"#dbgRegisterGlobal($1, &$2, $3);$n",
|
||||
[makeCString(normalize(s.owner.name.s & '.' & s.name.s)),
|
||||
s.loc.r, genTypeInfo(p.module, s.typ, n.info)])
|
||||
|
||||
proc assignParam(p: BProc, s: PSym, retType: PType) =
|
||||
assert(s.loc.r != nil)
|
||||
scopeMangledParam(p, s)
|
||||
localDebugInfo(p, s, retType)
|
||||
|
||||
proc fillProcLoc(m: BModule; n: PNode) =
|
||||
let sym = n.sym
|
||||
@@ -689,7 +662,7 @@ proc loadDynamicLib(m: BModule, lib: PLib) =
|
||||
[loadlib, genStringLiteral(m, lib.path)])
|
||||
else:
|
||||
var p = newProc(nil, m)
|
||||
p.options = p.options - {optStackTrace, optEndb}
|
||||
p.options = p.options - {optStackTrace}
|
||||
var dest: TLoc
|
||||
initLoc(dest, locTemp, lib.path, OnStack)
|
||||
dest.r = getTempName(m)
|
||||
@@ -784,7 +757,7 @@ proc cgsym(m: BModule, name: string): Rope =
|
||||
rawMessage(m.config, errGenerated, "system module needs: " & name)
|
||||
result = sym.loc.r
|
||||
if m.hcrOn and sym != nil and sym.kind in {skProc..skIterator}:
|
||||
result.addActualPrefixForHCR(m.module, sym)
|
||||
result.addActualSuffixForHCR(m.module, sym)
|
||||
|
||||
proc generateHeaders(m: BModule) =
|
||||
add(m.s[cfsHeaders], "\L#include \"nimbase.h\"\L")
|
||||
@@ -1033,6 +1006,11 @@ proc genProcAux(m: BModule, prc: PSym) =
|
||||
generatedProc = ropecg(p.module, "$N$1 {$n$2$3$4}$N$N",
|
||||
[header, p.s(cpsLocals), p.s(cpsInit), p.s(cpsStmts)])
|
||||
else:
|
||||
if m.hcrOn and isReloadable(m, prc):
|
||||
# Add forward declaration for "_actual"-suffixed functions defined in the same module (or inline).
|
||||
# This fixes the use of methods and also the case when 2 functions within the same module
|
||||
# call each other using directly the "_actual" versions (an optimization) - see issue #11608
|
||||
addf(m.s[cfsProcHeaders], "$1;\n", [header])
|
||||
generatedProc = ropecg(p.module, "$N$1 {$N", [header])
|
||||
add(generatedProc, initGCFrame(p))
|
||||
if optStackTrace in prc.options:
|
||||
@@ -1313,7 +1291,6 @@ proc genMainProc(m: BModule) =
|
||||
PreMainBody = "$N" &
|
||||
"void PreMainInner(void) {$N" &
|
||||
"$2" &
|
||||
"$3" &
|
||||
"}$N$N" &
|
||||
PosixCmdLine &
|
||||
"void PreMain(void) {$N" &
|
||||
@@ -1403,17 +1380,11 @@ proc genMainProc(m: BModule) =
|
||||
elif m.config.target.targetOS == osGenode:
|
||||
m.includeHeader("<libc/component.h>")
|
||||
|
||||
if optEndb in m.config.options:
|
||||
for i in 0..<m.config.m.fileInfos.len:
|
||||
m.g.breakpoints.addf("dbgRegisterFilename($1);$N",
|
||||
[m.config.m.fileInfos[i].projPath.string.makeCString])
|
||||
|
||||
let initStackBottomCall =
|
||||
if m.config.target.targetOS == osStandalone or m.config.selectedGC == gcNone: "".rope
|
||||
else: ropecg(m, "\t#initStackBottomWith((void *)&inner);$N", [])
|
||||
inc(m.labels)
|
||||
appcg(m, m.s[cfsProcs], PreMainBody, [
|
||||
m.g.mainDatInit, m.g.breakpoints, m.g.otherModsInit])
|
||||
appcg(m, m.s[cfsProcs], PreMainBody, [m.g.mainDatInit, m.g.otherModsInit])
|
||||
|
||||
if m.config.target.targetOS == osWindows and
|
||||
m.config.globalOptions * {optGenGuiApp, optGenDynLib} != {}:
|
||||
@@ -2019,11 +1990,6 @@ proc myClose(graph: ModuleGraph; b: PPassContext, n: PNode): PNode =
|
||||
if emulatedThreadVars(m.config) and m.config.target.targetOS != osStandalone:
|
||||
discard cgsym(m, "initThreadVarsEmulation")
|
||||
|
||||
if m.g.breakpoints != nil:
|
||||
discard cgsym(m, "dbgRegisterBreakpoint")
|
||||
if optEndb in m.config.options:
|
||||
discard cgsym(m, "dbgRegisterFilename")
|
||||
|
||||
if m.g.forwardedProcs.len == 0:
|
||||
incl m.flags, objHasKidsValid
|
||||
let disp = generateMethodDispatchers(graph)
|
||||
|
||||
@@ -73,7 +73,7 @@ type
|
||||
noSafePoints*: bool # the proc doesn't use safe points in exception handling
|
||||
lastLineInfo*: TLineInfo # to avoid generating excessive 'nimln' statements
|
||||
currLineInfo*: TLineInfo # AST codegen will make this superfluous
|
||||
nestedTryStmts*: seq[tuple[n: PNode, inExcept: bool]]
|
||||
nestedTryStmts*: seq[tuple[fin: PNode, inExcept: bool]]
|
||||
# in how many nested try statements we are
|
||||
# (the vars must be volatile then)
|
||||
# bool is true when we are in the except part of a try block
|
||||
@@ -117,8 +117,6 @@ type
|
||||
modulesClosed*: seq[BModule] # list of the same compiled modules, but in the order they were closed
|
||||
forwardedProcs*: seq[PSym] # proc:s that did not yet have a body
|
||||
generatedHeader*: BModule
|
||||
breakPointId*: int
|
||||
breakpoints*: Rope # later the breakpoints are inserted into the main proc
|
||||
typeInfoMarker*: TypeCacheWithOwner
|
||||
config*: ConfigRef
|
||||
graph*: ModuleGraph
|
||||
|
||||
@@ -266,7 +266,7 @@ proc testCompileOption*(conf: ConfigRef; switch: string, info: TLineInfo): bool
|
||||
of "threadanalysis": result = contains(conf.globalOptions, optThreadAnalysis)
|
||||
of "stacktrace": result = contains(conf.options, optStackTrace)
|
||||
of "linetrace": result = contains(conf.options, optLineTrace)
|
||||
of "debugger": result = contains(conf.options, optEndb)
|
||||
of "debugger": result = contains(conf.globalOptions, optCDebug)
|
||||
of "profiler": result = contains(conf.options, optProfiler)
|
||||
of "memtracker": result = contains(conf.options, optMemTracker)
|
||||
of "checks", "x": result = conf.options * ChecksOptions == ChecksOptions
|
||||
@@ -437,30 +437,31 @@ proc processSwitch*(switch, arg: string, pass: TCmdLinePass, info: TLineInfo;
|
||||
processOnOffSwitchG(conf, {optWholeProject}, arg, pass, info)
|
||||
of "gc":
|
||||
expectArg(conf, switch, arg, pass, info)
|
||||
case arg.normalize
|
||||
of "boehm":
|
||||
conf.selectedGC = gcBoehm
|
||||
defineSymbol(conf.symbols, "boehmgc")
|
||||
of "refc":
|
||||
conf.selectedGC = gcRefc
|
||||
of "v2":
|
||||
message(conf, info, warnDeprecated, "--gc:v2 is deprecated; using default gc")
|
||||
of "markandsweep":
|
||||
conf.selectedGC = gcMarkAndSweep
|
||||
defineSymbol(conf.symbols, "gcmarkandsweep")
|
||||
of "destructors":
|
||||
conf.selectedGC = gcDestructors
|
||||
defineSymbol(conf.symbols, "gcdestructors")
|
||||
of "go":
|
||||
conf.selectedGC = gcGo
|
||||
defineSymbol(conf.symbols, "gogc")
|
||||
of "none":
|
||||
conf.selectedGC = gcNone
|
||||
defineSymbol(conf.symbols, "nogc")
|
||||
of "stack", "regions":
|
||||
conf.selectedGC= gcRegions
|
||||
defineSymbol(conf.symbols, "gcregions")
|
||||
else: localError(conf, info, errNoneBoehmRefcExpectedButXFound % arg)
|
||||
if pass in {passCmd2, passPP}:
|
||||
case arg.normalize
|
||||
of "boehm":
|
||||
conf.selectedGC = gcBoehm
|
||||
defineSymbol(conf.symbols, "boehmgc")
|
||||
of "refc":
|
||||
conf.selectedGC = gcRefc
|
||||
of "v2":
|
||||
message(conf, info, warnDeprecated, "--gc:v2 is deprecated; using default gc")
|
||||
of "markandsweep":
|
||||
conf.selectedGC = gcMarkAndSweep
|
||||
defineSymbol(conf.symbols, "gcmarkandsweep")
|
||||
of "destructors":
|
||||
conf.selectedGC = gcDestructors
|
||||
defineSymbol(conf.symbols, "gcdestructors")
|
||||
of "go":
|
||||
conf.selectedGC = gcGo
|
||||
defineSymbol(conf.symbols, "gogc")
|
||||
of "none":
|
||||
conf.selectedGC = gcNone
|
||||
defineSymbol(conf.symbols, "nogc")
|
||||
of "stack", "regions":
|
||||
conf.selectedGC= gcRegions
|
||||
defineSymbol(conf.symbols, "gcregions")
|
||||
else: localError(conf, info, errNoneBoehmRefcExpectedButXFound % arg)
|
||||
of "warnings", "w":
|
||||
if processOnOffSwitchOrList(conf, {optWarns}, arg, pass, info): listWarnings(conf)
|
||||
of "warning": processSpecificNote(arg, wWarning, pass, info, switch, conf)
|
||||
@@ -473,24 +474,18 @@ proc processSwitch*(switch, arg: string, pass: TCmdLinePass, info: TLineInfo;
|
||||
of "linetrace": processOnOffSwitch(conf, {optLineTrace}, arg, pass, info)
|
||||
of "debugger":
|
||||
case arg.normalize
|
||||
of "on", "endb":
|
||||
conf.options.incl optEndb
|
||||
defineSymbol(conf.symbols, "endb")
|
||||
of "off":
|
||||
conf.options.excl optEndb
|
||||
undefSymbol(conf.symbols, "endb")
|
||||
of "native", "gdb":
|
||||
incl(conf.globalOptions, optCDebug)
|
||||
conf.options = conf.options + {optLineDir} - {optEndb}
|
||||
of "on", "native", "gdb":
|
||||
conf.globalOptions.incl optCDebug
|
||||
conf.options.incl optLineDir
|
||||
#defineSymbol(conf.symbols, "nimTypeNames") # type names are used in gdb pretty printing
|
||||
undefSymbol(conf.symbols, "endb")
|
||||
of "off":
|
||||
conf.globalOptions.excl optCDebug
|
||||
else:
|
||||
localError(conf, info, "expected endb|gdb but found " & arg)
|
||||
localError(conf, info, "expected native|gdb|on|off but found " & arg)
|
||||
of "g": # alias for --debugger:native
|
||||
incl(conf.globalOptions, optCDebug)
|
||||
conf.options = conf.options + {optLineDir} - {optEndb}
|
||||
conf.globalOptions.incl optCDebug
|
||||
conf.options.incl optLineDir
|
||||
#defineSymbol(conf.symbols, "nimTypeNames") # type names are used in gdb pretty printing
|
||||
undefSymbol(conf.symbols, "endb")
|
||||
of "profiler":
|
||||
processOnOffSwitch(conf, {optProfiler}, arg, pass, info)
|
||||
if optProfiler in conf.options: defineSymbol(conf.symbols, "profiler")
|
||||
@@ -788,6 +783,15 @@ proc processSwitch*(switch, arg: string, pass: TCmdLinePass, info: TLineInfo;
|
||||
of "expandmacro":
|
||||
expectArg(conf, switch, arg, pass, info)
|
||||
conf.macrosToExpand[arg] = "T"
|
||||
of "useversion":
|
||||
expectArg(conf, switch, arg, pass, info)
|
||||
case arg
|
||||
of "0.19":
|
||||
conf.globalOptions.incl optNimV019
|
||||
of "1.0":
|
||||
discard "the default"
|
||||
else:
|
||||
localError(conf, info, "unknown Nim version; currently supported values are: {0.19, 1.0}")
|
||||
of "":
|
||||
conf.projectName = "-"
|
||||
else:
|
||||
|
||||
@@ -97,3 +97,5 @@ proc initDefines*(symbols: StringTableRef) =
|
||||
|
||||
defineSymbol("nimFixedOwned")
|
||||
defineSymbol("nimHasStyleChecks")
|
||||
defineSymbol("nimToOpenArrayCString")
|
||||
defineSymbol("nimHasUsed")
|
||||
|
||||
@@ -607,14 +607,9 @@ proc aliases(obj, field: PNode): bool =
|
||||
if sameTrees(obj, n): return true
|
||||
case n.kind
|
||||
of nkDotExpr, nkCheckedFieldExpr, nkHiddenSubConv, nkHiddenStdConv,
|
||||
nkObjDownConv, nkObjUpConv, nkHiddenDeref, nkDerefExpr:
|
||||
nkObjDownConv, nkObjUpConv, nkHiddenAddr, nkAddr, nkBracketExpr,
|
||||
nkHiddenDeref, nkDerefExpr:
|
||||
n = n[0]
|
||||
of nkBracketExpr:
|
||||
let x = n[0]
|
||||
if x.typ != nil and x.typ.skipTypes(abstractInst).kind == tyTuple:
|
||||
n = x
|
||||
else:
|
||||
break
|
||||
else:
|
||||
break
|
||||
return false
|
||||
@@ -652,7 +647,7 @@ proc isAnalysableFieldAccess*(orig: PNode; owner: PSym): bool =
|
||||
while true:
|
||||
case n.kind
|
||||
of nkDotExpr, nkCheckedFieldExpr, nkHiddenSubConv, nkHiddenStdConv,
|
||||
nkObjDownConv, nkObjUpConv:
|
||||
nkObjDownConv, nkObjUpConv, nkHiddenAddr, nkAddr, nkBracketExpr:
|
||||
n = n[0]
|
||||
of nkHiddenDeref, nkDerefExpr:
|
||||
# We "own" sinkparam[].loc but not ourVar[].location as it is a nasty
|
||||
@@ -660,12 +655,6 @@ proc isAnalysableFieldAccess*(orig: PNode; owner: PSym): bool =
|
||||
n = n[0]
|
||||
return n.kind == nkSym and n.sym.owner == owner and (isSinkParam(n.sym) or
|
||||
n.sym.typ.skipTypes(abstractInst-{tyOwned}).kind in {tyOwned})
|
||||
of nkBracketExpr:
|
||||
let x = n[0]
|
||||
if x.typ != nil and x.typ.skipTypes(abstractInst).kind == tyTuple:
|
||||
n = x
|
||||
else:
|
||||
break
|
||||
else:
|
||||
break
|
||||
# XXX Allow closure deref operations here if we know
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
## Template evaluation engine. Now hygienic.
|
||||
|
||||
import
|
||||
strutils, options, ast, astalgo, msgs, renderer, lineinfos
|
||||
strutils, options, ast, astalgo, msgs, renderer, lineinfos, idents
|
||||
|
||||
type
|
||||
TemplCtx = object
|
||||
@@ -20,6 +20,7 @@ type
|
||||
mapping: TIdTable # every gensym'ed symbol needs to be mapped to some
|
||||
# new symbol
|
||||
config: ConfigRef
|
||||
ic: IdentCache
|
||||
|
||||
proc copyNode(ctx: TemplCtx, a, b: PNode): PNode =
|
||||
result = copyNode(a)
|
||||
@@ -52,7 +53,11 @@ proc evalTemplateAux(templ, actual: PNode, c: var TemplCtx, result: PNode) =
|
||||
#if x.kind == skParam and x.owner.kind == skModule:
|
||||
# internalAssert c.config, false
|
||||
idTablePut(c.mapping, s, x)
|
||||
result.add newSymNode(x, if c.instLines: actual.info else: templ.info)
|
||||
if sfGenSym in s.flags and optNimV019 notin c.config.globalOptions:
|
||||
result.add newIdentNode(getIdent(c.ic, x.name.s & "`gensym" & $x.id),
|
||||
if c.instLines: actual.info else: templ.info)
|
||||
else:
|
||||
result.add newSymNode(x, if c.instLines: actual.info else: templ.info)
|
||||
else:
|
||||
result.add copyNode(c, templ, actual)
|
||||
of nkNone..nkIdent, nkType..nkNilLit: # atom
|
||||
@@ -160,7 +165,9 @@ proc wrapInComesFrom*(info: TLineInfo; sym: PSym; res: PNode): PNode =
|
||||
result.typ = res.typ
|
||||
|
||||
proc evalTemplate*(n: PNode, tmpl, genSymOwner: PSym;
|
||||
conf: ConfigRef; fromHlo=false): PNode =
|
||||
conf: ConfigRef;
|
||||
ic: IdentCache;
|
||||
fromHlo=false): PNode =
|
||||
inc(conf.evalTemplateCounter)
|
||||
if conf.evalTemplateCounter > evalTemplateLimit:
|
||||
globalError(conf, n.info, errTemplateInstantiationTooNested)
|
||||
@@ -172,6 +179,7 @@ proc evalTemplate*(n: PNode, tmpl, genSymOwner: PSym;
|
||||
ctx.owner = tmpl
|
||||
ctx.genSymOwner = genSymOwner
|
||||
ctx.config = conf
|
||||
ctx.ic = ic
|
||||
initIdTable(ctx.mapping)
|
||||
|
||||
let body = tmpl.getBody
|
||||
|
||||
@@ -138,9 +138,6 @@ import
|
||||
strutils, options, dfa, lowerings, tables, modulegraphs, msgs,
|
||||
lineinfos, parampatterns, sighashes
|
||||
|
||||
const
|
||||
InterestingSyms = {skVar, skResult, skLet, skForVar, skTemp}
|
||||
|
||||
type
|
||||
Con = object
|
||||
owner: PSym
|
||||
@@ -217,43 +214,6 @@ proc isLastRead(n: PNode; c: var Con): bool =
|
||||
dbg:
|
||||
echo "ugh ", c.otherRead.isNil, " ", result
|
||||
|
||||
when false:
|
||||
let s = n.sym
|
||||
var pcs: seq[int] = @[instr+1]
|
||||
var takenGotos: IntSet
|
||||
var takenForks = initIntSet()
|
||||
while pcs.len > 0:
|
||||
var pc = pcs.pop
|
||||
|
||||
takenGotos = initIntSet()
|
||||
while pc < c.g.len:
|
||||
case c.g[pc].kind
|
||||
of def:
|
||||
if c.g[pc].sym == s:
|
||||
# the path led to a redefinition of 's' --> abandon it.
|
||||
break
|
||||
inc pc
|
||||
of use:
|
||||
if c.g[pc].sym == s:
|
||||
c.otherRead = c.g[pc].n
|
||||
return false
|
||||
inc pc
|
||||
of goto:
|
||||
# we must leave endless loops eventually:
|
||||
if not takenGotos.containsOrIncl(pc):
|
||||
pc = pc + c.g[pc].dest
|
||||
else:
|
||||
inc pc
|
||||
of fork:
|
||||
# we follow the next instruction but push the dest onto our "work" stack:
|
||||
if not takenForks.containsOrIncl(pc):
|
||||
pcs.add pc + c.g[pc].dest
|
||||
inc pc
|
||||
of InstrKind.join:
|
||||
inc pc
|
||||
#echo c.graph.config $ n.info, " last read here!"
|
||||
return true
|
||||
|
||||
proc initialized(code: ControlFlowGraph; pc: int,
|
||||
init, uninit: var IntSet; comesFrom: int): int =
|
||||
## Computes the set of definitely initialized variables across all code paths
|
||||
@@ -290,9 +250,6 @@ proc initialized(code: ControlFlowGraph; pc: int,
|
||||
inc pc
|
||||
return pc
|
||||
|
||||
template interestingSym(s: PSym): bool =
|
||||
s.owner == c.owner and s.kind in InterestingSyms and hasDestructor(s.typ)
|
||||
|
||||
template isUnpackedTuple(s: PSym): bool =
|
||||
## we move out all elements of unpacked tuples,
|
||||
## hence unpacked tuples themselves don't need to be destroyed
|
||||
@@ -353,8 +310,8 @@ proc canBeMoved(t: PType): bool {.inline.} =
|
||||
let t = t.skipTypes({tyGenericInst, tyAlias, tySink})
|
||||
result = t.kind != tyRef and t.attachedOps[attachedSink] != nil
|
||||
|
||||
proc genSink(c: Con; t: PType; dest, ri: PNode): PNode =
|
||||
let t = t.skipTypes({tyGenericInst, tyAlias, tySink})
|
||||
proc genSink(c: Con; dest, ri: PNode): PNode =
|
||||
let t = dest.typ.skipTypes({tyGenericInst, tyAlias, tySink})
|
||||
let k = if t.attachedOps[attachedSink] != nil: attachedSink
|
||||
else: attachedAsgn
|
||||
if t.attachedOps[k] != nil:
|
||||
@@ -365,20 +322,20 @@ proc genSink(c: Con; t: PType; dest, ri: PNode): PNode =
|
||||
# we generate a fast assignment in this case:
|
||||
result = newTree(nkFastAsgn, dest)
|
||||
|
||||
proc genCopy(c: var Con; t: PType; dest, ri: PNode): PNode =
|
||||
proc genCopyNoCheck(c: Con; dest, ri: PNode): PNode =
|
||||
let t = dest.typ.skipTypes({tyGenericInst, tyAlias, tySink})
|
||||
result = genOp(c, t, attachedAsgn, dest, ri)
|
||||
|
||||
proc genCopy(c: var Con; dest, ri: PNode): PNode =
|
||||
let t = dest.typ
|
||||
if tfHasOwned in t.flags:
|
||||
# try to improve the error message here:
|
||||
if c.otherRead == nil: discard isLastRead(ri, c)
|
||||
checkForErrorPragma(c, t, ri, "=")
|
||||
let t = t.skipTypes({tyGenericInst, tyAlias, tySink})
|
||||
result = genOp(c, t, attachedAsgn, dest, ri)
|
||||
genCopyNoCheck(c, dest, ri)
|
||||
|
||||
proc genCopyNoCheck(c: Con; t: PType; dest, ri: PNode): PNode =
|
||||
let t = t.skipTypes({tyGenericInst, tyAlias, tySink})
|
||||
result = genOp(c, t, attachedAsgn, dest, ri)
|
||||
|
||||
proc genDestroy(c: Con; t: PType; dest: PNode): PNode =
|
||||
let t = t.skipTypes({tyGenericInst, tyAlias, tySink})
|
||||
proc genDestroy(c: Con; dest: PNode): PNode =
|
||||
let t = dest.typ.skipTypes({tyGenericInst, tyAlias, tySink})
|
||||
result = genOp(c, t, attachedDestructor, dest, nil)
|
||||
|
||||
proc addTopVar(c: var Con; v: PNode) =
|
||||
@@ -390,20 +347,10 @@ proc getTemp(c: var Con; typ: PType; info: TLineInfo): PNode =
|
||||
result = newSymNode(sym)
|
||||
c.addTopVar(result)
|
||||
|
||||
proc p(n: PNode; c: var Con): PNode
|
||||
|
||||
template recurse(n, dest) =
|
||||
for i in 0..<n.len:
|
||||
dest.add p(n[i], c)
|
||||
|
||||
proc genMagicCall(n: PNode; c: var Con; magicname: string; m: TMagic): PNode =
|
||||
result = newNodeI(nkCall, n.info)
|
||||
result.add(newSymNode(createMagic(c.graph, magicname, m)))
|
||||
result.add n
|
||||
|
||||
proc genWasMoved(n: PNode; c: var Con): PNode =
|
||||
# The mWasMoved builtin does not take the address.
|
||||
result = genMagicCall(n, c, "wasMoved", mWasMoved)
|
||||
result = newNodeI(nkCall, n.info)
|
||||
result.add(newSymNode(createMagic(c.graph, "wasMoved", mWasMoved)))
|
||||
result.add n #mWasMoved does not take the address
|
||||
|
||||
proc genDefaultCall(t: PType; c: Con; info: TLineInfo): PNode =
|
||||
result = newNodeI(nkCall, info)
|
||||
@@ -422,9 +369,9 @@ proc destructiveMoveVar(n: PNode; c: var Con): PNode =
|
||||
let tempAsNode = newSymNode(temp)
|
||||
|
||||
var vpart = newNodeI(nkIdentDefs, tempAsNode.info, 3)
|
||||
vpart.sons[0] = tempAsNode
|
||||
vpart.sons[1] = c.emptyNode
|
||||
vpart.sons[2] = n
|
||||
vpart[0] = tempAsNode
|
||||
vpart[1] = c.emptyNode
|
||||
vpart[2] = n
|
||||
add(v, vpart)
|
||||
|
||||
result.add v
|
||||
@@ -437,27 +384,9 @@ proc sinkParamIsLastReadCheck(c: var Con, s: PNode) =
|
||||
localError(c.graph.config, c.otherRead.info, "sink parameter `" & $s.sym.name.s &
|
||||
"` is already consumed at " & toFileLineCol(c. graph.config, s.info))
|
||||
|
||||
proc passCopyToSink(n: PNode; c: var Con): PNode =
|
||||
result = newNodeIT(nkStmtListExpr, n.info, n.typ)
|
||||
let tmp = getTemp(c, n.typ, n.info)
|
||||
# XXX This is only required if we are in a loop. Since we move temporaries
|
||||
# out of loops we need to mark it as 'wasMoved'.
|
||||
result.add genWasMoved(tmp, c)
|
||||
if hasDestructor(n.typ):
|
||||
var m = genCopy(c, n.typ, tmp, n)
|
||||
m.add p(n, c)
|
||||
result.add m
|
||||
if isLValue(n):
|
||||
message(c.graph.config, n.info, hintPerformance,
|
||||
("passing '$1' to a sink parameter introduces an implicit copy; " &
|
||||
"use 'move($1)' to prevent it") % $n)
|
||||
else:
|
||||
result.add newTree(nkAsgn, tmp, p(n, c))
|
||||
result.add tmp
|
||||
|
||||
proc isDangerousSeq(t: PType): bool {.inline.} =
|
||||
let t = t.skipTypes(abstractInst)
|
||||
result = t.kind == tySequence and tfHasOwned notin t.sons[0].flags
|
||||
result = t.kind == tySequence and tfHasOwned notin t[0].flags
|
||||
|
||||
proc containsConstSeq(n: PNode): bool =
|
||||
if n.kind == nkBracket and n.len > 0 and n.typ != nil and isDangerousSeq(n.typ):
|
||||
@@ -467,19 +396,151 @@ proc containsConstSeq(n: PNode): bool =
|
||||
of nkExprEqExpr, nkExprColonExpr, nkHiddenStdConv, nkHiddenSubConv:
|
||||
result = containsConstSeq(n[1])
|
||||
of nkObjConstr, nkClosure:
|
||||
for i in 1 ..< n.len:
|
||||
for i in 1..<n.len:
|
||||
if containsConstSeq(n[i]): return true
|
||||
of nkCurly, nkBracket, nkPar, nkTupleConstr:
|
||||
for i in 0 ..< n.len:
|
||||
if containsConstSeq(n[i]): return true
|
||||
for son in n:
|
||||
if containsConstSeq(son): return true
|
||||
else: discard
|
||||
|
||||
proc pArg(arg: PNode; c: var Con; isSink: bool): PNode =
|
||||
template pArgIfTyped(argPart: PNode): PNode =
|
||||
# typ is nil if we are in if/case expr branch with noreturn
|
||||
if argPart.typ == nil: p(argPart, c)
|
||||
else: pArg(argPart, c, isSink)
|
||||
proc pExpr(n: PNode; c: var Con): PNode
|
||||
proc pArg(arg: PNode; c: var Con; isSink: bool): PNode
|
||||
proc pStmt(n: PNode; c: var Con): PNode
|
||||
proc moveOrCopy(dest, ri: PNode; c: var Con): PNode
|
||||
|
||||
template isExpression(n: PNode): bool =
|
||||
(not isEmptyType(n.typ)) or (n.kind in nkLiterals + {nkNilLit, nkRange})
|
||||
|
||||
proc recurse(n: PNode, c: var Con, processProc: proc): PNode =
|
||||
if n.sons.len == 0: return n
|
||||
case n.kind:
|
||||
of nkIfStmt, nkIfExpr:
|
||||
result = copyNode(n)
|
||||
for son in n:
|
||||
var branch = copyNode(son)
|
||||
if son.kind in {nkElifBranch, nkElifExpr}:
|
||||
if son[0].kind == nkBreakState:
|
||||
var copy = copyNode(son[0])
|
||||
copy.add pExpr(son[0][0], c)
|
||||
branch.add copy
|
||||
else:
|
||||
branch.add pExpr(son[0], c) #The condition
|
||||
branch.add processProc(son[1], c)
|
||||
else:
|
||||
branch.add processProc(son[0], c)
|
||||
result.add branch
|
||||
of nkWhen:
|
||||
# This should be a "when nimvm" node.
|
||||
result = copyTree(n)
|
||||
result[1][0] = processProc(result[1][0], c)
|
||||
of nkStmtList, nkStmtListExpr, nkTryStmt, nkFinally, nkPragmaBlock:
|
||||
result = copyNode(n)
|
||||
for i in 0..<n.len-1:
|
||||
result.add pStmt(n[i], c)
|
||||
result.add processProc(n[^1], c)
|
||||
of nkBlockStmt, nkBlockExpr:
|
||||
result = copyNode(n)
|
||||
result.add n[0]
|
||||
result.add processProc(n[1], c)
|
||||
of nkExceptBranch:
|
||||
result = copyNode(n)
|
||||
if n.len == 2:
|
||||
result.add n[0]
|
||||
for i in 1..<n.len:
|
||||
result.add processProc(n[i], c)
|
||||
else:
|
||||
for i in 0..<n.len:
|
||||
result.add processProc(n[i], c)
|
||||
of nkCaseStmt:
|
||||
result = copyNode(n)
|
||||
result.add pExpr(n[0], c)
|
||||
for i in 1..<n.len:
|
||||
var branch: PNode
|
||||
if n[i].kind == nkOfBranch:
|
||||
branch = n[i] # of branch conditions are constants
|
||||
branch[^1] = processProc(n[i][^1], c)
|
||||
elif n[i].kind in {nkElifBranch, nkElifExpr}:
|
||||
branch = copyNode(n[i])
|
||||
branch.add pExpr(n[i][0], c) #The condition
|
||||
branch.add processProc(n[i][1], c)
|
||||
else:
|
||||
branch = copyNode(n[i])
|
||||
if n[i][0].kind == nkNilLit: #XXX: Fix semCase to instead gen nkEmpty for cases that are never reached
|
||||
branch.add c.emptyNode
|
||||
else:
|
||||
branch.add processProc(n[i][0], c)
|
||||
result.add branch
|
||||
else:
|
||||
assert(false, $n.kind)
|
||||
|
||||
proc pExpr(n: PNode; c: var Con): PNode =
|
||||
assert(isExpression(n), $n.kind)
|
||||
case n.kind
|
||||
of nkCallKinds:
|
||||
let parameters = n[0].typ
|
||||
let L = if parameters != nil: parameters.len else: 0
|
||||
for i in 1..<n.len:
|
||||
n[i] = pArg(n[i], c, i < L and isSinkTypeForParam(parameters[i]))
|
||||
result = n
|
||||
of nkBracket:
|
||||
result = copyTree(n)
|
||||
for i in 0..<n.len:
|
||||
# everything that is passed to an array constructor is consumed,
|
||||
# so these all act like 'sink' parameters:
|
||||
result[i] = pArg(n[i], c, isSink = true)
|
||||
of nkObjConstr:
|
||||
result = copyTree(n)
|
||||
for i in 1..<n.len:
|
||||
# everything that is passed to an object constructor is consumed,
|
||||
# so these all act like 'sink' parameters:
|
||||
result[i][1] = pArg(n[i][1], c, isSink = true)
|
||||
of nkTupleConstr, nkClosure:
|
||||
result = copyTree(n)
|
||||
for i in ord(n.kind == nkClosure)..<n.len:
|
||||
# everything that is passed to a tuple constructor is consumed,
|
||||
# so these all act like 'sink' parameters:
|
||||
if n[i].kind == nkExprColonExpr:
|
||||
result[i][1] = pArg(n[i][1], c, isSink = true)
|
||||
else:
|
||||
result[i] = pArg(n[i], c, isSink = true)
|
||||
of nkCast, nkHiddenStdConv, nkHiddenSubConv, nkConv:
|
||||
result = copyNode(n)
|
||||
result.add n[0] #Destination type
|
||||
result.add pExpr(n[1], c) #Analyse inner expression
|
||||
of nkBracketExpr, nkCurly, nkRange, nkChckRange, nkChckRange64, nkChckRangeF,
|
||||
nkObjDownConv, nkObjUpConv, nkStringToCString, nkCStringToString,
|
||||
nkDotExpr, nkCheckedFieldExpr:
|
||||
result = copyNode(n)
|
||||
for son in n:
|
||||
result.add pExpr(son, c)
|
||||
of nkAddr, nkHiddenAddr, nkDerefExpr, nkHiddenDeref:
|
||||
result = copyNode(n)
|
||||
result.add pExpr(n[0], c)
|
||||
of nkNone..nkNilLit, nkTypeSection, nkProcDef, nkConverterDef, nkMethodDef,
|
||||
nkIteratorDef, nkMacroDef, nkTemplateDef, nkLambda, nkDo, nkFuncDef:
|
||||
result = n
|
||||
else:
|
||||
result = recurse(n, c, pExpr)
|
||||
|
||||
proc passCopyToSink(n: PNode; c: var Con): PNode =
|
||||
result = newNodeIT(nkStmtListExpr, n.info, n.typ)
|
||||
let tmp = getTemp(c, n.typ, n.info)
|
||||
# XXX This is only required if we are in a loop. Since we move temporaries
|
||||
# out of loops we need to mark it as 'wasMoved'.
|
||||
result.add genWasMoved(tmp, c)
|
||||
if hasDestructor(n.typ):
|
||||
var m = genCopy(c, tmp, n)
|
||||
m.add pExpr(n, c)
|
||||
result.add m
|
||||
if isLValue(n):
|
||||
message(c.graph.config, n.info, hintPerformance,
|
||||
("passing '$1' to a sink parameter introduces an implicit copy; " &
|
||||
"use 'move($1)' to prevent it") % $n)
|
||||
else:
|
||||
result.add newTree(nkAsgn, tmp, pExpr(n, c))
|
||||
result.add tmp
|
||||
|
||||
proc pArg(arg: PNode; c: var Con; isSink: bool): PNode =
|
||||
if isSink:
|
||||
if arg.kind in nkCallKinds:
|
||||
# recurse but skip the call expression in order to prevent
|
||||
@@ -495,8 +556,8 @@ proc pArg(arg: PNode; c: var Con; isSink: bool): PNode =
|
||||
# sink parameter (bug #11524). Note that the string implementation is
|
||||
# different and can deal with 'const string sunk into var'.
|
||||
result = passCopyToSink(arg, c)
|
||||
elif arg.kind in {nkBracket, nkObjConstr, nkTupleConstr, nkCharLit..nkTripleStrLit}:
|
||||
discard "object construction to sink parameter: nothing to do"
|
||||
elif arg.kind in {nkBracket, nkObjConstr, nkTupleConstr} + nkLiterals:
|
||||
# object construction to sink parameter: nothing to do
|
||||
result = arg
|
||||
elif arg.kind == nkSym and isSinkParam(arg.sym):
|
||||
# Sinked params can be consumed only once. We need to reset the memory
|
||||
@@ -507,205 +568,218 @@ proc pArg(arg: PNode; c: var Con; isSink: bool): PNode =
|
||||
# it is the last read, can be sinked. We need to reset the memory
|
||||
# to disable the destructor which we have not elided
|
||||
result = destructiveMoveVar(arg, c)
|
||||
elif arg.kind in {nkBlockExpr, nkBlockStmt}:
|
||||
result = copyNode(arg)
|
||||
result.add arg[0]
|
||||
result.add pArg(arg[1], c, isSink)
|
||||
elif arg.kind == nkStmtListExpr:
|
||||
result = copyNode(arg)
|
||||
for i in 0..arg.len-2:
|
||||
result.add p(arg[i], c)
|
||||
result.add pArg(arg[^1], c, isSink)
|
||||
elif arg.kind in {nkIfExpr, nkIfStmt}:
|
||||
result = copyNode(arg)
|
||||
for i in 0..<arg.len:
|
||||
var branch = copyNode(arg[i])
|
||||
if arg[i].kind in {nkElifBranch, nkElifExpr}:
|
||||
branch.add p(arg[i][0], c)
|
||||
branch.add pArgIfTyped(arg[i][1])
|
||||
else:
|
||||
branch.add pArgIfTyped(arg[i][0])
|
||||
result.add branch
|
||||
elif arg.kind == nkCaseStmt:
|
||||
result = copyNode(arg)
|
||||
result.add p(arg[0], c)
|
||||
for i in 1..<arg.len:
|
||||
var branch: PNode
|
||||
if arg[i].kind == nkOfBranch:
|
||||
branch = arg[i] # of branch conditions are constants
|
||||
branch[^1] = pArgIfTyped(arg[i][^1])
|
||||
elif arg[i].kind in {nkElifBranch, nkElifExpr}:
|
||||
branch = copyNode(arg[i])
|
||||
branch.add p(arg[i][0], c)
|
||||
branch.add pArgIfTyped(arg[i][1])
|
||||
else:
|
||||
branch = copyNode(arg[i])
|
||||
branch.add pArgIfTyped(arg[i][0])
|
||||
result.add branch
|
||||
elif isAnalysableFieldAccess(arg, c.owner) and isLastRead(arg, c):
|
||||
result = destructiveMoveVar(arg, c)
|
||||
elif arg.kind in {nkStmtListExpr, nkBlockExpr, nkBlockStmt}:
|
||||
result = recurse(arg, c, proc(n: PNode, c: var Con): PNode = pArg(n, c, isSink))
|
||||
elif arg.kind in {nkIfExpr, nkIfStmt, nkCaseStmt}:
|
||||
result = recurse(arg, c, proc(n: PNode, c: var Con): PNode =
|
||||
if n.typ == nil: pStmt(n, c) #in if/case expr branch with noreturn
|
||||
else: pArg(n, c, isSink))
|
||||
else:
|
||||
# an object that is not temporary but passed to a 'sink' parameter
|
||||
# results in a copy.
|
||||
result = passCopyToSink(arg, c)
|
||||
elif arg.kind == nkBracket:
|
||||
# Treat `f([...])` like `f(...)`
|
||||
result = copyNode(arg)
|
||||
for son in arg:
|
||||
result.add pArg(son, c, isSinkTypeForParam(son.typ))
|
||||
elif arg.kind in nkCallKinds and arg.typ != nil and hasDestructor(arg.typ):
|
||||
# produce temp creation
|
||||
result = newNodeIT(nkStmtListExpr, arg.info, arg.typ)
|
||||
let tmp = getTemp(c, arg.typ, arg.info)
|
||||
let res = pExpr(arg, c)
|
||||
var sinkExpr = genSink(c, tmp, res)
|
||||
sinkExpr.add res
|
||||
result.add sinkExpr
|
||||
result.add tmp
|
||||
c.destroys.add genDestroy(c, tmp)
|
||||
else:
|
||||
result = p(arg, c)
|
||||
result = pExpr(arg, c)
|
||||
|
||||
proc isCursor(n: PNode): bool {.inline.} =
|
||||
result = n.kind == nkSym and sfCursor in n.sym.flags
|
||||
|
||||
proc keepVar(n, it: PNode, c: var Con): PNode =
|
||||
# keep the var but transform 'ri':
|
||||
result = copyNode(n)
|
||||
var itCopy = copyNode(it)
|
||||
for j in 0..<it.len-1:
|
||||
itCopy.add it[j]
|
||||
if isExpression(it[^1]):
|
||||
itCopy.add pExpr(it[^1], c)
|
||||
else:
|
||||
itCopy.add pStmt(it[^1], c)
|
||||
result.add itCopy
|
||||
|
||||
proc pStmt(n: PNode; c: var Con): PNode =
|
||||
#assert(not isExpression(n) or implicitlyDiscardable(n), $n.kind)
|
||||
case n.kind
|
||||
of nkVarSection, nkLetSection:
|
||||
# transform; var x = y to var x; x op y where op is a move or copy
|
||||
result = newNodeI(nkStmtList, n.info)
|
||||
for it in n:
|
||||
var ri = it[^1]
|
||||
if it.kind == nkVarTuple and hasDestructor(ri.typ):
|
||||
let x = lowerTupleUnpacking(c.graph, it, c.owner)
|
||||
result.add pStmt(x, c)
|
||||
elif it.kind == nkIdentDefs and hasDestructor(it[0].typ) and not isCursor(it[0]):
|
||||
for j in 0..<it.len-2:
|
||||
let v = it[j]
|
||||
if v.kind == nkSym:
|
||||
if sfCompileTime in v.sym.flags: continue
|
||||
# move the variable declaration to the top of the frame:
|
||||
c.addTopVar v
|
||||
# make sure it's destroyed at the end of the proc:
|
||||
if not isUnpackedTuple(it[0].sym):
|
||||
c.destroys.add genDestroy(c, v)
|
||||
if ri.kind == nkEmpty and c.inLoop > 0:
|
||||
ri = genDefaultCall(v.typ, c, v.info)
|
||||
if ri.kind != nkEmpty:
|
||||
let r = moveOrCopy(v, ri, c)
|
||||
result.add r
|
||||
else:
|
||||
result.add keepVar(n, it, c)
|
||||
of nkCallKinds:
|
||||
let parameters = n[0].typ
|
||||
let L = if parameters != nil: parameters.len else: 0
|
||||
for i in 1..<n.len:
|
||||
n[i] = pArg(n[i], c, i < L and isSinkTypeForParam(parameters[i]))
|
||||
result = n
|
||||
of nkDiscardStmt:
|
||||
if n[0].kind != nkEmpty:
|
||||
n[0] = pArg(n[0], c, false)
|
||||
result = n
|
||||
of nkReturnStmt:
|
||||
result = copyNode(n)
|
||||
result.add pStmt(n[0], c)
|
||||
of nkYieldStmt:
|
||||
result = copyNode(n)
|
||||
result.add pExpr(n[0], c)
|
||||
of nkAsgn, nkFastAsgn:
|
||||
if hasDestructor(n[0].typ) and n[1].kind notin {nkProcDef, nkDo, nkLambda}:
|
||||
# rule (self-assignment-removal):
|
||||
if n[1].kind == nkSym and n[0].kind == nkSym and n[0].sym == n[1].sym:
|
||||
result = newNodeI(nkEmpty, n.info)
|
||||
else:
|
||||
result = moveOrCopy(n[0], n[1], c)
|
||||
else:
|
||||
result = copyNode(n)
|
||||
result.add n[0]
|
||||
result.add pExpr(n[1], c)
|
||||
of nkRaiseStmt:
|
||||
if optNimV2 in c.graph.config.globalOptions and n[0].kind != nkEmpty:
|
||||
if n[0].kind in nkCallKinds:
|
||||
let call = pExpr(n[0], c) #pExpr?
|
||||
result = copyNode(n)
|
||||
result.add call
|
||||
else:
|
||||
let tmp = getTemp(c, n[0].typ, n.info)
|
||||
var m = genCopyNoCheck(c, tmp, n[0])
|
||||
|
||||
m.add pExpr(n[0], c)
|
||||
result = newTree(nkStmtList, genWasMoved(tmp, c), m)
|
||||
var toDisarm = n[0]
|
||||
if toDisarm.kind == nkStmtListExpr: toDisarm = toDisarm.lastSon
|
||||
if toDisarm.kind == nkSym and toDisarm.sym.owner == c.owner:
|
||||
result.add genWasMoved(toDisarm, c)
|
||||
result.add newTree(nkRaiseStmt, tmp)
|
||||
else:
|
||||
result = copyNode(n)
|
||||
result.add if n[0].kind == nkEmpty: n[0]
|
||||
else: pExpr(n[0], c)
|
||||
of nkNone..nkType, nkTypeSection, nkProcDef, nkConverterDef, nkMethodDef,
|
||||
nkIteratorDef, nkMacroDef, nkTemplateDef, nkLambda, nkDo, nkFuncDef,
|
||||
nkConstSection, nkConstDef, nkIncludeStmt, nkImportStmt, nkExportStmt,
|
||||
nkPragma, nkCommentStmt, nkBreakStmt:
|
||||
result = n
|
||||
# Recurse
|
||||
of nkWhileStmt:
|
||||
result = copyNode(n)
|
||||
inc c.inLoop
|
||||
result.add pExpr(n[0], c)
|
||||
result.add pStmt(n[1], c)
|
||||
dec c.inLoop
|
||||
else:
|
||||
result = recurse(n, c, pStmt)
|
||||
|
||||
proc moveOrCopy(dest, ri: PNode; c: var Con): PNode =
|
||||
assert(isExpression(ri), $ri.kind)
|
||||
# unfortunately, this needs to be kept consistent with the cases
|
||||
# we handle in the 'case of' statement below:
|
||||
const movableNodeKinds = (nkCallKinds + {nkSym, nkTupleConstr, nkObjConstr,
|
||||
nkBracket, nkBracketExpr, nkNilLit})
|
||||
|
||||
template moveOrCopyIfTyped(riPart: PNode): PNode =
|
||||
# typ is nil if we are in if/case expr branch with noreturn
|
||||
if riPart.typ == nil: p(riPart, c)
|
||||
else: moveOrCopy(dest, riPart, c)
|
||||
|
||||
#XXX: All these nkStmtList results will cause problems in recursive moveOrCopy calls
|
||||
case ri.kind
|
||||
of nkCallKinds:
|
||||
result = genSink(c, dest.typ, dest, ri)
|
||||
# watch out and do not transform 'ri' twice if it's a call:
|
||||
let ri2 = copyNode(ri)
|
||||
let parameters = ri[0].typ
|
||||
let L = if parameters != nil: parameters.len else: 0
|
||||
ri2.add ri[0]
|
||||
for i in 1..<ri.len:
|
||||
ri2.add pArg(ri[i], c, i < L and isSinkTypeForParam(parameters[i]))
|
||||
#recurse(ri, ri2)
|
||||
result.add ri2
|
||||
result = genSink(c, dest, ri)
|
||||
result.add pExpr(ri, c)
|
||||
of nkBracketExpr:
|
||||
if ri[0].kind == nkSym and isUnpackedTuple(ri[0].sym):
|
||||
# unpacking of tuple: move out the elements
|
||||
result = genSink(c, dest.typ, dest, ri)
|
||||
result.add p(ri, c)
|
||||
result = genSink(c, dest, ri)
|
||||
result.add pExpr(ri, c)
|
||||
elif isAnalysableFieldAccess(ri, c.owner) and isLastRead(ri, c):
|
||||
# Rule 3: `=sink`(x, z); wasMoved(z)
|
||||
var snk = genSink(c, dest.typ, dest, ri)
|
||||
var snk = genSink(c, dest, ri)
|
||||
snk.add ri
|
||||
result = newTree(nkStmtList, snk, genWasMoved(ri, c))
|
||||
else:
|
||||
result = genCopy(c, dest.typ, dest, ri)
|
||||
result.add p(ri, c)
|
||||
of nkStmtListExpr:
|
||||
result = newNodeI(nkStmtList, ri.info)
|
||||
for i in 0..ri.len-2:
|
||||
result.add p(ri[i], c)
|
||||
result.add moveOrCopy(dest, ri[^1], c)
|
||||
of nkBlockExpr, nkBlockStmt:
|
||||
result = newNodeI(nkBlockStmt, ri.info)
|
||||
result.add ri[0] ## add label
|
||||
result.add moveOrCopy(dest, ri[1], c)
|
||||
of nkIfExpr, nkIfStmt:
|
||||
result = newNodeI(nkIfStmt, ri.info)
|
||||
for i in 0..<ri.len:
|
||||
var branch = copyNode(ri[i])
|
||||
if ri[i].kind in {nkElifBranch, nkElifExpr}:
|
||||
branch.add p(ri[i][0], c)
|
||||
branch.add moveOrCopyIfTyped(ri[i][1])
|
||||
else:
|
||||
branch.add moveOrCopyIfTyped(ri[i][0])
|
||||
result.add branch
|
||||
of nkCaseStmt:
|
||||
result = newNodeI(nkCaseStmt, ri.info)
|
||||
result.add p(ri[0], c)
|
||||
for i in 1..<ri.len:
|
||||
var branch: PNode
|
||||
if ri[i].kind == nkOfBranch:
|
||||
branch = ri[i] # of branch conditions are constants
|
||||
branch[^1] = moveOrCopyIfTyped(ri[i][^1])
|
||||
elif ri[i].kind in {nkElifBranch, nkElifExpr}:
|
||||
branch = copyNode(ri[i])
|
||||
branch.add p(ri[i][0], c)
|
||||
branch.add moveOrCopyIfTyped(ri[i][1])
|
||||
else:
|
||||
branch = copyNode(ri[i])
|
||||
branch.add moveOrCopyIfTyped(ri[i][0])
|
||||
result.add branch
|
||||
result = genCopy(c, dest, ri)
|
||||
result.add pExpr(ri, c)
|
||||
of nkBracket:
|
||||
# array constructor
|
||||
if ri.len > 0 and isDangerousSeq(ri.typ):
|
||||
result = genCopy(c, dest.typ, dest, ri)
|
||||
result = genCopy(c, dest, ri)
|
||||
else:
|
||||
result = genSink(c, dest.typ, dest, ri)
|
||||
let ri2 = copyTree(ri)
|
||||
for i in 0..<ri.len:
|
||||
# everything that is passed to an array constructor is consumed,
|
||||
# so these all act like 'sink' parameters:
|
||||
ri2[i] = pArg(ri[i], c, isSink = true)
|
||||
result.add ri2
|
||||
of nkObjConstr:
|
||||
result = genSink(c, dest.typ, dest, ri)
|
||||
let ri2 = copyTree(ri)
|
||||
for i in 1..<ri.len:
|
||||
# everything that is passed to an object constructor is consumed,
|
||||
# so these all act like 'sink' parameters:
|
||||
ri2[i].sons[1] = pArg(ri[i][1], c, isSink = true)
|
||||
result.add ri2
|
||||
of nkTupleConstr, nkClosure:
|
||||
result = genSink(c, dest.typ, dest, ri)
|
||||
let ri2 = copyTree(ri)
|
||||
for i in ord(ri.kind == nkClosure)..<ri.len:
|
||||
# everything that is passed to a tuple constructor is consumed,
|
||||
# so these all act like 'sink' parameters:
|
||||
if ri[i].kind == nkExprColonExpr:
|
||||
ri2[i].sons[1] = pArg(ri[i][1], c, isSink = true)
|
||||
else:
|
||||
ri2[i] = pArg(ri[i], c, isSink = true)
|
||||
result.add ri2
|
||||
of nkNilLit:
|
||||
result = genSink(c, dest.typ, dest, ri)
|
||||
result.add ri
|
||||
result = genSink(c, dest, ri)
|
||||
result.add pExpr(ri, c)
|
||||
of nkObjConstr, nkTupleConstr, nkClosure, nkCharLit..nkNilLit:
|
||||
result = genSink(c, dest, ri)
|
||||
result.add pExpr(ri, c)
|
||||
of nkSym:
|
||||
if isSinkParam(ri.sym):
|
||||
# Rule 3: `=sink`(x, z); wasMoved(z)
|
||||
sinkParamIsLastReadCheck(c, ri)
|
||||
var snk = genSink(c, dest.typ, dest, ri)
|
||||
var snk = genSink(c, dest, ri)
|
||||
snk.add ri
|
||||
result = newTree(nkStmtList, snk, genWasMoved(ri, c))
|
||||
elif ri.sym.kind != skParam and ri.sym.owner == c.owner and
|
||||
isLastRead(ri, c) and canBeMoved(dest.typ):
|
||||
# Rule 3: `=sink`(x, z); wasMoved(z)
|
||||
var snk = genSink(c, dest.typ, dest, ri)
|
||||
var snk = genSink(c, dest, ri)
|
||||
snk.add ri
|
||||
result = newTree(nkStmtList, snk, genWasMoved(ri, c))
|
||||
else:
|
||||
result = genCopy(c, dest.typ, dest, ri)
|
||||
result.add p(ri, c)
|
||||
of nkHiddenSubConv, nkHiddenStdConv:
|
||||
if sameType(ri.typ, ri[1].typ):
|
||||
result = moveOrCopy(dest, ri[1], c)
|
||||
elif ri[1].kind in movableNodeKinds:
|
||||
result = moveOrCopy(dest, ri[1], c)
|
||||
var b = newNodeIT(ri.kind, ri.info, ri.typ)
|
||||
b.add ri[0] # add empty node
|
||||
let L = result.len-1
|
||||
b.add result[L]
|
||||
result[L] = b
|
||||
else:
|
||||
result = genCopy(c, dest.typ, dest, ri)
|
||||
result.add p(ri, c)
|
||||
result = genCopy(c, dest, ri)
|
||||
result.add pExpr(ri, c)
|
||||
of nkHiddenSubConv, nkHiddenStdConv, nkConv:
|
||||
result = moveOrCopy(dest, ri[1], c)
|
||||
if not sameType(ri.typ, ri[1].typ):
|
||||
let copyRi = copyTree(ri)
|
||||
copyRi[1] = result[^1]
|
||||
result[^1] = copyRi
|
||||
of nkObjDownConv, nkObjUpConv:
|
||||
if ri[0].kind in movableNodeKinds:
|
||||
result = moveOrCopy(dest, ri[0], c)
|
||||
var b = newNodeIT(ri.kind, ri.info, ri.typ)
|
||||
let L = result.len-1
|
||||
b.add result[L]
|
||||
result[L] = b
|
||||
else:
|
||||
result = genCopy(c, dest.typ, dest, ri)
|
||||
result.add p(ri, c)
|
||||
result = moveOrCopy(dest, ri[0], c)
|
||||
let copyRi = copyTree(ri)
|
||||
copyRi[0] = result[^1]
|
||||
result[^1] = copyRi
|
||||
of nkStmtListExpr, nkBlockExpr:
|
||||
result = recurse(ri, c, proc(n: PNode, c: var Con): PNode = moveOrCopy(dest, n, c))
|
||||
of nkIfExpr, nkCaseStmt:
|
||||
result = recurse(ri, c, proc(n: PNode, c: var Con): PNode =
|
||||
if n.typ == nil: pStmt(n, c) #in if/case expr branch with noreturn
|
||||
else: moveOrCopy(dest, n, c))
|
||||
else:
|
||||
if isAnalysableFieldAccess(ri, c.owner) and isLastRead(ri, c) and
|
||||
canBeMoved(dest.typ):
|
||||
# Rule 3: `=sink`(x, z); wasMoved(z)
|
||||
var snk = genSink(c, dest.typ, dest, ri)
|
||||
var snk = genSink(c, dest, ri)
|
||||
snk.add ri
|
||||
result = newTree(nkStmtList, snk, genWasMoved(ri, c))
|
||||
else:
|
||||
# XXX At least string literals can be moved?
|
||||
result = genCopy(c, dest.typ, dest, ri)
|
||||
result.add p(ri, c)
|
||||
result = genCopy(c, dest, ri)
|
||||
result.add pExpr(ri, c)
|
||||
|
||||
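For orientation, the rewrite that the `genSink`/`genWasMoved` pairs above produce can be pictured on a plain assignment. This is only an illustrative sketch; the variable names are invented and the hooks are the compiler-known `=sink`/`wasMoved`:

```nim
var a, b: string
b = "payload"
# `a = b`, where `b` is provably a last read, is rewritten roughly into:
`=sink`(a, b)   # steal b's storage instead of copying it
wasMoved(b)     # disarm b so its later destruction is a no-op
```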
proc computeUninit(c: var Con) =
if not c.uninitComputed:
@@ -717,17 +791,14 @@ proc computeUninit(c: var Con) =
proc injectDefaultCalls(n: PNode, c: var Con) =
case n.kind
of nkVarSection, nkLetSection:
for i in 0..<n.len:
let it = n[i]
let L = it.len-1
let ri = it[L]
if it.kind == nkIdentDefs and ri.kind == nkEmpty:
for it in n:
if it.kind == nkIdentDefs and it[^1].kind == nkEmpty:
computeUninit(c)
for j in 0..L-2:
for j in 0..<it.len-2:
let v = it[j]
doAssert v.kind == nkSym
if c.uninit.contains(v.sym.id):
it[L] = genDefaultCall(v.sym.typ, c, v.info)
it[^1] = genDefaultCall(v.sym.typ, c, v.info)
break
of nkNone..nkNilLit, nkTypeSection, nkProcDef, nkConverterDef, nkMethodDef,
nkIteratorDef, nkMacroDef, nkTemplateDef, nkLambda, nkDo, nkFuncDef:
@@ -736,130 +807,16 @@ proc injectDefaultCalls(n: PNode, c: var Con) =
for i in 0..<safeLen(n):
injectDefaultCalls(n[i], c)

proc isCursor(n: PNode): bool {.inline.} =
result = n.kind == nkSym and sfCursor in n.sym.flags

proc keepVar(n, it: PNode, c: var Con): PNode =
# keep the var but transform 'ri':
result = copyNode(n)
var itCopy = copyNode(it)
for j in 0..it.len-2:
itCopy.add it[j]
itCopy.add p(it[it.len-1], c)
result.add itCopy

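The `isCursor` guard above exempts cursor variables from this machinery; a hedged sketch of what that looks like on the user side, assuming the `.cursor` pragma is exposed by the compiler build in use:

```nim
var data = @["a", "b", "c"]
var view {.cursor.}: seq[string]
view = data   # non-owning view: no copy hook and no destructor is injected for `view`
```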
proc p(n: PNode; c: var Con): PNode =
case n.kind
of nkVarSection, nkLetSection:
discard "transform; var x = y to var x; x op y where op is a move or copy"
result = newNodeI(nkStmtList, n.info)

for i in 0..<n.len:
let it = n[i]
let L = it.len
var ri = it[L-1]
if it.kind == nkVarTuple and hasDestructor(ri.typ):
let x = lowerTupleUnpacking(c.graph, it, c.owner)
result.add p(x, c)
elif it.kind == nkIdentDefs and hasDestructor(it[0].typ) and not isCursor(it[0]):
for j in 0..L-3:
let v = it[j]
if v.kind == nkSym:
if sfCompileTime in v.sym.flags: continue
# move the variable declaration to the top of the frame:
c.addTopVar v
# make sure it's destroyed at the end of the proc:
if not isUnpackedTuple(it[0].sym):
c.destroys.add genDestroy(c, v.typ, v)
if ri.kind == nkEmpty and c.inLoop > 0:
ri = genDefaultCall(v.typ, c, v.info)
if ri.kind != nkEmpty:
let r = moveOrCopy(v, ri, c)
result.add r
else:
result.add keepVar(n, it, c)
of nkCallKinds:
let parameters = n[0].typ
let L = if parameters != nil: parameters.len else: 0
for i in 1 ..< n.len:
n.sons[i] = pArg(n[i], c, i < L and isSinkTypeForParam(parameters[i]))
if n.typ != nil and hasDestructor(n.typ):
discard "produce temp creation"
result = newNodeIT(nkStmtListExpr, n.info, n.typ)
let tmp = getTemp(c, n.typ, n.info)
var sinkExpr = genSink(c, n.typ, tmp, n)
sinkExpr.add n
result.add sinkExpr
result.add tmp
c.destroys.add genDestroy(c, n.typ, tmp)
else:
result = n
of nkAsgn, nkFastAsgn:
if hasDestructor(n[0].typ) and n[1].kind notin {nkProcDef, nkDo, nkLambda}:
# rule (self-assignment-removal):
if n[1].kind == nkSym and n[0].kind == nkSym and n[0].sym == n[1].sym:
result = newNodeI(nkEmpty, n.info)
else:
result = moveOrCopy(n[0], n[1], c)
else:
result = copyNode(n)
recurse(n, result)
of nkNone..nkNilLit, nkTypeSection, nkProcDef, nkConverterDef, nkMethodDef,
nkIteratorDef, nkMacroDef, nkTemplateDef, nkLambda, nkDo, nkFuncDef:
result = n
of nkCast, nkHiddenStdConv, nkHiddenSubConv, nkConv:
result = copyNode(n)
# Destination type
result.add n[0]
# Analyse the inner expression
result.add p(n[1], c)
of nkWhen:
# This should be a "when nimvm" node.
result = copyTree(n)
result[1][0] = p(result[1][0], c)
of nkRaiseStmt:
if optNimV2 in c.graph.config.globalOptions and n[0].kind != nkEmpty:
if n[0].kind in nkCallKinds:
let call = copyNode(n[0])
recurse(n[0], call)
result = copyNode(n)
result.add call
else:
let t = n[0].typ
let tmp = getTemp(c, t, n.info)
var m = genCopyNoCheck(c, t, tmp, n[0])

m.add p(n[0], c)
result = newTree(nkStmtList, genWasMoved(tmp, c), m)
var toDisarm = n[0]
if toDisarm.kind == nkStmtListExpr: toDisarm = toDisarm.lastSon
if toDisarm.kind == nkSym and toDisarm.sym.owner == c.owner:
result.add genWasMoved(toDisarm, c)
result.add newTree(nkRaiseStmt, tmp)
else:
result = copyNode(n)
recurse(n, result)
of nkForStmt, nkParForStmt, nkWhileStmt:
inc c.inLoop
result = copyNode(n)
recurse(n, result)
dec c.inLoop
else:
result = copyNode(n)
recurse(n, result)

proc extractDestroysForTemporaries(c: Con, destroys: PNode): PNode =
result = newNodeI(nkStmtList, destroys.info)
for i in 0 ..< destroys.len:
for i in 0..<destroys.len:
if destroys[i][1][0].sym.kind == skTemp:
result.add destroys[i]
destroys[i] = c.emptyNode

proc reverseDestroys(destroys: PNode) =
var reversed: seq[PNode]
proc reverseDestroys(destroys: seq[PNode]): seq[PNode] =
for i in countdown(destroys.len - 1, 0):
reversed.add(destroys[i])
destroys.sons = reversed
result.add destroys[i]

proc injectDestructorCalls*(g: ModuleGraph; owner: PSym; n: PNode): PNode =
if sfGeneratedOp in owner.flags or isInlineIterator(owner): return n
@@ -876,23 +833,24 @@ proc injectDestructorCalls*(g: ModuleGraph; owner: PSym; n: PNode): PNode =
if c.g[i].kind in {goto, fork}:
c.jumpTargets.incl(i+c.g[i].dest)
dbg:
echo "injecting into ", n
echo "\n### ", owner.name.s, ":\nCFG:"
echoCfg(c.g)
echo n
if owner.kind in {skProc, skFunc, skMethod, skIterator, skConverter}:
let params = owner.typ.n
for i in 1 ..< params.len:
let param = params[i].sym
if isSinkTypeForParam(param.typ) and hasDestructor(param.typ.skipTypes({tySink})):
c.destroys.add genDestroy(c, param.typ.skipTypes({tyGenericInst, tyAlias, tySink}), params[i])
for i in 1..<params.len:
let t = params[i].sym.typ
if isSinkTypeForParam(t) and hasDestructor(t.skipTypes({tySink})):
c.destroys.add genDestroy(c, params[i])

#if optNimV2 in c.graph.config.globalOptions:
# injectDefaultCalls(n, c)
let body = p(n, c)
let body = pStmt(n, c)
result = newNodeI(nkStmtList, n.info)
if c.topLevelVars.len > 0:
result.add c.topLevelVars
if c.destroys.len > 0:
reverseDestroys(c.destroys)
c.destroys.sons = reverseDestroys(c.destroys.sons)
if owner.kind == skModule:
result.add newTryFinally(body, extractDestroysForTemporaries(c, c.destroys))
g.globalDestructors.add c.destroys
@@ -902,6 +860,5 @@ proc injectDestructorCalls*(g: ModuleGraph; owner: PSym; n: PNode): PNode =
result.add body

dbg:
echo "------------------------------------"
echo owner.name.s, " transformed to: "
echo ">---------transformed-to--------->"
echo result

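Roughly, after `injectDestructorCalls` a routine with destructible locals ends up shaped like the sketch below; the names are placeholders and the exact wrapping differs between module-level and proc-level code:

```nim
proc consume() =
  var s: string            # hoisted to the top of the frame
  s = produceString()      # assignment turned into a sink or copy (hypothetical producer)
  # ... rest of the original body ...
  `=destroy`(s)            # destroys are appended in reverse declaration order
```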
@@ -9,7 +9,7 @@ Platforms: """
linux: i386;ia64;alpha;amd64;powerpc64;arm;sparc;sparc64;m68k;mips;mipsel;mips64;mips64el;powerpc;powerpc64el;arm64;riscv64
macosx: i386;amd64;powerpc64
solaris: i386;amd64;sparc;sparc64
freebsd: i386;amd64
freebsd: i386;amd64;powerpc64
netbsd: i386;amd64
openbsd: i386;amd64
dragonfly: i386;amd64
@@ -87,6 +87,7 @@ Files: "bin/nimble.exe"
Files: "bin/vccexe.exe"
Files: "bin/nimgrab.exe"
Files: "bin/nimpretty.exe"
Files: "bin/testament.exe"

Files: "koch.exe"
Files: "finish.exe"

@@ -431,10 +431,6 @@ const # magic checked op; magic unchecked op;
["", ""], # UnaryPlusF64
["", ""], # UnaryMinusF64
["", ""], # AbsF64
["", ""], # ToFloat
["", ""], # ToBiggestFloat
["", ""], # ToInt
["", ""], # ToBiggestInt
["nimCharToStr", "nimCharToStr"],
["nimBoolToStr", "nimBoolToStr"],
["cstrToNimstr", "cstrToNimstr"],
@@ -612,10 +608,6 @@ proc arithAux(p: PProc, n: PNode, r: var TCompRes, op: TMagic) =
of mUnaryPlusF64: applyFormat("+($1)", "+($1)")
of mUnaryMinusF64: applyFormat("-($1)", "-($1)")
of mAbsF64: applyFormat("Math.abs($1)", "Math.abs($1)")
of mToFloat: applyFormat("$1", "$1")
of mToBiggestFloat: applyFormat("$1", "$1")
of mToInt: applyFormat("Math.trunc($1)", "Math.trunc($1)")
of mToBiggestInt: applyFormat("Math.trunc($1)", "Math.trunc($1)")
of mCharToStr: applyFormat("nimCharToStr($1)", "nimCharToStr($1)")
of mBoolToStr: applyFormat("nimBoolToStr($1)", "nimBoolToStr($1)")
of mIntToStr: applyFormat("cstrToNimstr(($1)+\"\")", "cstrToNimstr(($1)+\"\")")
@@ -666,11 +658,7 @@ proc genLineDir(p: PProc, n: PNode) =
if optLineDir in p.options:
lineF(p, "// line $2 \"$1\"$n",
[rope(toFilename(p.config, n.info)), rope(line)])
if {optStackTrace, optEndb} * p.options == {optStackTrace, optEndb} and
((p.prc == nil) or sfPure notin p.prc.flags):
useMagic(p, "endb")
lineF(p, "endb($1);$n", [rope(line)])
elif hasFrameInfo(p):
if hasFrameInfo(p):
lineF(p, "F.line = $1;$n", [rope(line)])

proc genWhileStmt(p: PProc, n: PNode) =
@@ -1612,6 +1600,8 @@ proc createVar(p: PProc, typ: PType, indirect: bool): Rope =
result = putToSeq("{}", indirect)
of tyBool:
result = putToSeq("false", indirect)
of tyNil:
result = putToSeq("null", indirect)
of tyArray:
let length = toInt(lengthOrd(p.config, t))
let e = elemType(t)

@@ -129,7 +129,7 @@ proc newDeepCopyCall(op: PSym; x, y: PNode): PNode =
result = newAsgnStmt(x, newOpCall(op, y))

proc useNoGc(c: TLiftCtx; t: PType): bool {.inline.} =
result = optNimV2 in c.g.config.globalOptions and
result = c.g.config.selectedGC == gcDestructors and
({tfHasGCedMem, tfHasOwned} * t.flags != {} or t.isGCedMem)

proc instantiateGeneric(c: var TLiftCtx; op: PSym; t, typeInst: PType): PSym =
@@ -506,7 +506,11 @@ proc fillBody(c: var TLiftCtx; t: PType; body, x, y: PNode) =
of tyTuple:
fillBodyTup(c, t, body, x, y)
of tyVarargs, tyOpenArray:
localError(c.g.config, c.info, "cannot copy openArray")
if c.kind == attachedDestructor:
forallElements(c, t, body, x, y)
else:
discard "cannot copy openArray"

of tyFromExpr, tyProxy, tyBuiltInTypeClass, tyUserTypeClass,
tyUserTypeClassInst, tyCompositeTypeClass, tyAnd, tyOr, tyNot, tyAnything,
tyGenericParam, tyGenericBody, tyNil, tyUntyped, tyTyped,
@@ -559,7 +563,7 @@ proc produceSym(g: ModuleGraph; c: PContext; typ: PType; kind: TTypeAttachedOp;
typ.attachedOps[kind] = result

var tk: TTypeKind
if optNimV2 in g.config.globalOptions:
if g.config.selectedGC == gcDestructors:
tk = skipTypes(typ, {tyOrdinal, tyRange, tyInferred, tyGenericInst, tyStatic, tyAlias, tySink}).kind
else:
tk = tyNone # no special casing for strings and seqs
@@ -626,7 +630,7 @@ proc createTypeBoundOps(g: ModuleGraph; c: PContext; orig: PType; info: TLineInf
var canon = g.canonTypes.getOrDefault(h)
var overwrite = false
if canon == nil:
let typ = orig.skipTypes({tyGenericInst, tyAlias})
let typ = orig.skipTypes({tyGenericInst, tyAlias, tySink})
g.canonTypes[h] = typ
canon = typ
elif canon != orig:

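The lifting code above synthesizes the type-bound operators field by field; for a simple object the effect is roughly this hand-written equivalent (a sketch only, not literal compiler output):

```nim
type Pack = object
  name: string
  data: seq[int]

proc `=destroy`(p: var Pack) =
  # the lifted destructor just destroys each field in turn
  `=destroy`(p.name)
  `=destroy`(p.data)
```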
@@ -21,8 +21,8 @@ proc newDeref*(n: PNode): PNode {.inline.} =

proc newTupleAccess*(g: ModuleGraph; tup: PNode, i: int): PNode =
if tup.kind == nkHiddenAddr:
result = newNodeIT(nkHiddenAddr, tup.info, tup.typ.skipTypes(abstractInst+{tyPtr, tyVar}))
result.addSon(newNodeIT(nkBracketExpr, tup.info, tup.typ.skipTypes(abstractInst+{tyPtr, tyVar}).sons[i]))
result = newNodeIT(nkHiddenAddr, tup.info, tup.typ.skipTypes(abstractInst+{tyPtr, tyVar, tyLent}))
result.addSon(newNodeIT(nkBracketExpr, tup.info, tup.typ.skipTypes(abstractInst+{tyPtr, tyVar, tyLent}).sons[i]))
addSon(result[0], tup[0])
var lit = newNodeIT(nkIntLit, tup.info, getSysType(g, tup.info, tyInt))
lit.intVal = i

@@ -18,7 +18,7 @@ import
sem, idents, passes, extccomp,
cgen, json, nversion,
platform, nimconf, passaux, depends, vm, idgen,
parser, modules,
modules,
modulegraphs, tables, rod, lineinfos, pathutils

when not defined(leanCompiler):

@@ -50,7 +50,6 @@ proc partialInitModule(result: PSym; graph: ModuleGraph; fileIdx: FileIndex; fil
setLen(graph.modules, int(fileIdx) + 1)
graph.modules[result.position] = result

incl(result.flags, sfUsed)
initStrTable(result.tab)
strTableAdd(result.tab, result) # a module knows itself
strTableAdd(packSym.tab, result)

@@ -30,7 +30,6 @@ type # please make sure we have under 32 options
optAssert, optLineDir, optWarns, optHints,
optOptimizeSpeed, optOptimizeSize, optStackTrace, # stack tracing support
optLineTrace, # line tracing support (includes stack tracing)
optEndb, # embedded debugger
optByRef, # use pass by ref for objects
# (for interfacing with C)
optProfiler, # profiler turned on
@@ -84,6 +83,7 @@ type # please make sure we have under 32 options
optDynlibOverrideAll
optNimV2
optMultiMethods
optNimV019

TGlobalOptions* = set[TGlobalOption]

@@ -718,3 +718,13 @@ proc `$`*(c: IdeCmd): string =
of ideOutline: "outline"
of ideKnown: "known"
of ideMsg: "msg"

proc floatInt64Align*(conf: ConfigRef): int16 =
## Returns either 4 or 8 depending on reasons.
if conf.target.targetCPU == cpuI386:
#on Linux/BSD i386, double are aligned to 4bytes (except with -malign-double)
if conf.target.targetOS != osWindows:
# on i386 for all known POSIX systems, 64bits ints are aligned
# to 4bytes (except with -malign-double)
return 4
return 8

@@ -17,25 +17,24 @@ proc semLocals*(c: PContext, n: PNode): PNode =
var tupleType = newTypeS(tyTuple, c)
result = newNodeIT(nkPar, n.info, tupleType)
tupleType.n = newNodeI(nkRecList, n.info)
let owner = getCurrOwner(c)
# for now we skip openarrays ...
for scope in walkScopes(c.currentScope):
if scope == c.topLevelScope: break
for it in items(scope.symbols):
# XXX parameters' owners are wrong for generics; this caused some pain
# for closures too; we should finally fix it.
#if it.owner != c.p.owner: return result
if it.kind in skLocalVars and
it.typ.skipTypes({tyGenericInst, tyVar}).kind notin
{tyVarargs, tyOpenArray, tyTypeDesc, tyStatic, tyUntyped, tyTyped, tyEmpty}:

var field = newSym(skField, it.name, getCurrOwner(c), n.info)
field.typ = it.typ.skipTypes({tyVar})
field.position = counter
inc(counter)
if it.owner == owner:
var field = newSym(skField, it.name, owner, n.info)
field.typ = it.typ.skipTypes({tyVar})
field.position = counter
inc(counter)

addSon(tupleType.n, newSymNode(field))
addSonSkipIntLit(tupleType, field.typ)
addSon(tupleType.n, newSymNode(field))
addSonSkipIntLit(tupleType, field.typ)

var a = newSymNode(it, result.info)
if it.typ.skipTypes({tyGenericInst}).kind == tyVar: a = newDeref(a)
result.add(a)
var a = newSymNode(it, result.info)
if it.typ.skipTypes({tyGenericInst}).kind == tyVar: a = newDeref(a)
result.add(a)

@@ -44,12 +44,12 @@ const
wWarnings, wHints,
wLineDir, wStackTrace, wLineTrace, wOptimization, wHint, wWarning, wError,
wFatal, wDefine, wUndef, wCompile, wLink, wLinksys, wPure, wPush, wPop,
wBreakpoint, wWatchPoint, wPassl, wPassc,
wPassl, wPassc,
wDeadCodeElimUnused, # deprecated, always on
wDeprecated,
wFloatChecks, wInfChecks, wNanChecks, wPragma, wEmit, wUnroll,
wLinearScanEnd, wPatterns, wTrMacros, wEffects, wNoForward, wReorder, wComputedGoto,
wInjectStmt, wDeprecated, wExperimental, wThis}
wInjectStmt, wDeprecated, wExperimental, wThis, wUsed}
lambdaPragmas* = {FirstCallConv..LastCallConv, wImportc, wExportc, wNodecl,
wNoSideEffect, wSideEffect, wNoreturn, wDynlib, wHeader,
wDeprecated, wExtern, wThread, wImportCpp, wImportObjC, wAsmNoStackFrame,
@@ -345,7 +345,7 @@ proc pragmaToOptions(w: TSpecialWord): TOptions {.inline.} =
of wLineDir: {optLineDir}
of wStackTrace: {optStackTrace}
of wLineTrace: {optLineTrace}
of wDebugger: {optEndb}
of wDebugger: {optNone}
of wProfiler: {optProfiler, optMemTracker}
of wMemTracker: {optMemTracker}
of wByRef: {optByRef}
@@ -513,15 +513,6 @@ proc processLink(c: PContext, n: PNode) =
extccomp.addExternalFileToLink(c.config, found)
recordPragma(c, n, "link", found.string)

proc pragmaBreakpoint(c: PContext, n: PNode) =
discard getOptionalStr(c, n, "")

proc pragmaWatchpoint(c: PContext, n: PNode) =
if n.kind in nkPragmaCallKinds and n.len == 2:
n.sons[1] = c.semExpr(c, n.sons[1])
else:
invalidPragma(c, n)

proc semAsmOrEmit*(con: PContext, n: PNode, marker: char): PNode =
case n.sons[1].kind
of nkStrLit, nkRStrLit, nkTripleStrLit:
@@ -816,12 +807,12 @@ proc singlePragma(c: PContext, sym: PSym, n: PNode, i: var int,
if sym.typ == nil: invalidPragma(c, it)
var size = expectIntLit(c, it)
case size
of 1, 2, 4, 8:
of 1, 2, 4:
sym.typ.size = size
if size == 8 and c.config.target.targetCPU == cpuI386:
sym.typ.align = 4
else:
sym.typ.align = int16(size)
sym.typ.align = int16 size
of 8:
sym.typ.size = 8
sym.typ.align = floatInt64Align(c.config)
else:
localError(c.config, it.info, "size may only be 1, 2, 4 or 8")
of wNodecl:
@@ -996,8 +987,6 @@ proc singlePragma(c: PContext, sym: PSym, n: PNode, i: var int,
let s = expectStrLit(c, it)
extccomp.addCompileOption(c.config, s)
recordPragma(c, it, "passc", s)
of wBreakpoint: pragmaBreakpoint(c, it)
of wWatchPoint: pragmaWatchpoint(c, it)
of wPush:
processPush(c, n, i + 1)
result = true

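The `size` pragma handling above now routes 8-byte requests through `floatInt64Align`, so the alignment matches the platform rules for 64-bit values. The pragma itself is most often used to pin an enum's size for C interop, e.g.:

```nim
type
  CFlag {.size: 4.} = enum   # force a 4-byte representation
    cfA, cfB, cfC

static: doAssert sizeof(CFlag) == 4
```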
@@ -9,6 +9,11 @@

# This module implements the renderer of the standard Nim representation.

when defined(nimHasUsed):
# 'import renderer' is so useful for debugging
# that Nim shouldn't produce a warning for that:
{.used.}

import
lexer, options, idents, strutils, ast, msgs, lineinfos

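The `{.used.}` marker added above can be applied in ordinary code as well, either to a whole module or to a symbol that is only referenced indirectly, to silence the corresponding unused hints:

```nim
proc calledOnlyFromGeneratedCode() {.used.} =
  # marked as used so the compiler does not report it as dead code
  discard
```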
@@ -77,14 +77,12 @@ template semIdeForTemplateOrGeneric(c: PContext; n: PNode;
discard safeSemExpr(c, n)

proc fitNodePostMatch(c: PContext, formal: PType, arg: PNode): PNode =
result = arg
let x = result.skipConv
let x = arg.skipConv
if x.kind in {nkPar, nkTupleConstr, nkCurly} and formal.kind != tyUntyped:
changeType(c, x, formal, check=true)
else:
result = skipHiddenSubConv(result)
#result.typ = takeType(formal, arg.typ)
#echo arg.info, " picked ", result.typ.typeToString
result = arg
result = skipHiddenSubConv(result)


proc fitNode(c: PContext, formal: PType, arg: PNode; info: TLineInfo): PNode =
if arg.typ.isNil:
@@ -405,7 +403,13 @@ proc semAfterMacroCall(c: PContext, call, macroResult: PNode,
if s.typ.sons[0] == nil:
result = semStmt(c, result, flags)
else:
case s.typ.sons[0].kind
var retType = s.typ.sons[0]
if retType.kind == tyTypeDesc and tfUnresolved in retType.flags and
retType.len == 1:
# bug #11941: template fails(T: type X, v: auto): T
# does not mean we expect a tyTypeDesc.
retType = retType[0]
case retType.kind
of tyUntyped:
# Not expecting a type here allows templates like in ``tmodulealias.in``.
result = semExpr(c, result, flags)
@@ -423,7 +427,6 @@ proc semAfterMacroCall(c: PContext, call, macroResult: PNode,
result.typ = makeTypeDesc(c, typ)
#result = symNodeFromType(c, typ, n.info)
else:
var retType = s.typ.sons[0]
if s.ast[genericParamsPos] != nil and retType.isMetaType:
# The return type may depend on the Macro arguments
# e.g. template foo(T: typedesc): seq[T]
@@ -444,6 +447,7 @@ proc semAfterMacroCall(c: PContext, call, macroResult: PNode,

const
errMissingGenericParamsForTemplate = "'$1' has unspecified generic parameters"
errFloatToString = "cannot convert '$1' to '$2'"

proc semMacroExpr(c: PContext, n, nOrig: PNode, sym: PSym,
flags: TExprFlags = {}): PNode =
@@ -614,7 +618,8 @@ proc myProcess(context: PPassContext, n: PNode): PNode =

proc reportUnusedModules(c: PContext) =
for i in 0..high(c.unusedImports):
message(c.config, c.unusedImports[i][1], warnUnusedImportX, c.unusedImports[i][0].name.s)
if sfUsed notin c.unusedImports[i][0].flags:
message(c.config, c.unusedImports[i][1], warnUnusedImportX, c.unusedImports[i][0].name.s)

proc myClose(graph: ModuleGraph; context: PPassContext, n: PNode): PNode =
var c = PContext(context)

@@ -176,6 +176,8 @@ proc presentFailedCandidates(c: PContext, n: PNode, errors: CandidateErrors):
filterOnlyFirst = true
break

var maybeWrongSpace = false

var candidates = ""
var skipped = 0
for err in errors:
@@ -218,11 +220,17 @@ proc presentFailedCandidates(c: PContext, n: PNode, errors: CandidateErrors):
if got != nil: effectProblem(wanted, got, candidates)
of kUnknown: discard "do not break 'nim check'"
candidates.add "\n"
if err.firstMismatch.arg == 1 and nArg.kind == nkTupleConstr and
n.kind == nkCommand:
maybeWrongSpace = true
for diag in err.diagnostics:
candidates.add(diag & "\n")
if skipped > 0:
candidates.add($skipped & " other mismatching symbols have been " &
"suppressed; compile with --showAllMismatches:on to see them\n")
if maybeWrongSpace:
candidates.add("maybe misplaced space between " & renderTree(n[0]) & " and '(' \n")

result = (prefer, candidates)

const

@@ -30,7 +30,7 @@ proc semTemplateExpr(c: PContext, n: PNode, s: PSym,
# Note: This is n.info on purpose. It prevents template from creating an info
# context when called from an another template
pushInfoContext(c.config, n.info, s.detailedInfo)
result = evalTemplate(n, s, getCurrOwner(c), c.config, efFromHlo in flags)
result = evalTemplate(n, s, getCurrOwner(c), c.config, c.cache, efFromHlo in flags)
if efNoSemCheck notin flags: result = semAfterMacroCall(c, n, result, s, flags)
popInfoContext(c.config)

@@ -513,6 +513,9 @@ proc changeType(c: PContext; n: PNode, newType: PType, check: bool) =
if value < firstOrd(c.config, newType) or value > lastOrd(c.config, newType):
localError(c.config, n.info, "cannot convert " & $value &
" to " & typeToString(newType))
of nkFloatLit..nkFloat64Lit:
if check and not floatRangeCheck(n.floatVal, newType):
localError(c.config, n.info, errFloatToString % [$n.floatVal, typeToString(newType)])
else: discard
n.typ = newType

@@ -1236,6 +1239,7 @@ proc semSym(c: PContext, n: PNode, sym: PSym, flags: TExprFlags): PNode =
result = newSymNode(s, n.info)
else:
let info = getCallLineInfo(n)
#if efInCall notin flags:
markUsed(c, info, s)
onUse(info, s)
result = newSymNode(s, info)
@@ -1777,21 +1781,21 @@ proc semYieldVarResult(c: PContext, n: PNode, restype: PType) =
var t = skipTypes(restype, {tyGenericInst, tyAlias, tySink})
case t.kind
of tyVar, tyLent:
if t.kind == tyVar: t.flags.incl tfVarIsPtr # bugfix for #4048, #4910, #6892
t.flags.incl tfVarIsPtr # bugfix for #4048, #4910, #6892
if n.sons[0].kind in {nkHiddenStdConv, nkHiddenSubConv}:
n.sons[0] = n.sons[0].sons[1]
n.sons[0] = takeImplicitAddr(c, n.sons[0], t.kind == tyLent)
of tyTuple:
for i in 0..<t.sonsLen:
var e = skipTypes(t.sons[i], {tyGenericInst, tyAlias, tySink})
let e = skipTypes(t.sons[i], {tyGenericInst, tyAlias, tySink})
if e.kind in {tyVar, tyLent}:
if e.kind == tyVar: e.flags.incl tfVarIsPtr # bugfix for #4048, #4910, #6892
e.flags.incl tfVarIsPtr # bugfix for #4048, #4910, #6892
if n.sons[0].kind in {nkPar, nkTupleConstr}:
n.sons[0].sons[i] = takeImplicitAddr(c, n.sons[0].sons[i], e.kind == tyLent)
elif n.sons[0].kind in {nkHiddenStdConv, nkHiddenSubConv} and
n.sons[0].sons[1].kind in {nkPar, nkTupleConstr}:
var a = n.sons[0].sons[1]
a.sons[i] = takeImplicitAddr(c, a.sons[i], false)
a.sons[i] = takeImplicitAddr(c, a.sons[i], e.kind == tyLent)
else:
localError(c.config, n.sons[0].info, errXExpected, "tuple constructor")
else: discard
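The `semYieldVarResult` changes above let `yield` go through `takeImplicitAddr` for `lent` results as well, so an inline iterator can hand out read-only views without copying; a minimal sketch (the iterator name is invented):

```nim
iterator viewItems[T](s: seq[T]): lent T =
  for i in 0 ..< s.len:
    yield s[i]          # yields a borrowed view, no copy of the element

for x in viewItems(@["a", "b"]):
  echo x
```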
@@ -2398,11 +2402,9 @@ proc semTupleFieldsConstr(c: PContext, n: PNode, flags: TExprFlags): PNode =
typ.n = newNodeI(nkRecList, n.info) # nkIdentDefs
var ids = initIntSet()
for i in 0 ..< sonsLen(n):
if n[i].kind != nkExprColonExpr or n[i][0].kind notin {nkSym, nkIdent}:
if n[i].kind != nkExprColonExpr:
illFormedAst(n.sons[i], c.config)
var id: PIdent
if n.sons[i].sons[0].kind == nkIdent: id = n.sons[i].sons[0].ident
else: id = n.sons[i].sons[0].sym.name
let id = considerQuotedIdent(c, n[i][0])
if containsOrIncl(ids, id.id):
localError(c.config, n.sons[i].info, errFieldInitTwice % id.s)
n.sons[i].sons[1] = semExprWithType(c, n.sons[i].sons[1],

@@ -50,7 +50,10 @@ proc newIntNodeT*(intVal: Int128, n: PNode; g: ModuleGraph): PNode =
result.info = n.info

proc newFloatNodeT*(floatVal: BiggestFloat, n: PNode; g: ModuleGraph): PNode =
result = newFloatNode(nkFloatLit, floatVal)
if n.typ.skipTypes(abstractInst).kind == tyFloat32:
result = newFloatNode(nkFloat32Lit, floatVal)
else:
result = newFloatNode(nkFloatLit, floatVal)
result.typ = n.typ
result.info = n.info

@@ -176,7 +179,7 @@ proc evalOp(m: TMagic, n, a, b, c: PNode; g: ModuleGraph): PNode =
of mOrd: result = newIntNodeT(getOrdValue(a), n, g)
of mChr: result = newIntNodeT(getInt(a), n, g)
of mUnaryMinusI, mUnaryMinusI64: result = foldUnarySub(getInt(a), n, g)
of mUnaryMinusF64: result = newFloatNodeT(- getFloat(a), n, g)
of mUnaryMinusF64: result = newFloatNodeT(-getFloat(a), n, g)
of mNot: result = newIntNodeT(One - getInt(a), n, g)
of mCard: result = newIntNodeT(nimsets.cardSet(g.config, a), n, g)
of mBitnotI:
@@ -193,10 +196,7 @@ proc evalOp(m: TMagic, n, a, b, c: PNode; g: ModuleGraph): PNode =
else:
result = newIntNodeT(toInt128(sonsLen(a)), n, g)
of mUnaryPlusI, mUnaryPlusF64: result = a # throw `+` away
of mToFloat, mToBiggestFloat:
result = newFloatNodeT(toFloat64(getInt(a)), n, g)
# XXX: Hides overflow/underflow
of mToInt, mToBiggestInt: result = newIntNodeT(system.toInt(getFloat(a)), n, g)
of mAbsF64: result = newFloatNodeT(abs(getFloat(a)), n, g)
of mAbsI: result = foldAbs(getInt(a), n, g)
of mUnaryLt: result = foldSub(getOrdValue(a), One, n, g)

@@ -141,6 +141,14 @@ proc evalTypeTrait(c: PContext; traitCall: PNode, operand: PType, context: PSym)
return typeWithSonsResult(tyAnd, @[operand, operand2])
of "not":
return typeWithSonsResult(tyNot, @[operand])
of "typeToString":
var prefer = preferTypeName
if traitCall.sons.len >= 2:
let preferStr = traitCall.sons[2].strVal
prefer = parseEnum[TPreferedDesc](preferStr)
result = newStrNode(nkStrLit, operand.typeToString(prefer))
result.typ = newType(tyString, context)
result.info = traitCall.info
of "name", "$":
result = newStrNode(nkStrLit, operand.typeToString(preferTypeName))
result.typ = newType(tyString, context)

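The `"typeToString"` branch added above backs a new type trait alongside the existing `"name"`/`"$"` traits; the latter are reachable from user code via `typetraits`, roughly like this:

```nim
import typetraits

type Vec = seq[int]
echo Vec.name   # "name" trait
echo $Vec       # "$" trait, same rendering machinery underneath
```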
@@ -292,6 +292,8 @@ proc fitRemoveHiddenConv(c: PContext, typ: PType, n: PNode): PNode =
result = newFloatNode(nkFloatLit, BiggestFloat r1.intVal)
result.info = n.info
result.typ = typ
if not floatRangeCheck(result.floatVal, typ):
localError(c.config, n.info, errFloatToString % [$result.floatVal, typeToString(typ)])
else:
changeType(c, r1, typ, check=true)
result = r1
@@ -676,25 +678,30 @@ proc semForVars(c: PContext, n: PNode; flags: TExprFlags): PNode =
var length = sonsLen(n)
let iterBase = n.sons[length-2].typ
var iter = skipTypes(iterBase, {tyGenericInst, tyAlias, tySink})
var iterAfterVarLent = iter.skipTypes({tyLent, tyVar})
# length == 3 means that there is one for loop variable
# and thus no tuple unpacking:
if iter.kind != tyTuple or length == 3:
if iterAfterVarLent.kind != tyTuple or length == 3:
if length == 3:
if n.sons[0].kind == nkVarTuple:
var mutable = false
if iter.kind == tyVar:
iter = iter.skipTypes({tyVar})
mutable = true
if sonsLen(n[0])-1 != sonsLen(iter):
if sonsLen(n[0])-1 != sonsLen(iterAfterVarLent):
localError(c.config, n[0].info, errWrongNumberOfVariables)
for i in 0 ..< sonsLen(n[0])-1:
var v = symForVar(c, n[0][i])
if getCurrOwner(c).kind == skModule: incl(v.flags, sfGlobal)
if mutable:
v.typ = newTypeS(tyVar, c)
v.typ.sons.add iter[i]
else:
v.typ = iter.sons[i]
case iter.kind
of tyVar:
v.typ = newTypeS(tyVar, c)
v.typ.sons.add iterAfterVarLent[i]
if tfVarIsPtr in iter.flags:
v.typ.flags.incl tfVarIsPtr
of tyLent:
v.typ = newTypeS(tyLent, c)
v.typ.sons.add iterAfterVarLent[i]
if tfVarIsPtr in iter.flags:
v.typ.flags.incl tfVarIsPtr
else:
v.typ = iter.sons[i]
n.sons[0][i] = newSymNode(v)
if sfGenSym notin v.flags: addDecl(c, v)
elif v.owner == nil: v.owner = getCurrOwner(c)
@@ -710,15 +717,22 @@ proc semForVars(c: PContext, n: PNode; flags: TExprFlags): PNode =
elif v.owner == nil: v.owner = getCurrOwner(c)
else:
localError(c.config, n.info, errWrongNumberOfVariables)
elif length-2 != sonsLen(iter):
elif length-2 != sonsLen(iterAfterVarLent):
localError(c.config, n.info, errWrongNumberOfVariables)
else:
for i in 0 .. length - 3:
if n.sons[i].kind == nkVarTuple:
var mutable = false
if iter[i].kind == tyVar:
iter[i] = iter[i].skipTypes({tyVar})
mutable = true
var isLent = false
iter[i] = case iter[i].kind
of tyVar:
mutable = true
iter[i].skipTypes({tyVar})
of tyLent:
isLent = true
iter[i].skipTypes({tyLent})
else: iter[i]

if sonsLen(n[i])-1 != sonsLen(iter[i]):
localError(c.config, n[i].info, errWrongNumberOfVariables)
for j in 0 ..< sonsLen(n[i])-1:
@@ -727,6 +741,9 @@ proc semForVars(c: PContext, n: PNode; flags: TExprFlags): PNode =
if mutable:
v.typ = newTypeS(tyVar, c)
v.typ.sons.add iter[i][j]
elif isLent:
v.typ = newTypeS(tyLent, c)
v.typ.sons.add iter[i][j]
else:
v.typ = iter[i][j]
n.sons[i][j] = newSymNode(v)
@@ -735,7 +752,19 @@ proc semForVars(c: PContext, n: PNode; flags: TExprFlags): PNode =
else:
var v = symForVar(c, n.sons[i])
if getCurrOwner(c).kind == skModule: incl(v.flags, sfGlobal)
v.typ = iter.sons[i]
case iter.kind
of tyVar:
v.typ = newTypeS(tyVar, c)
v.typ.sons.add iterAfterVarLent[i]
if tfVarIsPtr in iter.flags:
v.typ.flags.incl tfVarIsPtr
of tyLent:
v.typ = newTypeS(tyLent, c)
v.typ.sons.add iterAfterVarLent[i]
if tfVarIsPtr in iter.flags:
v.typ.flags.incl tfVarIsPtr
else:
v.typ = iter.sons[i]
n.sons[i] = newSymNode(v)
if sfGenSym notin v.flags:
if not isDiscardUnderscore(v): addDecl(c, v)
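The `semForVars` changes above propagate `var`/`lent` from the iterator's yield type onto each loop variable, which is what makes mutation through `mpairs`-style iterators work; for example:

```nim
var xs = @[1, 2, 3]
for i, v in mpairs(xs):   # v is typed as `var int`
  v = v * 2
doAssert xs == @[2, 4, 6]
```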
@@ -1593,7 +1622,9 @@ proc semOverride(c: PContext, s: PSym, n: PNode) =
else: break
if obj.kind in {tyObject, tyDistinct, tySequence, tyString}:
obj = canonType(c, obj)
if obj.destructor.isNil:
if obj.attachedOps[attachedDestructor] == s:
discard "forward declared destructor"
elif obj.destructor.isNil and tfCheckedForDestructor notin obj.flags:
obj.attachedOps[attachedDestructor] = s
else:
prevDestructor(c, obj.destructor, obj, n.info)
@@ -1658,7 +1689,9 @@ proc semOverride(c: PContext, s: PSym, n: PNode) =
obj = canonType(c, obj)
#echo "ATTACHING TO ", obj.id, " ", s.name.s, " ", cast[int](obj)
let k = if name == "=": attachedAsgn else: attachedSink
if obj.attachedOps[k].isNil:
if obj.attachedOps[k] == s:
discard "forward declared op"
elif obj.attachedOps[k].isNil and tfCheckedForDestructor notin obj.flags:
obj.attachedOps[k] = s
else:
prevDestructor(c, obj.attachedOps[k], obj, n.info)
@@ -1878,7 +1911,7 @@ proc semProcAux(c: PContext, n: PNode, kind: TSymKind,
else:
pushProcCon(c, s)
if n.sons[genericParamsPos].kind == nkEmpty or usePseudoGenerics:
if not usePseudoGenerics: paramsTypeCheck(c, s.typ)
if not usePseudoGenerics and s.magic == mNone: paramsTypeCheck(c, s.typ)

c.p.wasForwarded = proto != nil
maybeAddResult(c, s, n)
@@ -1904,6 +1937,10 @@ proc semProcAux(c: PContext, n: PNode, kind: TSymKind,
discard
popProcCon(c)
else:
if s.kind in {skProc, skFunc} and s.typ[0] != nil and s.typ[0].kind == tyUntyped:
# `auto` is represented as `tyUntyped` at this point in compilation.
localError(c.config, n[paramsPos][0].info, "return type 'auto' cannot be used in forward declarations")

if s.kind == skMethod: semMethodPrototype(c, s, n)
if proto != nil: localError(c.config, n.info, errImplOfXexpected % proto.name.s)
if {sfImportc, sfBorrow, sfError} * s.flags == {} and s.magic == mNone:

@@ -47,7 +47,8 @@ type
TSymChoiceRule = enum
scClosed, scOpen, scForceOpen

proc symChoice(c: PContext, n: PNode, s: PSym, r: TSymChoiceRule): PNode =
proc symChoice(c: PContext, n: PNode, s: PSym, r: TSymChoiceRule;
isField = false): PNode =
var
a: PSym
o: TOverloadIter
@@ -63,9 +64,12 @@ proc symChoice(c: PContext, n: PNode, s: PSym, r: TSymChoiceRule): PNode =
# XXX this makes more sense but breaks bootstrapping for now:
# (s.kind notin routineKinds or s.magic != mNone):
# for instance 'nextTry' is both in tables.nim and astalgo.nim ...
result = newSymNode(s, info)
markUsed(c, info, s)
onUse(info, s)
if not isField or sfGenSym notin s.flags:
result = newSymNode(s, info)
markUsed(c, info, s)
onUse(info, s)
else:
result = n
else:
# semantic checking requires a type; ``fitNode`` deals with it
# appropriately
@@ -74,7 +78,7 @@ proc symChoice(c: PContext, n: PNode, s: PSym, r: TSymChoiceRule): PNode =
result = newNodeIT(kind, info, newTypeS(tyNone, c))
a = initOverloadIter(o, c, n)
while a != nil:
if a.kind != skModule:
if a.kind != skModule and (not isField or sfGenSym notin s.flags):
incl(a.flags, sfUsed)
addSon(result, newSymNode(a, info))
onUse(info, a)
@@ -119,6 +123,7 @@ type
owner: PSym
cursorInBody: bool # only for nimsuggest
scopeN: int
noGenSym: int

template withBracketExpr(ctx, x, body: untyped) =
body
@@ -228,7 +233,7 @@ proc addLocalDecl(c: var TemplCtx, n: var PNode, k: TSymKind) =
else:
replaceIdentBySym(c.c, n, ident)

proc semTemplSymbol(c: PContext, n: PNode, s: PSym): PNode =
proc semTemplSymbol(c: PContext, n: PNode, s: PSym; isField: bool): PNode =
incl(s.flags, sfUsed)
# we do not call onUse here, as the identifier is not really
# resolved here. We will fixup the used identifiers later.
@@ -237,15 +242,18 @@ proc semTemplSymbol(c: PContext, n: PNode, s: PSym): PNode =
# Introduced in this pass! Leave it as an identifier.
result = n
of OverloadableSyms:
result = symChoice(c, n, s, scOpen)
result = symChoice(c, n, s, scOpen, isField)
of skGenericParam:
result = newSymNodeTypeDesc(s, n.info)
if isField: result = n
else: result = newSymNodeTypeDesc(s, n.info)
of skParam:
result = n
of skType:
result = newSymNodeTypeDesc(s, n.info)
if isField: result = n
else: result = newSymNodeTypeDesc(s, n.info)
else:
result = newSymNode(s, n.info)
if isField: result = n
else: result = newSymNode(s, n.info)

proc semRoutineInTemplName(c: var TemplCtx, n: PNode): PNode =
result = n
@@ -322,22 +330,23 @@ proc semTemplBody(c: var TemplCtx, n: PNode): PNode =
if n.ident.id in c.toInject: return n
let s = qualifiedLookUp(c.c, n, {})
if s != nil:
if s.owner == c.owner and s.kind == skParam:
if s.owner == c.owner and s.kind == skParam and
(sfGenSym notin s.flags or c.noGenSym == 0):
incl(s.flags, sfUsed)
result = newSymNode(s, n.info)
onUse(n.info, s)
elif contains(c.toBind, s.id):
result = symChoice(c.c, n, s, scClosed)
result = symChoice(c.c, n, s, scClosed, c.noGenSym > 0)
elif contains(c.toMixin, s.name.id):
result = symChoice(c.c, n, s, scForceOpen)
elif s.owner == c.owner and sfGenSym in s.flags:
result = symChoice(c.c, n, s, scForceOpen, c.noGenSym > 0)
elif s.owner == c.owner and sfGenSym in s.flags and c.noGenSym == 0:
# template tmp[T](x: var seq[T]) =
# var yz: T
incl(s.flags, sfUsed)
result = newSymNode(s, n.info)
onUse(n.info, s)
else:
result = semTemplSymbol(c.c, n, s)
result = semTemplSymbol(c.c, n, s, c.noGenSym > 0)
of nkBind:
result = semTemplBody(c, n.sons[0])
of nkBindStmt:
@@ -524,12 +533,27 @@ proc semTemplBody(c: var TemplCtx, n: PNode): PNode =
onUse(n.info, s)
return newSymNode(s, n.info)
elif contains(c.toBind, s.id):
return symChoice(c.c, n, s, scClosed)
return symChoice(c.c, n, s, scClosed, c.noGenSym > 0)
elif contains(c.toMixin, s.name.id):
return symChoice(c.c, n, s, scForceOpen)
return symChoice(c.c, n, s, scForceOpen, c.noGenSym > 0)
else:
return symChoice(c.c, n, s, scOpen)
result = semTemplBodySons(c, n)
return symChoice(c.c, n, s, scOpen, c.noGenSym > 0)
if n.kind == nkDotExpr:
result = n
result.sons[0] = semTemplBody(c, n.sons[0])
inc c.noGenSym
result.sons[1] = semTemplBody(c, n.sons[1])
dec c.noGenSym
else:
result = semTemplBodySons(c, n)
of nkExprColonExpr, nkExprEqExpr:
if n.len == 2:
inc c.noGenSym
result.sons[0] = semTemplBody(c, n.sons[0])
dec c.noGenSym
result.sons[1] = semTemplBody(c, n.sons[1])
else:
result = semTemplBodySons(c, n)
else:
result = semTemplBodySons(c, n)

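The `noGenSym` bookkeeping above implements the dot-expression rule: only the left-hand side of `a.b` may bind to gensym'ed template locals, while the right-hand side stays a plain field name. A small sketch of the behaviour this protects:

```nim
type Obj = object
  field: int

template bump(o: Obj): int =
  var field = 100       # gensym'ed local; must not hijack `o.field` below
  o.field + field

doAssert bump(Obj(field: 1)) == 101
```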
@@ -10,6 +10,8 @@
|
||||
# this module does the semantic checking of type declarations
|
||||
# included from sem.nim
|
||||
|
||||
import math
|
||||
|
||||
const
|
||||
errStringOrIdentNodeExpected = "string or ident node expected"
|
||||
errStringLiteralExpected = "string literal expected"
|
||||
@@ -236,6 +238,10 @@ proc semRangeAux(c: PContext, n: PNode, prev: PType): PType =
|
||||
else:
|
||||
result.n.addSon semConstExpr(c, range[i])
|
||||
|
||||
if (result.n[0].kind in {nkFloatLit..nkFloat64Lit} and classify(result.n[0].floatVal) == fcNan) or
|
||||
(result.n[1].kind in {nkFloatLit..nkFloat64Lit} and classify(result.n[1].floatVal) == fcNan):
|
||||
localError(c.config, n.info, "NaN is not a valid start or end for a range")
|
||||
|
||||
if weakLeValue(result.n[0], result.n[1]) == impNo:
|
||||
localError(c.config, n.info, "range is empty")
|
||||
|
||||
@@ -899,63 +905,59 @@ template shouldHaveMeta(t) =
|
||||
internalAssert c.config, tfHasMeta in t.flags
|
||||
# result.lastSon.flags.incl tfHasMeta
|
||||
|
||||
proc addImplicitGeneric(c: PContext; typeClass: PType, typId: PIdent;
|
||||
info: TLineInfo; genericParams: PNode;
|
||||
paramName: string): PType =
|
||||
if genericParams == nil:
|
||||
# This happens with anonymous proc types appearing in signatures
|
||||
# XXX: we need to lift these earlier
|
||||
return
|
||||
let finalTypId = if typId != nil: typId
|
||||
else: getIdent(c.cache, paramName & ":type")
|
||||
# is this a bindOnce type class already present in the param list?
|
||||
for i in 0 ..< genericParams.len:
|
||||
if genericParams.sons[i].sym.name.id == finalTypId.id:
|
||||
return genericParams.sons[i].typ
|
||||
|
||||
let owner = if typeClass.sym != nil: typeClass.sym
|
||||
else: getCurrOwner(c)
|
||||
var s = newSym(skType, finalTypId, owner, info)
|
||||
if sfExplain in owner.flags: s.flags.incl sfExplain
|
||||
if typId == nil: s.flags.incl(sfAnon)
|
||||
s.linkTo(typeClass)
|
||||
typeClass.flags.incl tfImplicitTypeParam
|
||||
s.position = genericParams.len
|
||||
genericParams.addSon(newSymNode(s))
|
||||
result = typeClass
|
||||
addDecl(c, s)
|
||||
|
||||
proc liftParamType(c: PContext, procKind: TSymKind, genericParams: PNode,
|
||||
paramType: PType, paramName: string,
|
||||
info: TLineInfo, anon = false): PType =
|
||||
if paramType == nil: return # (e.g. proc return type)
|
||||
|
||||
proc addImplicitGenericImpl(c: PContext; typeClass: PType, typId: PIdent): PType =
|
||||
if genericParams == nil:
|
||||
# This happens with anonymous proc types appearing in signatures
|
||||
# XXX: we need to lift these earlier
|
||||
return
|
||||
let finalTypId = if typId != nil: typId
|
||||
else: getIdent(c.cache, paramName & ":type")
|
||||
# is this a bindOnce type class already present in the param list?
|
||||
for i in 0 ..< genericParams.len:
|
||||
if genericParams.sons[i].sym.name.id == finalTypId.id:
|
||||
return genericParams.sons[i].typ
|
||||
|
||||
let owner = if typeClass.sym != nil: typeClass.sym
|
||||
else: getCurrOwner(c)
|
||||
var s = newSym(skType, finalTypId, owner, info)
|
||||
if sfExplain in owner.flags: s.flags.incl sfExplain
|
||||
if typId == nil: s.flags.incl(sfAnon)
|
||||
s.linkTo(typeClass)
|
||||
typeClass.flags.incl tfImplicitTypeParam
|
||||
s.position = genericParams.len
|
||||
genericParams.addSon(newSymNode(s))
|
||||
result = typeClass
|
||||
addDecl(c, s)
|
||||
|
||||
# XXX: There are codegen errors if this is turned into a nested proc
|
||||
template liftingWalk(typ: PType, anonFlag = false): untyped =
|
||||
template recurse(typ: PType, anonFlag = false): untyped =
|
||||
liftParamType(c, procKind, genericParams, typ, paramName, info, anonFlag)
|
||||
#proc liftingWalk(paramType: PType, anon = false): PType =
|
||||
|
||||
var paramTypId = if not anon and paramType.sym != nil: paramType.sym.name
|
||||
else: nil
|
||||
|
||||
template maybeLift(typ: PType): untyped =
|
||||
let lifted = liftingWalk(typ)
|
||||
(if lifted != nil: lifted else: typ)
|
||||
|
||||
template addImplicitGeneric(e): untyped =
|
||||
addImplicitGenericImpl(c, e, paramTypId)
|
||||
|
||||
case paramType.kind:
|
||||
of tyAnything:
|
||||
result = addImplicitGenericImpl(c, newTypeS(tyGenericParam, c), nil)
|
||||
result = addImplicitGeneric(c, newTypeS(tyGenericParam, c), nil, info, genericParams, paramName)
|
||||
|
||||
of tyStatic:
|
||||
if paramType.base.kind != tyNone and paramType.n != nil:
|
||||
# this is a concrete static value
|
||||
return
|
||||
if tfUnresolved in paramType.flags: return # already lifted
|
||||
let base = paramType.base.maybeLift
|
||||
|
||||
let lifted = recurse(paramType.base)
|
||||
let base = (if lifted != nil: lifted else: paramType.base)
|
||||
if base.isMetaType and procKind == skMacro:
|
||||
localError(c.config, info, errMacroBodyDependsOnGenericTypes % paramName)
|
||||
result = addImplicitGeneric(c.newTypeWithSons(tyStatic, @[base]))
|
||||
result = addImplicitGeneric(c, c.newTypeWithSons(tyStatic, @[base]),
|
||||
paramTypId, info, genericParams, paramName)
|
||||
if result != nil: result.flags.incl({tfHasStatic, tfUnresolved})
|
||||
|
||||
of tyTypeDesc:
|
||||
@@ -968,15 +970,15 @@ proc liftParamType(c: PContext, procKind: TSymKind, genericParams: PNode,
|
||||
paramTypId = nil
|
||||
let t = c.newTypeWithSons(tyTypeDesc, @[paramType.base])
|
||||
incl t.flags, tfCheckedForDestructor
|
||||
result = addImplicitGeneric(t)
|
||||
result = addImplicitGeneric(c, t, paramTypId, info, genericParams, paramName)
|
||||
|
||||
of tyDistinct:
|
||||
if paramType.sonsLen == 1:
|
||||
# disable the bindOnce behavior for the type class
|
||||
result = liftingWalk(paramType.base, true)
|
||||
result = recurse(paramType.base, true)
|
||||
|
||||
of tyAlias, tyOwned:
|
||||
result = liftingWalk(paramType.base)
|
||||
result = recurse(paramType.base)
|
||||
|
||||
of tySequence, tySet, tyArray, tyOpenArray,
|
||||
tyVar, tyLent, tyPtr, tyRef, tyProc:
|
||||
@@ -989,12 +991,12 @@ proc liftParamType(c: PContext, procKind: TSymKind, genericParams: PNode,
|
||||
if paramType.kind == tySequence and paramType.lastSon.kind == tyNone:
|
||||
let typ = c.newTypeWithSons(tyBuiltInTypeClass,
|
||||
@[newTypeS(paramType.kind, c)])
|
||||
result = addImplicitGeneric(typ)
|
||||
result = addImplicitGeneric(c, typ, paramTypId, info, genericParams, paramName)
|
||||
else:
|
||||
for i in 0 ..< paramType.len:
|
||||
if paramType.sons[i] == paramType:
|
||||
globalError(c.config, info, errIllegalRecursionInTypeX % typeToString(paramType))
|
||||
var lifted = liftingWalk(paramType.sons[i])
|
||||
var lifted = recurse(paramType.sons[i])
|
||||
if lifted != nil:
|
||||
paramType.sons[i] = lifted
|
||||
result = paramType
|
||||
@@ -1014,29 +1016,29 @@ proc liftParamType(c: PContext, procKind: TSymKind, genericParams: PNode,
|
||||
if paramType.lastSon.kind == tyUserTypeClass:
|
||||
result.kind = tyUserTypeClassInst
|
||||
result.rawAddSon paramType.lastSon
|
||||
return addImplicitGeneric(result)
|
||||
return addImplicitGeneric(c, result, paramTypId, info, genericParams, paramName)
|
||||
|
||||
let x = instGenericContainer(c, paramType.sym.info, result,
|
||||
allowMetaTypes = true)
|
||||
result = newTypeWithSons(c, tyCompositeTypeClass, @[paramType, x])
|
||||
#result = newTypeS(tyCompositeTypeClass, c)
|
||||
#for i in 0..<x.len: result.rawAddSon(x.sons[i])
|
||||
result = addImplicitGeneric(result)
|
||||
result = addImplicitGeneric(c, result, paramTypId, info, genericParams, paramName)
|
||||
|
||||
of tyGenericInst:
|
||||
if paramType.lastSon.kind == tyUserTypeClass:
|
||||
var cp = copyType(paramType, getCurrOwner(c), false)
|
||||
cp.kind = tyUserTypeClassInst
|
||||
return addImplicitGeneric(cp)
|
||||
return addImplicitGeneric(c, cp, paramTypId, info, genericParams, paramName)
|
||||
|
||||
for i in 1 .. paramType.len-2:
|
||||
var lifted = liftingWalk(paramType.sons[i])
|
||||
var lifted = recurse(paramType.sons[i])
|
||||
if lifted != nil:
|
||||
paramType.sons[i] = lifted
|
||||
result = paramType
|
||||
result.lastSon.shouldHaveMeta
|
||||
|
||||
let liftBody = liftingWalk(paramType.lastSon, true)
|
||||
let liftBody = recurse(paramType.lastSon, true)
|
||||
if liftBody != nil:
|
||||
result = liftBody
|
||||
result.flags.incl tfHasMeta
|
||||
@@ -1044,7 +1046,8 @@ proc liftParamType(c: PContext, procKind: TSymKind, genericParams: PNode,
|
||||
|
||||
of tyGenericInvocation:
|
||||
for i in 1 ..< paramType.len:
|
||||
let lifted = liftingWalk(paramType.sons[i])
|
||||
#if paramType[i].kind != tyTypeDesc:
|
||||
let lifted = recurse(paramType.sons[i])
|
||||
if lifted != nil: paramType.sons[i] = lifted
|
||||
|
||||
let body = paramType.base
|
||||
@@ -1056,11 +1059,13 @@ proc liftParamType(c: PContext, procKind: TSymKind, genericParams: PNode,
|
||||
if body.lastSon.kind == tyUserTypeClass:
|
||||
let expanded = instGenericContainer(c, info, paramType,
|
||||
allowMetaTypes = true)
|
||||
result = liftingWalk(expanded, true)
|
||||
result = recurse(expanded, true)
|
||||
|
||||
of tyUserTypeClasses, tyBuiltInTypeClass, tyCompositeTypeClass,
|
||||
tyAnd, tyOr, tyNot:
|
||||
result = addImplicitGeneric(copyType(paramType, getCurrOwner(c), false))
|
||||
result = addImplicitGeneric(c,
|
||||
copyType(paramType, getCurrOwner(c), false), paramTypId,
|
||||
info, genericParams, paramName)
|
||||
|
||||
of tyGenericParam:
|
||||
markUsed(c, paramType.sym.info, paramType.sym)
|
||||
@@ -1071,8 +1076,6 @@ proc liftParamType(c: PContext, procKind: TSymKind, genericParams: PNode,
|
||||
|
||||
else: discard
|
||||
|
||||
# result = liftingWalk(paramType)
|
||||
|
||||
proc semParamType(c: PContext, n: PNode, constraint: var PNode): PType =
|
||||
if n.kind == nkCurlyExpr:
|
||||
result = semTypeNode(c, n.sons[0], nil)
|
||||
@@ -1359,7 +1362,10 @@ proc semGeneric(c: PContext, n: PNode, s: PSym, prev: PType): PType =
|
||||
|
||||
for i in 1 ..< m.call.len:
|
||||
var typ = m.call[i].typ
|
||||
if typ.kind == tyTypeDesc and typ.sons[0].kind == tyNone:
|
||||
# is this a 'typedesc' *parameter*? If so, use the typedesc type,
|
||||
# unstripped.
|
||||
if m.call[i].kind == nkSym and m.call[i].sym.kind == skParam and
|
||||
typ.kind == tyTypeDesc and containsGenericType(typ):
|
||||
isConcrete = false
|
||||
addToResult(typ)
|
||||
else:
|
||||
@@ -1647,7 +1653,7 @@ proc semTypeNode(c: PContext, n: PNode, prev: PType): PType =
|
||||
of mSet: result = semSet(c, n, prev)
|
||||
of mOrdinal: result = semOrdinal(c, n, prev)
|
||||
of mSeq:
|
||||
if c.config.selectedGC == gcDestructors and optNimV2 notin c.config.globalOptions:
|
||||
if false: # c.config.selectedGC == gcDestructors and optNimV2 notin c.config.globalOptions:
|
||||
let s = c.graph.sysTypes[tySequence]
|
||||
assert s != nil
|
||||
assert prev == nil
|
||||
@@ -1793,7 +1799,7 @@ proc semTypeNode(c: PContext, n: PNode, prev: PType): PType =
|
||||
of nkStmtListType: result = semStmtListType(c, n, prev)
|
||||
of nkBlockType: result = semBlockType(c, n, prev)
|
||||
else:
|
||||
localError(c.config, n.info, errTypeExpected)
|
||||
localError(c.config, n.info, "type expected, but got: " & renderTree(n))
|
||||
result = newOrPrevType(tyError, prev, c)
|
||||
n.typ = result
|
||||
dec c.inTypeContext
|
||||
@@ -1813,13 +1819,8 @@ proc setMagicType(conf: ConfigRef; m: PSym, kind: TTypeKind, size: int) =
|
||||
|
||||
# FIXME: proper support for clongdouble should be added.
|
||||
# long double size can be 8, 10, 12, 16 bytes depending on platform & compiler
|
||||
if conf.target.targetCPU == cpuI386 and size == 8:
|
||||
#on Linux/BSD i386, double are aligned to 4bytes (except with -malign-double)
|
||||
if conf.target.targetOS != osWindows:
|
||||
if kind in {tyFloat64, tyFloat, tyInt, tyUInt, tyInt64, tyUInt64}:
|
||||
# on i386 for all known POSIX systems, 64bits ints are aligned
|
||||
# to 4bytes (except with -malign-double)
|
||||
m.typ.align = 4
|
||||
if kind in {tyFloat64, tyFloat, tyInt, tyUInt, tyInt64, tyUInt64} and size == 8:
|
||||
m.typ.align = int16(conf.floatInt64Align)
|
||||
|
||||
proc setMagicIntegral(conf: ConfigRef; m: PSym, kind: TTypeKind, size: int) =
|
||||
setMagicType(conf, m, kind, size)
|
||||
|
||||
@@ -260,12 +260,32 @@ proc replaceTypeVarsS(cl: var TReplTypeVars, s: PSym): PSym =
|
||||
|
||||
#result = PSym(idTableGet(cl.symMap, s))
#if result == nil:
#[

We cannot naively check for symbol recursions, because otherwise
object types A, B would share their fields!

import tables

type
  Table[S, T] = object
    x: S
    y: T

  G[T] = object
    inodes: Table[int, T] # A
    rnodes: Table[T, int] # B

var g: G[string]

]#
|
||||
result = copySym(s)
|
||||
incl(result.flags, sfFromGeneric)
|
||||
#idTablePut(cl.symMap, s, result)
|
||||
result.owner = s.owner
|
||||
result.typ = replaceTypeVarsT(cl, s.typ)
|
||||
result.ast = replaceTypeVarsN(cl, s.ast)
|
||||
if result.kind != skType:
|
||||
result.ast = replaceTypeVarsN(cl, s.ast)
|
||||
|
||||
proc lookupTypeVar(cl: var TReplTypeVars, t: PType): PType =
|
||||
result = cl.typeMap.lookup(t)
|
||||
@@ -685,6 +705,10 @@ proc recomputeFieldPositions*(t: PType; obj: PNode; currPosition: var int) =
|
||||
|
||||
proc generateTypeInstance*(p: PContext, pt: TIdTable, info: TLineInfo,
|
||||
t: PType): PType =
|
||||
# Given `t` like Foo[T]
|
||||
# pt: Table with type mappings: T -> int
|
||||
# Desired result: Foo[int]
|
||||
# proc (x: T = 0); T -> int ----> proc (x: int = 0)
|
||||
var typeMap = initLayeredTypeMap(pt)
|
||||
var cl = initTypeVars(p, addr(typeMap), info, nil)
|
||||
pushInfoContext(p.config, info)
|
||||
|
||||
@@ -401,22 +401,25 @@ proc handleRange(f, a: PType, min, max: TTypeKind): TTypeRelation =
|
||||
else: result = isNone
|
||||
|
||||
proc isConvertibleToRange(f, a: PType): bool =
|
||||
# be less picky for tyRange, as it is used for array indexing:
|
||||
if f.kind in {tyInt..tyInt64, tyUInt..tyUInt64} and
|
||||
a.kind in {tyInt..tyInt64, tyUInt..tyUInt64}:
|
||||
case f.kind
|
||||
of tyInt, tyInt64: result = true
|
||||
of tyInt8: result = a.kind in {tyInt8, tyInt}
|
||||
of tyInt16: result = a.kind in {tyInt8, tyInt16, tyInt}
|
||||
of tyInt32: result = a.kind in {tyInt8, tyInt16, tyInt32, tyInt}
|
||||
of tyUInt, tyUInt64: result = true
|
||||
of tyUInt8: result = a.kind in {tyUInt8, tyUInt}
|
||||
of tyUInt16: result = a.kind in {tyUInt8, tyUInt16, tyUInt}
|
||||
of tyUInt32: result = a.kind in {tyUInt8, tyUInt16, tyUInt32, tyUInt}
|
||||
of tyInt8: result = isIntLit(a) or a.kind in {tyInt8}
|
||||
of tyInt16: result = isIntLit(a) or a.kind in {tyInt8, tyInt16}
|
||||
of tyInt32: result = isIntLit(a) or a.kind in {tyInt8, tyInt16, tyInt32}
|
||||
# This is wrong, but seems like there's a lot of code that relies on it :(
|
||||
of tyInt: result = true
|
||||
of tyInt64: result = isIntLit(a) or a.kind in {tyInt8, tyInt16, tyInt32, tyInt, tyInt64}
|
||||
of tyUInt8: result = isIntLit(a) or a.kind in {tyUInt8}
|
||||
of tyUInt16: result = isIntLit(a) or a.kind in {tyUInt8, tyUInt16}
|
||||
of tyUInt32: result = isIntLit(a) or a.kind in {tyUInt8, tyUInt16, tyUInt32}
|
||||
of tyUInt: result = isIntLit(a) or a.kind in {tyUInt8, tyUInt16, tyUInt32, tyUInt}
|
||||
of tyUInt64: result = isIntLit(a) or a.kind in {tyUInt8, tyUInt16, tyUInt32, tyUInt, tyUInt64}
|
||||
else: result = false
|
||||
elif f.kind in {tyFloat..tyFloat128} and
|
||||
a.kind in {tyFloat..tyFloat128}:
|
||||
result = true
|
||||
elif f.kind in {tyFloat..tyFloat128}:
|
||||
# `isIntLit` is correct and should be used above as well, see PR:
|
||||
# https://github.com/nim-lang/Nim/pull/11197
|
||||
result = isIntLit(a) or a.kind in {tyFloat..tyFloat128}
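At the user level the effect of this table is roughly: integer literals may be used where a narrower integer type or a float type is expected, while a wider variable needs an explicit conversion. An illustrative sketch (not part of the patch):

```nim
let small: int8 = 100    # literal fits the target type, accepted
let f: float = 3         # an int literal converts to a float type
var wide = 100
# let bad: int8 = wide   # would not compile; requires int8(wide)
```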
|
||||
|
||||
proc handleFloatRange(f, a: PType): TTypeRelation =
|
||||
if a.kind == f.kind:
|
||||
@@ -873,7 +876,7 @@ proc inferStaticParam*(c: var TCandidate, lhs: PNode, rhs: BiggestInt): bool =
|
||||
of mUnaryMinusI:
|
||||
return inferStaticParam(c, lhs[1], -rhs)
|
||||
|
||||
of mUnaryPlusI, mToInt, mToBiggestInt:
|
||||
of mUnaryPlusI:
|
||||
return inferStaticParam(c, lhs[1], rhs)
|
||||
|
||||
else: discard
|
||||
@@ -1654,8 +1657,7 @@ proc typeRel(c: var TCandidate, f, aOrig: PType,
|
||||
of tyGenericParam:
|
||||
var x = PType(idTableGet(c.bindings, f))
|
||||
if x == nil:
|
||||
if c.callee.kind == tyGenericBody and
|
||||
f.kind == tyGenericParam and not c.typedescMatched:
|
||||
if c.callee.kind == tyGenericBody and not c.typedescMatched:
|
||||
# XXX: The fact that generic types currently use tyGenericParam for
|
||||
# their parameters is really a misnomer. tyGenericParam means "match
|
||||
# any value" and what we need is "match any type", which can be encoded
|
||||
@@ -1700,6 +1702,11 @@ proc typeRel(c: var TCandidate, f, aOrig: PType,
|
||||
c.inheritancePenalty = oldInheritancePenalty - c.inheritancePenalty -
|
||||
100 * ord(result == isEqual)
|
||||
result = isGeneric
|
||||
elif a.kind == tyTypeDesc:
|
||||
# somewhat special typing rule, the following is illegal:
|
||||
# proc p[T](x: T)
|
||||
# p(int)
|
||||
result = isNone
|
||||
else:
|
||||
result = isGeneric
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
#
|
||||
## code owner: Arne Döring
|
||||
## e-mail: arne.doering@gmx.net
|
||||
## included from types.nim
|
||||
|
||||
proc align(address, alignment: BiggestInt): BiggestInt =
|
||||
result = (address + (alignment - 1)) and not (alignment - 1)
|
||||
@@ -14,7 +15,6 @@ proc align(address, alignment: BiggestInt): BiggestInt =
|
||||
proc align(address, alignment: int): int =
|
||||
result = (address + (alignment - 1)) and not (alignment - 1)
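For orientation, `align` rounds an address up to the next multiple of a power-of-two alignment using the usual bit trick. A minimal illustrative sketch (not part of the patch), with a local copy of the proc:

```nim
proc align(address, alignment: int): int =
  ## round `address` up to the next multiple of `alignment` (a power of two)
  result = (address + (alignment - 1)) and not (alignment - 1)

assert align(13, 8) == 16  # rounds up to the next multiple of 8
assert align(16, 8) == 16  # already aligned values are unchanged
assert align(1, 4) == 4
```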
|
||||
|
||||
|
||||
const
|
||||
## a size is considered "unknown" when it is an imported type from C
|
||||
## or C++.
|
||||
@@ -39,6 +39,13 @@ proc inc(arg: var OffsetAccum; value: int) =
|
||||
else:
|
||||
arg.offset += value
|
||||
|
||||
proc alignmentMax(a,b: int): int =
|
||||
if unlikely(a == szIllegalRecursion or b == szIllegalRecursion): raiseIllegalTypeRecursion()
|
||||
if a == szUnknownSize or b == szUnknownSize:
|
||||
szUnknownSize
|
||||
else:
|
||||
max(a,b)
|
||||
|
||||
proc align(arg: var OffsetAccum; value: int) =
|
||||
if unlikely(value == szIllegalRecursion): raiseIllegalTypeRecursion()
|
||||
if value == szUnknownSize or arg.maxAlign == szUnknownSize or arg.offset == szUnknownSize:
|
||||
@@ -48,11 +55,22 @@ proc align(arg: var OffsetAccum; value: int) =
|
||||
arg.maxAlign = max(value, arg.maxAlign)
|
||||
arg.offset = align(arg.offset, value)
|
||||
|
||||
proc finish(arg: var OffsetAccum) =
|
||||
if arg.maxAlign == szUnknownSize or arg.offset == szUnknownSize:
|
||||
proc mergeBranch(arg: var OffsetAccum; value: OffsetAccum) =
|
||||
if value.maxAlign == szUnknownSize or arg.maxAlign == szUnknownSize or
|
||||
value.offset == szUnknownSize or arg.offset == szUnknownSize:
|
||||
arg.maxAlign = szUnknownSize
|
||||
arg.offset = szUnknownSize
|
||||
else:
|
||||
arg.offset = align(arg.offset, arg.maxAlign)
|
||||
arg.offset = max(arg.offset, value.offset)
|
||||
arg.maxAlign = max(arg.maxAlign, value.maxAlign)
|
||||
|
||||
proc finish(arg: var OffsetAccum): int =
|
||||
if arg.maxAlign == szUnknownSize or arg.offset == szUnknownSize:
|
||||
result = szUnknownSize
|
||||
arg.offset = szUnknownSize
|
||||
else:
|
||||
result = align(arg.offset, arg.maxAlign) - arg.offset
|
||||
arg.offset += result
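Roughly, the accumulator is driven field by field: align to the field's alignment, record the field offset, advance by the field's size, and let `finish` account for the tail padding. A simplified, self-contained sketch of that usage with a hypothetical `addField` helper (illustrative only; the real code also tracks unknown sizes and illegal recursion):

```nim
type OffsetAccum = object
  offset, maxAlign: int

proc align(v, a: int): int = (v + (a - 1)) and not (a - 1)

proc addField(acc: var OffsetAccum; size, alignment: int): int =
  ## returns the field offset and advances the accumulator
  acc.maxAlign = max(acc.maxAlign, alignment)
  acc.offset = align(acc.offset, alignment)
  result = acc.offset
  acc.offset += size

var acc = OffsetAccum(maxAlign: 1)
# layout of: object (a: int8, b: int32, c: int8)
assert acc.addField(1, 1) == 0                # a at offset 0
assert acc.addField(4, 4) == 4                # b padded out to offset 4
assert acc.addField(1, 1) == 8                # c at offset 8
assert align(acc.offset, acc.maxAlign) == 12  # size including tail padding
```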
|
||||
|
||||
proc computeSizeAlign(conf: ConfigRef; typ: PType)
|
||||
|
||||
@@ -93,154 +111,69 @@ proc setOffsetsToUnknown(n: PNode) =
|
||||
for i in 0 ..< safeLen(n):
|
||||
setOffsetsToUnknown(n[i])
|
||||
|
||||
proc computeObjectOffsetsFoldFunction(conf: ConfigRef; n: PNode,
|
||||
initialOffset: BiggestInt): tuple[offset, align: BiggestInt] =
|
||||
proc computeObjectOffsetsFoldFunction(conf: ConfigRef; n: PNode, packed: bool, accum: var OffsetAccum): void =
|
||||
## ``offset`` is the offset within the object after the node has been written; no padding bytes are added
## ``align`` is the maximum alignment from all sub nodes
|
||||
assert n != nil
|
||||
if n.typ != nil and n.typ.size == szIllegalRecursion:
|
||||
result.offset = szIllegalRecursion
|
||||
result.align = szIllegalRecursion
|
||||
return
|
||||
|
||||
result.align = 1
|
||||
raiseIllegalTypeRecursion()
|
||||
case n.kind
|
||||
of nkRecCase:
|
||||
assert(n.sons[0].kind == nkSym)
|
||||
let (kindOffset, kindAlign) = computeObjectOffsetsFoldFunction(conf, n.sons[0], initialOffset)
|
||||
|
||||
var maxChildAlign: BiggestInt = if initialOffset == szUnknownSize: szUnknownSize else: 0
|
||||
for i in 1 ..< sonsLen(n):
|
||||
let child = n.sons[i]
|
||||
case child.kind
|
||||
of nkOfBranch, nkElse:
|
||||
# offset parameter cannot be known yet, it needs to know the alignment first
|
||||
let align = computeSubObjectAlign(conf, n.sons[i].lastSon)
|
||||
if align == szIllegalRecursion:
|
||||
result.offset = szIllegalRecursion
|
||||
result.align = szIllegalRecursion
|
||||
return
|
||||
if align == szUnknownSize or maxChildAlign == szUnknownSize:
|
||||
maxChildAlign = szUnknownSize
|
||||
computeObjectOffsetsFoldFunction(conf, n.sons[0], packed, accum)
|
||||
var maxChildAlign: int = if accum.offset == szUnknownSize: szUnknownSize else: 1
|
||||
if not packed:
|
||||
for i in 1 ..< sonsLen(n):
|
||||
let child = n.sons[i]
|
||||
case child.kind
|
||||
of nkOfBranch, nkElse:
|
||||
# offset parameter cannot be known yet, it needs to know the alignment first
|
||||
let align = int(computeSubObjectAlign(conf, n.sons[i].lastSon))
|
||||
maxChildAlign = alignmentMax(maxChildAlign, align)
|
||||
else:
|
||||
maxChildAlign = max(maxChildAlign, align)
|
||||
else:
|
||||
internalError(conf, "computeObjectOffsetsFoldFunction(record case branch)")
|
||||
internalError(conf, "computeObjectOffsetsFoldFunction(record case branch)")
|
||||
if maxChildAlign == szUnknownSize:
|
||||
setOffsetsToUnknown(n)
|
||||
result.align = szUnknownSize
|
||||
result.offset = szUnknownSize
|
||||
accum.offset = szUnknownSize
|
||||
accum.maxAlign = szUnknownSize
|
||||
else:
|
||||
# the union needs to be aligned first, before the offsets can be assigned
|
||||
let kindUnionOffset = align(kindOffset, maxChildAlign)
|
||||
var maxChildOffset: BiggestInt = 0
|
||||
accum.align(maxChildAlign)
|
||||
let accumRoot = accum # copy, because each branch should start at the same offset
|
||||
for i in 1 ..< sonsLen(n):
|
||||
let (offset, align) = computeObjectOffsetsFoldFunction(conf, n.sons[i].lastSon, kindUnionOffset)
|
||||
maxChildOffset = max(maxChildOffset, offset)
|
||||
result.align = max(kindAlign, maxChildAlign)
|
||||
result.offset = maxChildOffset
|
||||
var branchAccum = accumRoot
|
||||
computeObjectOffsetsFoldFunction(conf, n.sons[i].lastSon, packed, branchAccum)
|
||||
accum.mergeBranch(branchAccum)
|
||||
of nkRecList:
|
||||
result.align = 1 # maximum of all member alignments
|
||||
var offset = initialOffset
|
||||
for i, child in n.sons:
|
||||
let (newOffset, align) = computeObjectOffsetsFoldFunction(conf, child, offset)
|
||||
if newOffset == szIllegalRecursion:
|
||||
result.offset = szIllegalRecursion
|
||||
result.align = szIllegalRecursion
|
||||
return
|
||||
elif newOffset == szUnknownSize or offset == szUnknownSize:
|
||||
# if anything is unknown, the rest becomes unknown as well
|
||||
offset = szUnknownSize
|
||||
result.align = szUnknownSize
|
||||
else:
|
||||
offset = newOffset
|
||||
result.align = max(result.align, align)
|
||||
# final alignment
|
||||
if offset == szUnknownSize:
|
||||
result.offset = szUnknownSize
|
||||
else:
|
||||
result.offset = align(offset, result.align)
|
||||
computeObjectOffsetsFoldFunction(conf, child, packed, accum)
|
||||
of nkSym:
|
||||
var size = szUnknownSize
|
||||
var align = szUnknownSize
|
||||
if n.sym.bitsize == 0: # 0 represents bitsize not set
|
||||
computeSizeAlign(conf, n.sym.typ)
|
||||
size = n.sym.typ.size.int
|
||||
align = n.sym.typ.align.int
|
||||
|
||||
result.align = align
|
||||
if initialOffset == szUnknownSize or size == szUnknownSize or align == szUnknownSize:
|
||||
n.sym.offset = szUnknownSize
|
||||
result.offset = szUnknownSize
|
||||
else:
|
||||
n.sym.offset = align(initialOffset, align).int
|
||||
result.offset = n.sym.offset + n.sym.typ.size
|
||||
align = if packed: 1 else: n.sym.typ.align.int
|
||||
accum.align(align)
|
||||
n.sym.offset = accum.offset
|
||||
accum.inc(size)
|
||||
else:
|
||||
result.align = szUnknownSize
|
||||
result.offset = szUnknownSize
|
||||
accum.maxAlign = szUnknownSize
|
||||
accum.offset = szUnknownSize
|
||||
|
||||
proc computePackedObjectOffsetsFoldFunction(conf: ConfigRef; n: PNode, initialOffset: BiggestInt, debug: bool): BiggestInt =
|
||||
## ``result`` is the offset within the object after the node has been written; no padding bytes are added
|
||||
proc computeUnionObjectOffsetsFoldFunction(conf: ConfigRef; n: PNode; accum: var OffsetAccum) =
|
||||
## ``accum.offset`` will be the offset of the largest member of the union.
|
||||
case n.kind
|
||||
of nkRecCase:
|
||||
assert(n.sons[0].kind == nkSym)
|
||||
let kindOffset = computePackedObjectOffsetsFoldFunction(conf, n.sons[0], initialOffset, debug)
|
||||
# the union needs to be aligned first, before the offsets can be assigned
|
||||
let kindUnionOffset = kindOffset
|
||||
var maxChildOffset: BiggestInt = kindUnionOffset
|
||||
for i in 1 ..< sonsLen(n):
|
||||
let offset = computePackedObjectOffsetsFoldFunction(conf, n.sons[i].lastSon, kindUnionOffset, debug)
|
||||
if offset == szIllegalRecursion:
|
||||
return szIllegalRecursion
|
||||
if offset == szUnknownSize or maxChildOffset == szUnknownSize:
|
||||
maxChildOffset = szUnknownSize
|
||||
else:
|
||||
maxChildOffset = max(maxChildOffset, offset)
|
||||
result = maxChildOffset
|
||||
of nkRecList:
|
||||
result = initialOffset
|
||||
for i, child in n.sons:
|
||||
result = computePackedObjectOffsetsFoldFunction(conf, child, result, debug)
|
||||
if result == szIllegalRecursion:
|
||||
break
|
||||
of nkSym:
|
||||
var size = szUnknownSize
|
||||
if n.sym.bitsize == 0:
|
||||
computeSizeAlign(conf, n.sym.typ)
|
||||
size = n.sym.typ.size.int
|
||||
|
||||
if initialOffset == szUnknownSize or size == szUnknownSize:
|
||||
n.sym.offset = szUnknownSize
|
||||
result = szUnknownSize
|
||||
else:
|
||||
n.sym.offset = int(initialOffset)
|
||||
result = initialOffset + n.sym.typ.size
|
||||
else:
|
||||
result = szUnknownSize
|
||||
|
||||
proc computeUnionObjectOffsetsFoldFunction(conf: ConfigRef; n: PNode, debug: bool): tuple[offset, align: BiggestInt] =
|
||||
## ``result`` is the offset of the largest member of the union.
|
||||
case n.kind
|
||||
of nkRecCase:
|
||||
result.offset = szUnknownSize
|
||||
result.align = szUnknownSize
|
||||
accum.offset = szUnknownSize
|
||||
accum.maxAlign = szUnknownSize
|
||||
localError(conf, n.info, "Illegal use of ``case`` in union type.")
|
||||
#internalError(conf, "Illegal use of ``case`` in union type.")
|
||||
of nkRecList:
|
||||
var maxChildOffset: BiggestInt = 0
|
||||
let accumRoot = accum # copy, because each branch should start at the same offset
|
||||
for i, child in n.sons:
|
||||
let (offset, align) = computeUnionObjectOffsetsFoldFunction(conf, child, debug)
|
||||
if offset == szIllegalRecursion or align == szIllegalRecursion:
|
||||
result.offset = szIllegalRecursion
|
||||
result.align = szIllegalRecursion
|
||||
elif offset == szUnknownSize or align == szUnknownSize:
|
||||
result.offset = szUnknownSize
|
||||
result.align = szUnknownSize
|
||||
else:
|
||||
assert offset != szUncomputedSize
|
||||
assert align != szUncomputedSize
|
||||
result.offset = max(result.offset, offset)
|
||||
result.align = max(result.align, align)
|
||||
var branchAccum = accumRoot
|
||||
computeUnionObjectOffsetsFoldFunction(conf, child, branchAccum)
|
||||
accum.mergeBranch(branchAccum)
|
||||
of nkSym:
|
||||
var size = szUnknownSize
|
||||
var align = szUnknownSize
|
||||
@@ -248,17 +181,12 @@ proc computeUnionObjectOffsetsFoldFunction(conf: ConfigRef; n: PNode, debug: boo
|
||||
computeSizeAlign(conf, n.sym.typ)
|
||||
size = n.sym.typ.size.int
|
||||
align = n.sym.typ.align.int
|
||||
|
||||
result.align = align
|
||||
if size == szUnknownSize:
|
||||
n.sym.offset = szUnknownSize
|
||||
result.offset = szUnknownSize
|
||||
else:
|
||||
n.sym.offset = 0
|
||||
result.offset = n.sym.typ.size
|
||||
accum.align(align)
|
||||
n.sym.offset = accum.offset
|
||||
accum.inc(size)
|
||||
else:
|
||||
result.offset = szUnknownSize
|
||||
result.align = szUnknownSize
|
||||
accum.maxAlign = szUnknownSize
|
||||
accum.offset = szUnknownSize
|
||||
|
||||
proc computeSizeAlign(conf: ConfigRef; typ: PType) =
|
||||
## computes and sets ``size`` and ``align`` members of ``typ``
|
||||
@@ -288,8 +216,7 @@ proc computeSizeAlign(conf: ConfigRef; typ: PType) =
|
||||
# mark computation in progress
|
||||
typ.size = szIllegalRecursion
|
||||
typ.align = szIllegalRecursion
|
||||
|
||||
var maxAlign, sizeAccum, length: BiggestInt
|
||||
typ.paddingAtEnd = 0
|
||||
|
||||
var tk = typ.kind
|
||||
case tk
|
||||
@@ -299,26 +226,23 @@ proc computeSizeAlign(conf: ConfigRef; typ: PType) =
|
||||
else:
|
||||
typ.size = conf.target.ptrSize
|
||||
typ.align = int16(conf.target.ptrSize)
|
||||
|
||||
of tyNil:
|
||||
typ.size = conf.target.ptrSize
|
||||
typ.align = int16(conf.target.ptrSize)
|
||||
|
||||
of tyString:
|
||||
if conf.selectedGC == gcDestructors:
|
||||
typ.size = conf.target.ptrSize * 2
|
||||
else:
|
||||
typ.size = conf.target.ptrSize
|
||||
typ.align = int16(conf.target.ptrSize)
|
||||
|
||||
of tyCString, tySequence, tyPtr, tyRef, tyVar, tyLent, tyOpenArray:
|
||||
let base = typ.lastSon
|
||||
if base == typ:
|
||||
# this is not the correct location to detect ``type A = ptr A``
|
||||
typ.size = szIllegalRecursion
|
||||
typ.align = szIllegalRecursion
|
||||
typ.paddingAtEnd = szIllegalRecursion
|
||||
return
|
||||
|
||||
typ.align = int16(conf.target.ptrSize)
|
||||
if typ.kind == tySequence and conf.selectedGC == gcDestructors:
|
||||
typ.size = conf.target.ptrSize * 2
|
||||
@@ -340,12 +264,13 @@ proc computeSizeAlign(conf: ConfigRef; typ: PType) =
|
||||
computeSizeAlign(conf, base)
|
||||
typ.size = 0
|
||||
typ.align = base.align
|
||||
|
||||
of tyEnum:
|
||||
if firstOrd(conf, typ) < Zero:
|
||||
typ.size = 4 # use signed int32
|
||||
typ.align = 4
|
||||
else:
|
||||
length = toInt64(lastOrd(conf, typ)) # BUGFIX: use lastOrd!
|
||||
let length = toInt64(lastOrd(conf, typ)) # BUGFIX: use lastOrd!
|
||||
if length + 1 < `shl`(1, 8):
|
||||
typ.size = 1
|
||||
typ.align = 1
|
||||
@@ -357,30 +282,37 @@ proc computeSizeAlign(conf: ConfigRef; typ: PType) =
|
||||
typ.align = 4
|
||||
else:
|
||||
typ.size = 8
|
||||
typ.align = 8
|
||||
typ.align = int16(conf.floatInt64Align)
|
||||
of tySet:
|
||||
if typ.sons[0].kind == tyGenericParam:
|
||||
typ.size = szUncomputedSize
|
||||
typ.align = szUncomputedSize # in original version this was 1
|
||||
typ.align = szUncomputedSize
|
||||
else:
|
||||
length = toInt64(lengthOrd(conf, typ.sons[0]))
|
||||
let length = toInt64(lengthOrd(conf, typ.sons[0]))
|
||||
if length <= 8:
|
||||
typ.size = 1
|
||||
typ.align = 1
|
||||
elif length <= 16:
|
||||
typ.size = 2
|
||||
typ.align = 2
|
||||
elif length <= 32:
|
||||
typ.size = 4
|
||||
typ.align = 4
|
||||
elif length <= 64:
|
||||
typ.size = 8
|
||||
typ.align = int16(conf.floatInt64Align)
|
||||
elif align(length, 8) mod 8 == 0:
|
||||
typ.size = align(length, 8) div 8
|
||||
typ.align = int16(conf.floatInt64Align)
|
||||
else:
|
||||
typ.size = align(length, 8) div 8 + 1
|
||||
typ.align = int16(typ.size)
|
||||
typ.align = int16(conf.floatInt64Align)
|
||||
of tyRange:
|
||||
computeSizeAlign(conf, typ.sons[0])
|
||||
typ.size = typ.sons[0].size
|
||||
typ.align = typ.sons[0].align
|
||||
typ.paddingAtEnd = typ.sons[0].paddingAtEnd
|
||||
|
||||
of tyTuple:
|
||||
try:
|
||||
var accum = OffsetAccum(maxAlign: 1)
|
||||
@@ -392,112 +324,121 @@ proc computeSizeAlign(conf: ConfigRef; typ: PType) =
|
||||
let sym = typ.n[i].sym
|
||||
sym.offset = accum.offset
|
||||
accum.inc(int(child.size))
|
||||
accum.finish
|
||||
typ.paddingAtEnd = int16(accum.finish())
|
||||
typ.size = accum.offset
|
||||
typ.align = int16(accum.maxAlign)
|
||||
except IllegalTypeRecursionError:
|
||||
typ.paddingAtEnd = szIllegalRecursion
|
||||
typ.size = szIllegalRecursion
|
||||
typ.align = szIllegalRecursion
|
||||
|
||||
of tyObject:
|
||||
var headerSize: BiggestInt
|
||||
var headerAlign: int16
|
||||
if typ.sons[0] != nil:
|
||||
# compute header size
|
||||
if conf.cmd == cmdCompileToCpp:
|
||||
# if the target is C++ the members of this type are written
|
||||
# into the padding bytes at the end of the parent type. At the
|
||||
# moment it is not supported to calculate that.
|
||||
headerSize = szUnknownSize
|
||||
headerAlign = szUncomputedSize
|
||||
else:
|
||||
var st = typ.sons[0]
|
||||
while st.kind in skipPtrs:
|
||||
st = st.sons[^1]
|
||||
computeSizeAlign(conf, st)
|
||||
if st.size == szIllegalRecursion:
|
||||
typ.size = st.size
|
||||
typ.align = st.align
|
||||
return
|
||||
headerSize = st.size
|
||||
headerAlign = st.align
|
||||
elif isObjectWithTypeFieldPredicate(typ):
|
||||
# this branch is taken for RootObj
|
||||
headerSize = conf.target.intSize
|
||||
headerAlign = conf.target.intSize.int16
|
||||
else:
|
||||
headerSize = 0
|
||||
headerAlign = 1
|
||||
let (offset, align) =
|
||||
try:
|
||||
var accum =
|
||||
if typ.sons[0] != nil:
|
||||
# compute header size
|
||||
var st = typ.sons[0]
|
||||
while st.kind in skipPtrs:
|
||||
st = st.sons[^1]
|
||||
computeSizeAlign(conf, st)
|
||||
if conf.cmd == cmdCompileToCpp:
|
||||
OffsetAccum(
|
||||
offset: int(st.size) - int(st.paddingAtEnd),
|
||||
maxAlign: st.align
|
||||
)
|
||||
else:
|
||||
OffsetAccum(
|
||||
offset: int(st.size),
|
||||
maxAlign: st.align
|
||||
)
|
||||
elif isObjectWithTypeFieldPredicate(typ):
|
||||
# this branch is taken for RootObj
|
||||
OffsetAccum(
|
||||
offset: conf.target.intSize,
|
||||
maxAlign: conf.target.intSize
|
||||
)
|
||||
else:
|
||||
OffsetAccum(maxAlign: 1)
|
||||
if tfUnion in typ.flags:
|
||||
if tfPacked in typ.flags:
|
||||
let info = if typ.sym != nil: typ.sym.info else: unknownLineInfo()
|
||||
localError(conf, info, "type may not be packed and union at the same time.")
|
||||
(BiggestInt(szUnknownSize), BiggestInt(szUnknownSize))
|
||||
localError(conf, info, "union type may not be packed.")
|
||||
accum = OffsetAccum(offset: szUnknownSize, maxAlign: szUnknownSize)
|
||||
elif accum.offset != 0:
|
||||
let info = if typ.sym != nil: typ.sym.info else: unknownLineInfo()
|
||||
localError(conf, info, "union type may not have an object header")
|
||||
accum = OffsetAccum(offset: szUnknownSize, maxAlign: szUnknownSize)
|
||||
else:
|
||||
computeUnionObjectOffsetsFoldFunction(conf, typ.n, false)
|
||||
computeUnionObjectOffsetsFoldFunction(conf, typ.n, accum)
|
||||
elif tfPacked in typ.flags:
|
||||
(computePackedObjectOffsetsFoldFunction(conf, typ.n, headerSize, false), BiggestInt(1))
|
||||
accum.maxAlign = 1
|
||||
computeObjectOffsetsFoldFunction(conf, typ.n, true, accum)
|
||||
else:
|
||||
computeObjectOffsetsFoldFunction(conf, typ.n, headerSize)
|
||||
if offset == szIllegalRecursion:
|
||||
computeObjectOffsetsFoldFunction(conf, typ.n, false, accum)
|
||||
let paddingAtEnd = int16(accum.finish())
|
||||
if typ.sym != nil and
|
||||
typ.sym.flags * {sfCompilerProc, sfImportc} == {sfImportc}:
|
||||
typ.size = szUnknownSize
|
||||
typ.align = szUnknownSize
|
||||
typ.paddingAtEnd = szUnknownSize
|
||||
else:
|
||||
typ.size = accum.offset
|
||||
typ.align = int16(accum.maxAlign)
|
||||
typ.paddingAtEnd = paddingAtEnd
|
||||
except IllegalTypeRecursionError:
|
||||
typ.size = szIllegalRecursion
|
||||
typ.align = szIllegalRecursion
|
||||
return
|
||||
if offset == szUnknownSize or (
|
||||
typ.sym != nil and
|
||||
typ.sym.flags * {sfCompilerProc, sfImportc} == {sfImportc}):
|
||||
typ.size = szUnknownSize
|
||||
typ.align = szUnknownSize
|
||||
return
|
||||
# header size is already in size from computeObjectOffsetsFoldFunction
|
||||
# maxAlign is probably not changed at all from headerAlign
|
||||
if tfPacked in typ.flags:
|
||||
typ.size = offset
|
||||
typ.align = 1
|
||||
else:
|
||||
typ.align = int16(max(align, headerAlign))
|
||||
typ.size = align(offset, typ.align)
|
||||
typ.paddingAtEnd = szIllegalRecursion
|
||||
of tyInferred:
|
||||
if typ.len > 1:
|
||||
computeSizeAlign(conf, typ.lastSon)
|
||||
typ.size = typ.lastSon.size
|
||||
typ.align = typ.lastSon.align
|
||||
typ.paddingAtEnd = typ.lastSon.paddingAtEnd
|
||||
|
||||
of tyGenericInst, tyDistinct, tyGenericBody, tyAlias, tySink, tyOwned:
|
||||
computeSizeAlign(conf, typ.lastSon)
|
||||
typ.size = typ.lastSon.size
|
||||
typ.align = typ.lastSon.align
|
||||
typ.paddingAtEnd = typ.lastSon.paddingAtEnd
|
||||
|
||||
of tyTypeClasses:
|
||||
if typ.isResolvedUserTypeClass:
|
||||
computeSizeAlign(conf, typ.lastSon)
|
||||
typ.size = typ.lastSon.size
|
||||
typ.align = typ.lastSon.align
|
||||
typ.paddingAtEnd = typ.lastSon.paddingAtEnd
|
||||
else:
|
||||
typ.size = szUncomputedSize
|
||||
typ.align = szUncomputedSize
|
||||
typ.size = szUnknownSize
|
||||
typ.align = szUnknownSize
|
||||
typ.paddingAtEnd = szUnknownSize
|
||||
|
||||
of tyTypeDesc:
|
||||
computeSizeAlign(conf, typ.base)
|
||||
typ.size = typ.base.size
|
||||
typ.align = typ.base.align
|
||||
typ.paddingAtEnd = typ.base.paddingAtEnd
|
||||
|
||||
of tyForward:
|
||||
# is this really illegal recursion, or maybe just unknown?
|
||||
typ.size = szIllegalRecursion
|
||||
typ.align = szIllegalRecursion
|
||||
typ.paddingAtEnd = szIllegalRecursion
|
||||
|
||||
of tyStatic:
|
||||
if typ.n != nil:
|
||||
computeSizeAlign(conf, typ.lastSon)
|
||||
typ.size = typ.lastSon.size
|
||||
typ.align = typ.lastSon.align
|
||||
typ.paddingAtEnd = typ.lastSon.paddingAtEnd
|
||||
else:
|
||||
typ.size = szUncomputedSize
|
||||
typ.align = szUncomputedSize
|
||||
typ.size = szUnknownSize
|
||||
typ.align = szUnknownSize
|
||||
typ.paddingAtEnd = szUnknownSize
|
||||
else:
|
||||
typ.size = szUncomputedSize
|
||||
typ.align = szUncomputedSize
|
||||
typ.size = szUnknownSize
|
||||
typ.align = szUnknownSize
|
||||
typ.paddingAtEnd = szUnknownSize
|
||||
|
||||
template foldSizeOf*(conf: ConfigRef; n: PNode; fallback: PNode): PNode =
|
||||
let config = conf
|
||||
|
||||
@@ -261,15 +261,15 @@ proc getQuality(s: PSym): range[0..100] =
|
||||
if exp.kind in {tyUntyped, tyTyped, tyGenericParam, tyAnything}: return 50
|
||||
return 100
|
||||
|
||||
template wholeSymTab(cond, section: untyped) =
|
||||
template wholeSymTab(cond, section: untyped) {.dirty.} =
|
||||
var isLocal = true
|
||||
var scopeN = 0
|
||||
for scope in walkScopes(c.currentScope):
|
||||
if scope == c.topLevelScope: isLocal = false
|
||||
dec scopeN
|
||||
for item in scope.symbols:
|
||||
let it {.inject.} = item
|
||||
var pm {.inject.}: PrefixMatch
|
||||
let it = item
|
||||
var pm: PrefixMatch
|
||||
if cond:
|
||||
outputs.add(symToSuggest(c.config, it, isLocal = isLocal, section, info, getQuality(it),
|
||||
pm, c.inTypeContext > 0, scopeN))
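The `{.dirty.}` pragma turns off hygiene for the whole template, so `it` and `pm` no longer need individual `{.inject.}` marks to be visible to the `cond` and `section` arguments. A small illustrative example of the effect (hypothetical names, not from this patch):

```nim
template countTo(n: untyped) {.dirty.} =
  var counter = 0        # not gensym'ed: declared in the caller's scope
  while counter < n:
    inc counter

countTo(3)
echo counter             # compiles only because the template is dirty
```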
|
||||
|
||||
@@ -371,8 +371,7 @@ proc transformYield(c: PTransf, n: PNode): PTransNode =
|
||||
# c.transCon.forStmt.len == 3 means that there is one for loop variable
|
||||
# and thus no tuple unpacking:
|
||||
if e.typ.isNil: return result # can happen in nimsuggest for unknown reasons
|
||||
if skipTypes(e.typ, {tyGenericInst, tyAlias, tySink}).kind == tyTuple and
|
||||
c.transCon.forStmt.len != 3:
|
||||
if c.transCon.forStmt.len != 3:
|
||||
e = skipConv(e)
|
||||
if e.kind in {nkPar, nkTupleConstr}:
|
||||
for i in 0 ..< sonsLen(e):
|
||||
|
||||
@@ -15,8 +15,14 @@ import
|
||||
|
||||
type
|
||||
TPreferedDesc* = enum
|
||||
preferName, preferDesc, preferExported, preferModuleInfo, preferGenericArg,
|
||||
preferTypeName
|
||||
preferName, # default
|
||||
preferDesc, # probably should become what preferResolved is
|
||||
preferExported,
|
||||
preferModuleInfo, # fully qualified
|
||||
preferGenericArg,
|
||||
preferTypeName,
|
||||
preferResolved, # fully resolved symbols
|
||||
preferMixed, # show symbol + resolved symbols if it differs, eg: seq[cint{int32}, float]
|
||||
|
||||
proc typeToString*(typ: PType; prefer: TPreferedDesc = preferName): string
|
||||
template `$`*(typ: PType): string = typeToString(typ)
|
||||
@@ -121,6 +127,7 @@ proc isFloatLit*(t: PType): bool {.inline.} =
|
||||
|
||||
proc getProcHeader*(conf: ConfigRef; sym: PSym; prefer: TPreferedDesc = preferName; getDeclarationPath = true): string =
|
||||
assert sym != nil
|
||||
# consider using `skipGenericOwner` to avoid fun2.fun2 when fun2 is generic
|
||||
result = sym.owner.name.s & '.' & sym.name.s
|
||||
if sym.kind in routineKinds:
|
||||
result.add '('
|
||||
@@ -414,12 +421,12 @@ proc rangeToStr(n: PNode): string =
|
||||
result = valueToString(n.sons[0]) & ".." & valueToString(n.sons[1])
|
||||
|
||||
const
|
||||
typeToStr: array[TTypeKind, string] = ["None", "bool", "Char", "empty",
|
||||
"Alias", "nil", "untyped", "typed", "typeDesc",
|
||||
typeToStr: array[TTypeKind, string] = ["None", "bool", "char", "empty",
|
||||
"Alias", "typeof(nil)", "untyped", "typed", "typeDesc",
|
||||
"GenericInvocation", "GenericBody", "GenericInst", "GenericParam",
|
||||
"distinct $1", "enum", "ordinal[$1]", "array[$1, $2]", "object", "tuple",
|
||||
"set[$1]", "range[$1]", "ptr ", "ref ", "var ", "seq[$1]", "proc",
|
||||
"pointer", "OpenArray[$1]", "string", "CString", "Forward",
|
||||
"pointer", "OpenArray[$1]", "string", "cstring", "Forward",
|
||||
"int", "int8", "int16", "int32", "int64",
|
||||
"float", "float32", "float64", "float128",
|
||||
"uint", "uint8", "uint16", "uint32", "uint64",
|
||||
@@ -430,7 +437,8 @@ const
|
||||
"and", "or", "not", "any", "static", "TypeFromExpr", "FieldAccessor",
|
||||
"void"]
|
||||
|
||||
const preferToResolveSymbols = {preferName, preferTypeName, preferModuleInfo, preferGenericArg}
|
||||
const preferToResolveSymbols = {preferName, preferTypeName, preferModuleInfo,
|
||||
preferGenericArg, preferResolved, preferMixed}
|
||||
|
||||
template bindConcreteTypeToUserTypeClass*(tc, concrete: PType) =
|
||||
tc.sons.add concrete
|
||||
@@ -449,208 +457,232 @@ proc addTypeFlags(name: var string, typ: PType) {.inline.} =
|
||||
if tfNotNil in typ.flags: name.add(" not nil")
|
||||
|
||||
proc typeToString(typ: PType, prefer: TPreferedDesc = preferName): string =
|
||||
var t = typ
|
||||
result = ""
|
||||
if t == nil: return
|
||||
if prefer in preferToResolveSymbols and t.sym != nil and
|
||||
sfAnon notin t.sym.flags and t.kind != tySequence:
|
||||
if t.kind == tyInt and isIntLit(t):
|
||||
result = t.sym.name.s & " literal(" & $t.n.intVal & ")"
|
||||
elif t.kind == tyAlias and t.sons[0].kind != tyAlias:
|
||||
result = typeToString(t.sons[0])
|
||||
elif prefer in {preferName, preferTypeName} or t.sym.owner.isNil:
|
||||
result = t.sym.name.s
|
||||
if t.kind == tyGenericParam and t.sonsLen > 0:
|
||||
result.add ": "
|
||||
var first = true
|
||||
for son in t.sons:
|
||||
if not first: result.add " or "
|
||||
result.add son.typeToString
|
||||
first = false
|
||||
let preferToplevel = prefer
|
||||
proc getPrefer(prefer: TPreferedDesc): TPreferedDesc =
|
||||
if preferToplevel in {preferResolved, preferMixed}:
|
||||
preferToplevel # sticky option
|
||||
else:
|
||||
result = t.sym.owner.name.s & '.' & t.sym.name.s
|
||||
result.addTypeFlags(t)
|
||||
return
|
||||
case t.kind
|
||||
of tyInt:
|
||||
if not isIntLit(t) or prefer == preferExported:
|
||||
result = typeToStr[t.kind]
|
||||
else:
|
||||
if prefer == preferGenericArg:
|
||||
result = $t.n.intVal
|
||||
prefer
|
||||
|
||||
proc typeToString(typ: PType, prefer: TPreferedDesc = preferName): string =
|
||||
let prefer = getPrefer(prefer)
|
||||
let t = typ
|
||||
result = ""
|
||||
if t == nil: return
|
||||
if prefer in preferToResolveSymbols and t.sym != nil and
|
||||
sfAnon notin t.sym.flags and t.kind != tySequence:
|
||||
if t.kind == tyInt and isIntLit(t):
|
||||
result = t.sym.name.s & " literal(" & $t.n.intVal & ")"
|
||||
elif t.kind == tyAlias and t.sons[0].kind != tyAlias:
|
||||
result = typeToString(t.sons[0])
|
||||
elif prefer in {preferResolved, preferMixed}:
|
||||
case t.kind
|
||||
of IntegralTypes + {tyFloat..tyFloat128} + {tyString, tyCString}:
|
||||
result = typeToStr[t.kind]
|
||||
of tyGenericBody:
|
||||
result = typeToString(t.lastSon)
|
||||
of tyCompositeTypeClass:
|
||||
# avoids showing `A[any]` in `proc fun(a: A)` with `A = object[T]`
|
||||
result = typeToString(t.lastSon.lastSon)
|
||||
else:
|
||||
result = t.sym.name.s
|
||||
if prefer == preferMixed and result != t.sym.name.s:
|
||||
result = t.sym.name.s & "{" & result & "}"
|
||||
elif prefer in {preferName, preferTypeName} or t.sym.owner.isNil:
|
||||
# note: should probably be: {preferName, preferTypeName, preferGenericArg}
|
||||
result = t.sym.name.s
|
||||
if t.kind == tyGenericParam and t.sonsLen > 0:
|
||||
result.add ": "
|
||||
var first = true
|
||||
for son in t.sons:
|
||||
if not first: result.add " or "
|
||||
result.add son.typeToString
|
||||
first = false
|
||||
else:
|
||||
result = "int literal(" & $t.n.intVal & ")"
|
||||
of tyGenericInst, tyGenericInvocation:
|
||||
result = typeToString(t.sons[0]) & '['
|
||||
for i in 1 ..< sonsLen(t)-ord(t.kind != tyGenericInvocation):
|
||||
if i > 1: add(result, ", ")
|
||||
add(result, typeToString(t.sons[i], preferGenericArg))
|
||||
add(result, ']')
|
||||
of tyGenericBody:
|
||||
result = typeToString(t.lastSon) & '['
|
||||
for i in 0 .. sonsLen(t)-2:
|
||||
if i > 0: add(result, ", ")
|
||||
add(result, typeToString(t.sons[i], preferTypeName))
|
||||
add(result, ']')
|
||||
of tyTypeDesc:
|
||||
if t.sons[0].kind == tyNone: result = "typedesc"
|
||||
else: result = "type " & typeToString(t.sons[0])
|
||||
of tyStatic:
|
||||
if prefer == preferGenericArg and t.n != nil:
|
||||
result = t.n.renderTree
|
||||
else:
|
||||
result = "static[" & (if t.len > 0: typeToString(t.sons[0]) else: "") & "]"
|
||||
if t.n != nil: result.add "(" & renderTree(t.n) & ")"
|
||||
of tyUserTypeClass:
|
||||
if t.sym != nil and t.sym.owner != nil:
|
||||
if t.isResolvedUserTypeClass: return typeToString(t.lastSon)
|
||||
return t.sym.owner.name.s
|
||||
else:
|
||||
result = "<invalid tyUserTypeClass>"
|
||||
of tyBuiltInTypeClass:
|
||||
result = case t.base.kind:
|
||||
of tyVar: "var"
|
||||
of tyRef: "ref"
|
||||
of tyPtr: "ptr"
|
||||
of tySequence: "seq"
|
||||
of tyArray: "array"
|
||||
of tySet: "set"
|
||||
of tyRange: "range"
|
||||
of tyDistinct: "distinct"
|
||||
of tyProc: "proc"
|
||||
of tyObject: "object"
|
||||
of tyTuple: "tuple"
|
||||
of tyOpenArray: "openarray"
|
||||
else: typeToStr[t.base.kind]
|
||||
of tyInferred:
|
||||
let concrete = t.previouslyInferred
|
||||
if concrete != nil: result = typeToString(concrete)
|
||||
else: result = "inferred[" & typeToString(t.base) & "]"
|
||||
of tyUserTypeClassInst:
|
||||
let body = t.base
|
||||
result = body.sym.name.s & "["
|
||||
for i in 1 .. sonsLen(t) - 2:
|
||||
if i > 1: add(result, ", ")
|
||||
add(result, typeToString(t.sons[i]))
|
||||
result.add "]"
|
||||
of tyAnd:
|
||||
for i, son in t.sons:
|
||||
result.add(typeToString(son))
|
||||
if i < t.sons.high:
|
||||
result.add(" and ")
|
||||
of tyOr:
|
||||
for i, son in t.sons:
|
||||
result.add(typeToString(son))
|
||||
if i < t.sons.high:
|
||||
result.add(" or ")
|
||||
of tyNot:
|
||||
result = "not " & typeToString(t.sons[0])
|
||||
of tyUntyped:
|
||||
#internalAssert t.len == 0
|
||||
result = "untyped"
|
||||
of tyFromExpr:
|
||||
if t.n == nil:
|
||||
result = "unknown"
|
||||
else:
|
||||
result = "type(" & renderTree(t.n) & ")"
|
||||
of tyArray:
|
||||
if t.sons[0].kind == tyRange:
|
||||
result = "array[" & rangeToStr(t.sons[0].n) & ", " &
|
||||
typeToString(t.sons[1]) & ']'
|
||||
else:
|
||||
result = "array[" & typeToString(t.sons[0]) & ", " &
|
||||
typeToString(t.sons[1]) & ']'
|
||||
of tyUncheckedArray:
|
||||
result = "UncheckedArray[" & typeToString(t.sons[0]) & ']'
|
||||
of tySequence:
|
||||
result = "seq[" & typeToString(t.sons[0]) & ']'
|
||||
of tyOpt:
|
||||
result = "opt[" & typeToString(t.sons[0]) & ']'
|
||||
of tyOrdinal:
|
||||
result = "ordinal[" & typeToString(t.sons[0]) & ']'
|
||||
of tySet:
|
||||
result = "set[" & typeToString(t.sons[0]) & ']'
|
||||
of tyOpenArray:
|
||||
result = "openarray[" & typeToString(t.sons[0]) & ']'
|
||||
of tyDistinct:
|
||||
result = "distinct " & typeToString(t.sons[0],
|
||||
if prefer == preferModuleInfo: preferModuleInfo else: preferTypeName)
|
||||
of tyTuple:
|
||||
# we iterate over t.sons here, because t.n may be nil
|
||||
if t.n != nil:
|
||||
result = "tuple["
|
||||
assert(sonsLen(t.n) == sonsLen(t))
|
||||
for i in 0 ..< sonsLen(t.n):
|
||||
assert(t.n.sons[i].kind == nkSym)
|
||||
add(result, t.n.sons[i].sym.name.s & ": " & typeToString(t.sons[i]))
|
||||
if i < sonsLen(t.n) - 1: add(result, ", ")
|
||||
result = t.sym.owner.name.s & '.' & t.sym.name.s
|
||||
result.addTypeFlags(t)
|
||||
return
|
||||
case t.kind
|
||||
of tyInt:
|
||||
if not isIntLit(t) or prefer == preferExported:
|
||||
result = typeToStr[t.kind]
|
||||
else:
|
||||
if prefer == preferGenericArg:
|
||||
result = $t.n.intVal
|
||||
else:
|
||||
result = "int literal(" & $t.n.intVal & ")"
|
||||
of tyGenericInst, tyGenericInvocation:
|
||||
result = typeToString(t.sons[0]) & '['
|
||||
for i in 1 ..< sonsLen(t)-ord(t.kind != tyGenericInvocation):
|
||||
if i > 1: add(result, ", ")
|
||||
add(result, typeToString(t.sons[i], preferGenericArg))
|
||||
add(result, ']')
|
||||
elif sonsLen(t) == 0:
|
||||
result = "tuple[]"
|
||||
else:
|
||||
if prefer == preferTypeName: result = "("
|
||||
else: result = "tuple of ("
|
||||
for i in 0 ..< sonsLen(t):
|
||||
of tyGenericBody:
|
||||
result = typeToString(t.lastSon) & '['
|
||||
for i in 0 .. sonsLen(t)-2:
|
||||
if i > 0: add(result, ", ")
|
||||
add(result, typeToString(t.sons[i], preferTypeName))
|
||||
add(result, ']')
|
||||
of tyTypeDesc:
|
||||
if t.sons[0].kind == tyNone: result = "typedesc"
|
||||
else: result = "type " & typeToString(t.sons[0])
|
||||
of tyStatic:
|
||||
if prefer == preferGenericArg and t.n != nil:
|
||||
result = t.n.renderTree
|
||||
else:
|
||||
result = "static[" & (if t.len > 0: typeToString(t.sons[0]) else: "") & "]"
|
||||
if t.n != nil: result.add "(" & renderTree(t.n) & ")"
|
||||
of tyUserTypeClass:
|
||||
if t.sym != nil and t.sym.owner != nil:
|
||||
if t.isResolvedUserTypeClass: return typeToString(t.lastSon)
|
||||
return t.sym.owner.name.s
|
||||
else:
|
||||
result = "<invalid tyUserTypeClass>"
|
||||
of tyBuiltInTypeClass:
|
||||
result = case t.base.kind:
|
||||
of tyVar: "var"
|
||||
of tyRef: "ref"
|
||||
of tyPtr: "ptr"
|
||||
of tySequence: "seq"
|
||||
of tyArray: "array"
|
||||
of tySet: "set"
|
||||
of tyRange: "range"
|
||||
of tyDistinct: "distinct"
|
||||
of tyProc: "proc"
|
||||
of tyObject: "object"
|
||||
of tyTuple: "tuple"
|
||||
of tyOpenArray: "openArray"
|
||||
else: typeToStr[t.base.kind]
|
||||
of tyInferred:
|
||||
let concrete = t.previouslyInferred
|
||||
if concrete != nil: result = typeToString(concrete)
|
||||
else: result = "inferred[" & typeToString(t.base) & "]"
|
||||
of tyUserTypeClassInst:
|
||||
let body = t.base
|
||||
result = body.sym.name.s & "["
|
||||
for i in 1 .. sonsLen(t) - 2:
|
||||
if i > 1: add(result, ", ")
|
||||
add(result, typeToString(t.sons[i]))
|
||||
result.add "]"
|
||||
of tyAnd:
|
||||
for i, son in t.sons:
|
||||
result.add(typeToString(son))
|
||||
if i < t.sons.high:
|
||||
result.add(" and ")
|
||||
of tyOr:
|
||||
for i, son in t.sons:
|
||||
result.add(typeToString(son))
|
||||
if i < t.sons.high:
|
||||
result.add(" or ")
|
||||
of tyNot:
|
||||
result = "not " & typeToString(t.sons[0])
|
||||
of tyUntyped:
|
||||
#internalAssert t.len == 0
|
||||
result = "untyped"
|
||||
of tyFromExpr:
|
||||
if t.n == nil:
|
||||
result = "unknown"
|
||||
else:
|
||||
result = "type(" & renderTree(t.n) & ")"
|
||||
of tyArray:
|
||||
if t.sons[0].kind == tyRange:
|
||||
result = "array[" & rangeToStr(t.sons[0].n) & ", " &
|
||||
typeToString(t.sons[1]) & ']'
|
||||
else:
|
||||
result = "array[" & typeToString(t.sons[0]) & ", " &
|
||||
typeToString(t.sons[1]) & ']'
|
||||
of tyUncheckedArray:
|
||||
result = "UncheckedArray[" & typeToString(t.sons[0]) & ']'
|
||||
of tySequence:
|
||||
result = "seq[" & typeToString(t.sons[0]) & ']'
|
||||
of tyOpt:
|
||||
result = "opt[" & typeToString(t.sons[0]) & ']'
|
||||
of tyOrdinal:
|
||||
result = "ordinal[" & typeToString(t.sons[0]) & ']'
|
||||
of tySet:
|
||||
result = "set[" & typeToString(t.sons[0]) & ']'
|
||||
of tyOpenArray:
|
||||
result = "openArray[" & typeToString(t.sons[0]) & ']'
|
||||
of tyDistinct:
|
||||
result = "distinct " & typeToString(t.sons[0],
|
||||
if prefer == preferModuleInfo: preferModuleInfo else: preferTypeName)
|
||||
of tyTuple:
|
||||
# we iterate over t.sons here, because t.n may be nil
|
||||
if t.n != nil:
|
||||
result = "tuple["
|
||||
assert(sonsLen(t.n) == sonsLen(t))
|
||||
for i in 0 ..< sonsLen(t.n):
|
||||
assert(t.n.sons[i].kind == nkSym)
|
||||
add(result, t.n.sons[i].sym.name.s & ": " & typeToString(t.sons[i]))
|
||||
if i < sonsLen(t.n) - 1: add(result, ", ")
|
||||
add(result, ']')
|
||||
elif sonsLen(t) == 0:
|
||||
result = "tuple[]"
|
||||
else:
|
||||
if prefer == preferTypeName: result = "("
|
||||
else: result = "tuple of ("
|
||||
for i in 0 ..< sonsLen(t):
|
||||
add(result, typeToString(t.sons[i]))
|
||||
if i < sonsLen(t) - 1: add(result, ", ")
|
||||
add(result, ')')
|
||||
of tyPtr, tyRef, tyVar, tyLent:
|
||||
result = typeToStr[t.kind]
|
||||
if t.len >= 2:
|
||||
setLen(result, result.len-1)
|
||||
result.add '['
|
||||
for i in 0 ..< sonsLen(t):
|
||||
add(result, typeToString(t.sons[i]))
|
||||
if i < sonsLen(t) - 1: add(result, ", ")
|
||||
result.add ']'
|
||||
else:
|
||||
result.add typeToString(t.sons[0])
|
||||
of tyRange:
|
||||
result = "range "
|
||||
if t.n != nil and t.n.kind == nkRange:
|
||||
result.add rangeToStr(t.n)
|
||||
if prefer != preferExported:
|
||||
result.add("(" & typeToString(t.sons[0]) & ")")
|
||||
of tyProc:
|
||||
result = if tfIterator in t.flags: "iterator "
|
||||
elif t.owner != nil:
|
||||
case t.owner.kind
|
||||
of skTemplate: "template "
|
||||
of skMacro: "macro "
|
||||
of skConverter: "converter "
|
||||
else: "proc "
|
||||
else:
|
||||
"proc "
|
||||
if tfUnresolved in t.flags: result.add "[*missing parameters*]"
|
||||
result.add "("
|
||||
for i in 1 ..< sonsLen(t):
|
||||
if t.n != nil and i < t.n.len and t.n[i].kind == nkSym:
|
||||
add(result, t.n[i].sym.name.s)
|
||||
add(result, ": ")
|
||||
add(result, typeToString(t.sons[i]))
|
||||
if i < sonsLen(t) - 1: add(result, ", ")
|
||||
add(result, ')')
|
||||
of tyPtr, tyRef, tyVar, tyLent:
|
||||
result = typeToStr[t.kind]
|
||||
if t.len >= 2:
|
||||
setLen(result, result.len-1)
|
||||
result.add '['
|
||||
for i in 0 ..< sonsLen(t):
|
||||
add(result, typeToString(t.sons[i]))
|
||||
if i < sonsLen(t) - 1: add(result, ", ")
|
||||
result.add ']'
|
||||
if t.len > 0 and t.sons[0] != nil: add(result, ": " & typeToString(t.sons[0]))
|
||||
var prag = if t.callConv == ccDefault: "" else: CallingConvToStr[t.callConv]
|
||||
if tfNoSideEffect in t.flags:
|
||||
addSep(prag)
|
||||
add(prag, "noSideEffect")
|
||||
if tfThread in t.flags:
|
||||
addSep(prag)
|
||||
add(prag, "gcsafe")
|
||||
if t.lockLevel.ord != UnspecifiedLockLevel.ord:
|
||||
addSep(prag)
|
||||
add(prag, "locks: " & $t.lockLevel)
|
||||
if len(prag) != 0: add(result, "{." & prag & ".}")
|
||||
of tyVarargs:
|
||||
result = typeToStr[t.kind] % typeToString(t.sons[0])
|
||||
of tySink:
|
||||
result = "sink " & typeToString(t.sons[0])
|
||||
of tyOwned:
|
||||
result = "owned " & typeToString(t.sons[0])
|
||||
else:
|
||||
result.add typeToString(t.sons[0])
|
||||
of tyRange:
|
||||
result = "range "
|
||||
if t.n != nil and t.n.kind == nkRange:
|
||||
result.add rangeToStr(t.n)
|
||||
if prefer != preferExported:
|
||||
result.add("(" & typeToString(t.sons[0]) & ")")
|
||||
of tyProc:
|
||||
result = if tfIterator in t.flags: "iterator "
|
||||
elif t.owner != nil:
|
||||
case t.owner.kind
|
||||
of skTemplate: "template "
|
||||
of skMacro: "macro "
|
||||
of skConverter: "converter "
|
||||
else: "proc "
|
||||
else:
|
||||
"proc "
|
||||
if tfUnresolved in t.flags: result.add "[*missing parameters*]"
|
||||
result.add "("
|
||||
for i in 1 ..< sonsLen(t):
|
||||
if t.n != nil and i < t.n.len and t.n[i].kind == nkSym:
|
||||
add(result, t.n[i].sym.name.s)
|
||||
add(result, ": ")
|
||||
add(result, typeToString(t.sons[i]))
|
||||
if i < sonsLen(t) - 1: add(result, ", ")
|
||||
add(result, ')')
|
||||
if t.len > 0 and t.sons[0] != nil: add(result, ": " & typeToString(t.sons[0]))
|
||||
var prag = if t.callConv == ccDefault: "" else: CallingConvToStr[t.callConv]
|
||||
if tfNoSideEffect in t.flags:
|
||||
addSep(prag)
|
||||
add(prag, "noSideEffect")
|
||||
if tfThread in t.flags:
|
||||
addSep(prag)
|
||||
add(prag, "gcsafe")
|
||||
if t.lockLevel.ord != UnspecifiedLockLevel.ord:
|
||||
addSep(prag)
|
||||
add(prag, "locks: " & $t.lockLevel)
|
||||
if len(prag) != 0: add(result, "{." & prag & ".}")
|
||||
of tyVarargs:
|
||||
result = typeToStr[t.kind] % typeToString(t.sons[0])
|
||||
of tySink:
|
||||
result = "sink " & typeToString(t.sons[0])
|
||||
of tyOwned:
|
||||
result = "owned " & typeToString(t.sons[0])
|
||||
else:
|
||||
result = typeToStr[t.kind]
|
||||
result.addTypeFlags(t)
|
||||
result = typeToStr[t.kind]
|
||||
result.addTypeFlags(t)
|
||||
result = typeToString(typ, prefer)
|
||||
|
||||
proc firstOrd*(conf: ConfigRef; t: PType): Int128 =
|
||||
case t.kind
|
||||
@@ -1187,7 +1219,9 @@ type
|
||||
TTypeAllowedFlag* = enum
|
||||
taField,
|
||||
taHeap,
|
||||
taConcept
|
||||
taConcept,
|
||||
taIsOpenArray,
|
||||
taNoUntyped
|
||||
|
||||
TTypeAllowedFlags* = set[TTypeAllowedFlag]
|
||||
|
||||
@@ -1203,8 +1237,8 @@ proc typeAllowedNode(marker: var IntSet, n: PNode, kind: TSymKind,
|
||||
of nkNone..nkNilLit:
|
||||
discard
|
||||
else:
|
||||
if n.kind == nkRecCase and kind in {skProc, skFunc, skConst}:
|
||||
return n[0].typ
|
||||
#if n.kind == nkRecCase and kind in {skProc, skFunc, skConst}:
|
||||
# return n[0].typ
|
||||
for i in 0 ..< sonsLen(n):
|
||||
let it = n.sons[i]
|
||||
result = typeAllowedNode(marker, it, kind, flags)
|
||||
@@ -1239,28 +1273,29 @@ proc typeAllowedAux(marker: var IntSet, typ: PType, kind: TSymKind,
|
||||
case t2.kind
|
||||
of tyVar, tyLent:
|
||||
if taHeap notin flags: result = t2 # ``var var`` is illegal on the heap
|
||||
of tyOpenArray, tyUncheckedArray:
|
||||
of tyOpenArray:
|
||||
if kind != skParam or taIsOpenArray in flags: result = t
|
||||
else: result = typeAllowedAux(marker, t2.sons[0], kind, flags+{taIsOpenArray})
|
||||
of tyUncheckedArray:
|
||||
if kind != skParam: result = t
|
||||
else: result = typeAllowedAux(marker, t2.sons[0], skParam, flags)
|
||||
else: result = typeAllowedAux(marker, t2.sons[0], kind, flags)
|
||||
else:
|
||||
if kind notin {skParam, skResult}: result = t
|
||||
else: result = typeAllowedAux(marker, t2, kind, flags)
|
||||
of tyProc:
|
||||
if kind == skConst and t.callConv == ccClosure:
|
||||
result = t
|
||||
else:
|
||||
for i in 1 ..< sonsLen(t):
|
||||
result = typeAllowedAux(marker, t.sons[i], skParam, flags)
|
||||
if result != nil: break
|
||||
if result.isNil and t.sons[0] != nil:
|
||||
result = typeAllowedAux(marker, t.sons[0], skResult, flags)
|
||||
let f = if kind in {skProc, skFunc}: flags+{taNoUntyped} else: flags
|
||||
for i in 1 ..< sonsLen(t):
|
||||
result = typeAllowedAux(marker, t.sons[i], skParam, f-{taIsOpenArray})
|
||||
if result != nil: break
|
||||
if result.isNil and t.sons[0] != nil:
|
||||
result = typeAllowedAux(marker, t.sons[0], skResult, flags)
|
||||
of tyTypeDesc:
|
||||
# XXX: This is still a horrible idea...
|
||||
result = nil
|
||||
of tyUntyped, tyTyped:
|
||||
if kind notin {skParam, skResult} or taNoUntyped in flags: result = t
|
||||
of tyStatic:
|
||||
if kind notin {skParam}: result = t
|
||||
of tyUntyped, tyTyped:
|
||||
if kind notin {skParam, skResult}: result = t
|
||||
of tyVoid:
|
||||
if taField notin flags: result = t
|
||||
of tyTypeClasses:
|
||||
@@ -1274,7 +1309,7 @@ proc typeAllowedAux(marker: var IntSet, typ: PType, kind: TSymKind,
|
||||
tyNone, tyForward, tyFromExpr:
|
||||
result = t
|
||||
of tyNil:
|
||||
if kind != skConst and kind != skParam: result = t
|
||||
if kind != skConst and kind != skParam and kind != skResult: result = t
|
||||
of tyString, tyBool, tyChar, tyEnum, tyInt..tyUInt64, tyCString, tyPointer:
|
||||
result = nil
|
||||
of tyOrdinal:
|
||||
@@ -1285,30 +1320,31 @@ proc typeAllowedAux(marker: var IntSet, typ: PType, kind: TSymKind,
|
||||
if skipTypes(t.sons[0], abstractInst-{tyTypeDesc}).kind notin
|
||||
{tyChar, tyEnum, tyInt..tyFloat128, tyInt..tyUInt64}: result = t
|
||||
of tyOpenArray, tyVarargs, tySink:
|
||||
if kind != skParam:
|
||||
# you cannot nest openArrays/sinks/etc.
|
||||
if kind != skParam or taIsOpenArray in flags:
|
||||
result = t
|
||||
else:
|
||||
result = typeAllowedAux(marker, t.sons[0], skVar, flags)
|
||||
result = typeAllowedAux(marker, t.sons[0], kind, flags+{taIsOpenArray})
|
||||
of tyUncheckedArray:
|
||||
if kind != skParam and taHeap notin flags:
|
||||
result = t
|
||||
else:
|
||||
result = typeAllowedAux(marker, lastSon(t), kind, flags)
|
||||
result = typeAllowedAux(marker, lastSon(t), kind, flags-{taHeap})
|
||||
of tySequence, tyOpt:
|
||||
if t.sons[0].kind != tyEmpty:
|
||||
result = typeAllowedAux(marker, t.sons[0], skVar, flags+{taHeap})
|
||||
result = typeAllowedAux(marker, t.sons[0], kind, flags+{taHeap})
|
||||
elif kind in {skVar, skLet}:
|
||||
result = t.sons[0]
|
||||
of tyArray:
|
||||
if t.sons[1].kind != tyEmpty:
|
||||
result = typeAllowedAux(marker, t.sons[1], skVar, flags)
|
||||
result = typeAllowedAux(marker, t.sons[1], kind, flags)
|
||||
elif kind in {skVar, skLet}:
|
||||
result = t.sons[1]
|
||||
of tyRef:
|
||||
if kind == skConst: result = t
|
||||
else: result = typeAllowedAux(marker, t.lastSon, skVar, flags+{taHeap})
|
||||
else: result = typeAllowedAux(marker, t.lastSon, kind, flags+{taHeap})
|
||||
of tyPtr:
|
||||
result = typeAllowedAux(marker, t.lastSon, skVar, flags+{taHeap})
|
||||
result = typeAllowedAux(marker, t.lastSon, kind, flags+{taHeap})
|
||||
of tySet:
|
||||
for i in 0 ..< sonsLen(t):
|
||||
result = typeAllowedAux(marker, t.sons[i], kind, flags)
|
||||
@@ -1332,7 +1368,7 @@ proc typeAllowedAux(marker: var IntSet, typ: PType, kind: TSymKind,
|
||||
result = nil
|
||||
of tyOwned:
|
||||
if t.len == 1 and t.sons[0].skipTypes(abstractInst).kind in {tyRef, tyPtr, tyProc}:
|
||||
result = typeAllowedAux(marker, t.lastSon, skVar, flags+{taHeap})
|
||||
result = typeAllowedAux(marker, t.lastSon, kind, flags+{taHeap})
|
||||
else:
|
||||
result = t
|
||||
|
||||
|
||||
@@ -1137,7 +1137,7 @@ proc rawExecute(c: PCtx, start: int, tos: PStackFrame): TFullReg =
|
||||
let node = regs[rb+i].regToNode
|
||||
node.info = c.debug[pc]
|
||||
macroCall.add(node)
|
||||
var a = evalTemplate(macroCall, prc, genSymOwner, c.config)
|
||||
var a = evalTemplate(macroCall, prc, genSymOwner, c.config, c.cache)
|
||||
if a.kind == nkStmtList and a.len == 1: a = a[0]
|
||||
a.recSetFlagIsRef
|
||||
ensureKind(rkNode)
|
||||
@@ -2102,8 +2102,9 @@ proc evalMacroCall*(module: PSym; g: ModuleGraph;
|
||||
|
||||
setupGlobalCtx(module, g)
|
||||
var c = PCtx g.vm
|
||||
let oldMode = c.mode
|
||||
c.mode = emStaticStmt
|
||||
c.comesFromHeuristic.line = 0'u16
|
||||
|
||||
c.callsite = nOrig
|
||||
let start = genProc(c, sym)
|
||||
|
||||
@@ -2142,3 +2143,4 @@ proc evalMacroCall*(module: PSym; g: ModuleGraph;
|
||||
if cyclicTree(result): globalError(c.config, n.info, "macro produced a cyclic tree")
|
||||
dec(g.config.evalMacroCounter)
|
||||
c.callsite = nil
|
||||
c.mode = oldMode
|
||||
|
||||
@@ -1052,8 +1052,7 @@ proc genMagic(c: PCtx; n: PNode; dest: var TDest; m: TMagic) =
|
||||
let t = skipTypes(n.typ, abstractVar-{tyTypeDesc})
|
||||
if t.kind in {tyUInt8..tyUInt32} or (t.kind == tyUInt and t.size < 8):
|
||||
c.gABC(n, opcNarrowU, dest, TRegister(t.size*8))
|
||||
of mToFloat, mToBiggestFloat, mToInt,
|
||||
mToBiggestInt, mCharToStr, mBoolToStr, mIntToStr, mInt64ToStr,
|
||||
of mCharToStr, mBoolToStr, mIntToStr, mInt64ToStr,
|
||||
mFloatToStr, mCStrToStr, mStrToStr, mEnumToStr:
|
||||
genConv(c, n, n.sons[1], dest)
|
||||
of mEqStr, mEqCString: genBinaryABC(c, n, dest, opcEqStr)
|
||||
|
||||
@@ -17,6 +17,8 @@ from os import getEnv, existsEnv, dirExists, fileExists, putEnv, walkDir, getApp
|
||||
from md5 import getMD5
|
||||
from sighashes import symBodyDigest
|
||||
|
||||
from hashes import hash
|
||||
|
||||
template mathop(op) {.dirty.} =
|
||||
registerCallback(c, "stdlib.math." & astToStr(op), `op Wrapper`)
|
||||
|
||||
@@ -157,3 +159,30 @@ proc registerAdditionalOps*(c: PCtx) =
|
||||
stackTrace(c, PStackFrame(prc: c.prc.sym, comesFrom: 0, next: nil), c.exceptionInstr,
|
||||
"isExported() requires a symbol. '" & $n & "' is of kind '" & $n.kind & "'", n.info)
|
||||
setResult(a, sfExported in n.sym.flags)
|
||||
|
||||
proc hashVmImpl(a: VmArgs) =
|
||||
var res = hashes.hash(a.getString(0), a.getInt(1).int, a.getInt(2).int)
|
||||
if c.config.cmd == cmdCompileToJS:
|
||||
# emulate JS's terrible integers:
|
||||
res = cast[int32](res)
|
||||
setResult(a, res)
|
||||
|
||||
registerCallback c, "stdlib.hashes.hashVmImpl", hashVmImpl
|
||||
|
||||
proc hashVmImplByte(a: VmArgs) =
|
||||
# nkBracket[...]
|
||||
let sPos = a.getInt(1).int
|
||||
let ePos = a.getInt(2).int
|
||||
let arr = a.getNode(0)
|
||||
var bytes = newSeq[byte](arr.len)
|
||||
for i in 0 ..< arr.len:
|
||||
bytes[i] = byte(arr[i].intVal and 0xff)
|
||||
|
||||
var res = hashes.hash(bytes, sPos, ePos)
|
||||
if c.config.cmd == cmdCompileToJS:
|
||||
# emulate JS's terrible integers:
|
||||
res = cast[int32](res)
|
||||
setResult(a, res)
|
||||
|
||||
registerCallback c, "stdlib.hashes.hashVmImplByte", hashVmImplByte
|
||||
registerCallback c, "stdlib.hashes.hashVmImplChar", hashVmImplByte
|
||||
|
||||
@@ -48,7 +48,7 @@ type
|
||||
wCompilerProc, wCore, wProcVar, wBase, wUsed,
|
||||
wFatal, wError, wWarning, wHint, wLine, wPush, wPop, wDefine, wUndef,
|
||||
wLineDir, wStackTrace, wLineTrace, wLink, wCompile,
|
||||
wLinksys, wDeprecated, wVarargs, wCallconv, wBreakpoint, wDebugger,
|
||||
wLinksys, wDeprecated, wVarargs, wCallconv, wDebugger,
|
||||
wNimcall, wStdcall, wCdecl, wSafecall, wSyscall, wInline, wNoInline,
|
||||
wFastcall, wClosure, wNoconv, wOn, wOff, wChecks, wRangeChecks,
|
||||
wBoundChecks, wOverflowChecks, wNilChecks,
|
||||
@@ -62,8 +62,7 @@ type
|
||||
wCompileTime, wNoInit,
|
||||
wPassc, wPassl, wBorrow, wDiscardable,
|
||||
wFieldChecks,
|
||||
wWatchPoint, wSubsChar,
|
||||
wAcyclic, wShallow, wUnroll, wLinearScanEnd, wComputedGoto,
|
||||
wSubsChar, wAcyclic, wShallow, wUnroll, wLinearScanEnd, wComputedGoto,
|
||||
wInjectStmt, wExperimental,
|
||||
wWrite, wGensym, wInject, wDirty, wInheritable, wThreadVar, wEmit,
|
||||
wAsmNoStackFrame,
|
||||
@@ -138,7 +137,7 @@ const
|
||||
"fatal", "error", "warning", "hint", "line",
|
||||
"push", "pop", "define", "undef", "linedir", "stacktrace", "linetrace",
|
||||
"link", "compile", "linksys", "deprecated", "varargs",
|
||||
"callconv", "breakpoint", "debugger", "nimcall", "stdcall",
|
||||
"callconv", "debugger", "nimcall", "stdcall",
|
||||
"cdecl", "safecall", "syscall", "inline", "noinline", "fastcall", "closure",
|
||||
"noconv", "on", "off", "checks", "rangechecks", "boundchecks",
|
||||
"overflowchecks", "nilchecks",
|
||||
@@ -152,7 +151,6 @@ const
|
||||
"pragma",
|
||||
"compiletime", "noinit",
|
||||
"passc", "passl", "borrow", "discardable", "fieldchecks",
|
||||
"watchpoint",
|
||||
"subschar", "acyclic", "shallow", "unroll", "linearscanend",
|
||||
"computedgoto", "injectstmt", "experimental",
|
||||
"write", "gensym", "inject", "dirty", "inheritable", "threadvar", "emit",
|
||||
|
||||
@@ -2,3 +2,4 @@
|
||||
|
||||
when defined(nimHasCppDefine):
|
||||
cppDefine "errno"
|
||||
cppDefine "unix"
|
||||
|
||||
@@ -122,7 +122,7 @@ Advanced options:
|
||||
enable experimental language feature
|
||||
--legacy:$2
|
||||
enable obsolete/legacy language feature
|
||||
legacy code.
|
||||
--useVersion:0.19|1.0 emulate Nim version X of the Nim compiler
|
||||
--newruntime use an alternative runtime that uses destructors
|
||||
and that uses a shared heap via -d:useMalloc
|
||||
--profiler:on|off enable profiling; requires `import nimprof`, and
|
||||
|
||||
203
doc/endb.rst
@@ -1,203 +0,0 @@
|
||||
==============================================
|
||||
Embedded Nim Debugger (ENDB) User Guide
|
||||
==============================================
|
||||
|
||||
:Author: Andreas Rumpf
|
||||
:Version: |nimversion|
|
||||
|
||||
.. contents::
|
||||
|
||||
**WARNING**: ENDB is not maintained anymore! Please help if you're interested
|
||||
in this tool.
|
||||
|
||||
Nim comes with a platform independent debugger -
|
||||
the Embedded Nim Debugger (ENDB). The debugger is
|
||||
*embedded* into your executable if it has been
|
||||
compiled with the ``--debugger:on`` command line option.
|
||||
This also defines the conditional symbol ``ENDB`` for you.
|
||||
|
||||
Note: You must not compile your program with the ``--app:gui``
|
||||
command line option because then there would be no console
|
||||
available for the debugger.
|
||||
|
||||
If you start your program the debugger will immediately show
|
||||
a prompt on the console. You can now enter a command. The next sections
|
||||
deal with the possible commands. As usual in Nim in all commands
|
||||
underscores and case do not matter. Optional components of a command
|
||||
are listed in brackets ``[...]`` here.
|
||||
|
||||
|
||||
General Commands
|
||||
================
|
||||
|
||||
``h``, ``help``
|
||||
Display a quick reference of the possible commands.
|
||||
|
||||
``q``, ``quit``
|
||||
Quit the debugger and the program.
|
||||
|
||||
<ENTER>
|
||||
(Without any typed command) repeat the previous debugger command.
|
||||
If there is no previous command, ``step_into`` is assumed.
|
||||
|
||||
Executing Commands
|
||||
==================
|
||||
|
||||
``s``, ``step_into``
|
||||
Single step, stepping into routine calls.
|
||||
|
||||
``n``, ``step_over``
|
||||
Single step, without stepping into routine calls.
|
||||
|
||||
``f``, ``skip_current``
|
||||
Continue execution until the current routine finishes.
|
||||
|
||||
``c``, ``continue``
|
||||
Continue execution until the next breakpoint.
|
||||
|
||||
``i``, ``ignore``
|
||||
Continue execution, ignore all breakpoints. This effectively quits
|
||||
the debugger and runs the program until it finishes.
|
||||
|
||||
|
||||
Breakpoint Commands
|
||||
===================
|
||||
|
||||
``b``, ``setbreak`` [fromline [toline]] [file]
|
||||
Set a new breakpoint for the given file
|
||||
and line numbers. If no file is given, the current execution point's
|
||||
filename is used. If the filename has no extension, ``.nim`` is
|
||||
appended for your convenience.
|
||||
If no line numbers are given, the current execution point's
|
||||
line is used. If both ``fromline`` and ``toline`` are given the
|
||||
breakpoint contains a line number range. Some examples if it is still
|
||||
unclear:
|
||||
|
||||
* ``b 12 15 thallo`` creates a breakpoint that
|
||||
will be triggered if the instruction pointer reaches one of the
|
||||
lines 12-15 in the file ``thallo.nim``.
|
||||
* ``b 12 thallo`` creates a breakpoint that
|
||||
will be triggered if the instruction pointer reaches the
|
||||
line 12 in the file ``thallo.nim``.
|
||||
* ``b 12`` creates a breakpoint that
|
||||
will be triggered if the instruction pointer reaches the
|
||||
line 12 in the current file.
|
||||
* ``b`` creates a breakpoint that
|
||||
will be triggered if the instruction pointer reaches the
|
||||
current line in the current file again.
|
||||
|
||||
``breakpoints``
|
||||
Display the entire breakpoint list.
|
||||
|
||||
``disable`` <identifier>
|
||||
Disable a breakpoint. It remains disabled until you turn it on again
|
||||
with the ``enable`` command.
|
||||
|
||||
``enable`` <identifier>
|
||||
Enable a breakpoint.
|
||||
|
||||
Often it happens when debugging that you keep retyping the breakpoints again
|
||||
and again because they are lost when you restart your program. This is not
|
||||
necessary: A special pragma has been defined for this:
|
||||
|
||||
|
||||
The ``breakpoint`` pragma
|
||||
-------------------------
|
||||
|
||||
The ``breakpoint`` pragma is syntactically a statement. It can be used
|
||||
to mark the *following line* as a breakpoint:
|
||||
|
||||
.. code-block:: Nim
|
||||
write("1")
|
||||
{.breakpoint: "before_write_2".}
|
||||
write("2")
|
||||
|
||||
The name of the breakpoint here is ``before_write_2``. Of course the
|
||||
breakpoint's name is optional - the compiler will generate one for you
|
||||
if you leave it out.
|
||||
|
||||
Code for the ``breakpoint`` pragma is only generated if the debugger
|
||||
is turned on, so you don't need to remove it from your source code after
|
||||
debugging.
|
||||
|
||||
|
||||
The ``watchpoint`` pragma
|
||||
-------------------------
|
||||
|
||||
The ``watchpoint`` pragma is syntactically a statement. It can be used
|
||||
to mark a location as a watchpoint:
|
||||
|
||||
.. code-block:: Nim
|
||||
var a: array[0..20, int]
|
||||
|
||||
{.watchpoint: a[3].}
|
||||
for i in 0 .. 20: a[i] = i
|
||||
|
||||
ENDB then writes a stack trace whenever the content of the location ``a[3]``
|
||||
changes. The current implementation only tracks a hash value of the location's
|
||||
contents and so locations that are not word sized may encounter false
|
||||
negatives in very rare cases.
|
||||
|
||||
Code for the ``watchpoint`` pragma is only generated if the debugger
|
||||
is turned on, so you don't need to remove it from your source code after
|
||||
debugging.
|
||||
|
||||
Due to the primitive implementation watchpoints are even slower than
|
||||
breakpoints: After *every* executed Nim code line it is checked whether the
|
||||
location changed.
|
||||
|
||||
|
||||
Data Display Commands
|
||||
=====================
|
||||
|
||||
``e``, ``eval`` <exp>
|
||||
Evaluate the expression <exp>. Note that ENDB has no full-blown expression
|
||||
evaluator built-in. So expressions are limited:
|
||||
|
||||
* To display global variables prefix their names with their
|
||||
owning module: ``nim1.globalVar``
|
||||
* To display local variables or parameters just type in
|
||||
their name: ``localVar``. If you want to inspect variables that are not
|
||||
in the current stack frame, use the ``up`` or ``down`` command.
|
||||
|
||||
Unfortunately, only inspecting variables is possible at the moment. Maybe
|
||||
a future version will implement a full-blown Nim expression evaluator,
|
||||
but this is not easy to do and would bloat the debugger's code.
|
||||
|
||||
Since displaying the whole data structures is often not needed and
|
||||
painfully slow, the debugger uses a *maximal display depth* concept for
|
||||
displaying.
|
||||
|
||||
You can alter the maximal display depth with the ``maxdisplay``
|
||||
command.
|
||||
|
||||
``maxdisplay`` <natural>
|
||||
Sets the maximal display depth to the given integer value. A value of 0
|
||||
means there is no maximal display depth. Default is 3.
|
||||
|
||||
``o``, ``out`` <filename> <exp>
|
||||
Evaluate the expression <exp> and store its string representation into a
|
||||
file named <filename>. If the file does not exist, it will be created,
|
||||
otherwise it will be opened for appending.
|
||||
|
||||
``w``, ``where``
|
||||
Display the current execution point.
|
||||
|
||||
``u``, ``up``
|
||||
Go up in the call stack.
|
||||
|
||||
``d``, ``down``
|
||||
Go down in the call stack.
|
||||
|
||||
``stackframe`` [file]
|
||||
Displays the content of the current stack frame in ``stdout`` or
|
||||
appends it to the file, depending on whether a file is given.
|
||||
|
||||
``callstack``
|
||||
Display the entire call stack (but not its content).
|
||||
|
||||
``l``, ``locals``
|
||||
Display the available local variables in the current stack frame.
|
||||
|
||||
``g``, ``globals``
|
||||
Display all the global variables that are available for inspection.
|
||||
doc/manual.rst | 199
@@ -721,6 +721,44 @@ Rationale: Consistency with overloaded assignment or assignment-like operations,
|
||||
``a = b`` can be read as ``performSomeCopy(a, b)``.
|
||||
|
||||
|
||||
However, the concept of "order of evaluation" is only applicable after the code
was normalized: the normalization involves template expansions and the
reordering of arguments that were passed to named parameters:

.. code-block:: nim
    :test: "nim c $1"

  var s = ""

  proc p(): int =
    s.add "p"
    result = 5

  proc q(): int =
    s.add "q"
    result = 3

  # Evaluation order is 'b' before 'a' due to template
  # expansion's semantics.
  template swapArgs(a, b): untyped =
    b + a

  doAssert swapArgs(p() + q(), q() - p()) == 6
  doAssert s == "qppq"

  # Evaluation order is not influenced by named parameters:
  proc construct(first, second: int) =
    discard

  # 'p' is evaluated before 'q'!
  construct(second = q(), first = p())

  doAssert s == "qppqpq"


Rationale: This is far easier to implement than hypothetical alternatives.
|
||||
|
||||
|
||||
Constants and Constant Expressions
|
||||
==================================
|
||||
|
||||
@@ -2244,9 +2282,11 @@ algorithm returns true:
|
||||
proc isImplicitlyConvertible(a, b: PType): bool =
|
||||
if isSubtype(a, b) or isCovariant(a, b):
|
||||
return true
|
||||
if isIntLiteral(a):
|
||||
return b in {int8, int16, int32, int64, int, uint, uint8, uint16,
|
||||
uint32, uint64, float32, float64}
|
||||
case a.kind
|
||||
of int: result = b in {int8, int16, int32, int64, uint, uint8, uint16,
|
||||
uint32, uint64, float, float32, float64}
|
||||
of int: result = b in {int32, int64}
|
||||
of int8: result = b in {int16, int32, int64, int}
|
||||
of int16: result = b in {int32, int64, int}
|
||||
of int32: result = b in {int64, int}
|
||||
@@ -2254,9 +2294,8 @@ algorithm returns true:
|
||||
of uint8: result = b in {uint16, uint32, uint64}
|
||||
of uint16: result = b in {uint32, uint64}
|
||||
of uint32: result = b in {uint64}
|
||||
of float: result = b in {float32, float64}
|
||||
of float32: result = b in {float64, float}
|
||||
of float64: result = b in {float32, float}
|
||||
of float32: result = b in {float64}
|
||||
of float64: result = b in {float32}
|
||||
of seq:
|
||||
result = b == openArray and typeEquals(a.baseType, b.baseType)
|
||||
of array:
|
||||
@@ -4410,13 +4449,10 @@ more complex type classes:
|
||||
# create a type class that will match all tuple and object types
|
||||
type RecordType = tuple or object
|
||||
|
||||
proc printFields(rec: RecordType) =
|
||||
proc printFields[T: RecordType](rec: T) =
|
||||
for key, value in fieldPairs(rec):
|
||||
echo key, " = ", value
|
||||
|
||||
Procedures utilizing type classes in such manner are considered to be
|
||||
`implicitly generic`:idx:. They will be instantiated once for each unique
|
||||
combination of param types used within the program.
|
||||
|
||||
Whilst the syntax of type classes appears to resemble that of ADTs/algebraic data
|
||||
types in ML-like languages, it should be understood that type classes are static
|
||||
@@ -4442,6 +4478,26 @@ as `type constraints`:idx: of the generic type parameter:
|
||||
onlyIntOrString(5.0, 0.0) # type mismatch
|
||||
onlyIntOrString("xy", 50) # invalid as 'T' cannot be both at the same time
|
||||
|
||||
|
||||
Implicit generics
-----------------

A type class can be used directly as the parameter's type.

.. code-block:: nim

  # create a type class that will match all tuple and object types
  type RecordType = tuple or object

  proc printFields(rec: RecordType) =
    for key, value in fieldPairs(rec):
      echo key, " = ", value


Procedures utilizing type classes in such manner are considered to be
`implicitly generic`:idx:. They will be instantiated once for each unique
combination of param types used within the program.

By default, during overload resolution each named type class will bind to
exactly one concrete type. We call such type classes `bind once`:idx: types.
Here is an example taken directly from the system module to illustrate this:
|
||||
@@ -4470,26 +4526,73 @@ the dot syntax:
|
||||
proc `[]`(m: Matrix, row, col: int): Matrix.T =
|
||||
m.data[col * high(Matrix.Columns) + row]
|
||||
|
||||
Alternatively, the `type` operator can be used over the proc params for similar
|
||||
effect when anonymous or distinct type classes are used.
|
||||
|
||||
When a generic type is instantiated with a type class instead of a concrete
|
||||
type, this results in another more specific type class:
|
||||
Here are more examples that illustrate implicit generics:
|
||||
|
||||
.. code-block:: nim
|
||||
seq[ref object] # Any sequence storing references to any object type
|
||||
|
||||
type T1 = auto
|
||||
proc foo(s: seq[T1], e: T1)
|
||||
# seq[T1] is the same as just `seq`, but T1 will be allowed to bind
|
||||
# to a single type, while the signature is being matched
|
||||
proc p(t: Table; k: Table.Key): Table.Value
|
||||
|
||||
Matrix[Ordinal] # Any Matrix instantiation using integer values
|
||||
# is roughly the same as:
|
||||
|
||||
As seen in the previous example, in such instantiations, it's not necessary to
|
||||
supply all type parameters of the generic type, because any missing ones will
|
||||
be inferred to have the equivalent of the `any` type class and thus they will
|
||||
match anything without discrimination.
|
||||
proc p[Key, Value](t: Table[Key, Value]; k: Key): Value
|
||||
|
||||
|
||||
.. code-block:: nim
|
||||
|
||||
proc p(a: Table, b: Table)
|
||||
|
||||
# is roughly the same as:
|
||||
|
||||
proc p[Key, Value](a, b: Table[Key, Value])
|
||||
|
||||
|
||||
.. code-block:: nim
|
||||
|
||||
proc p(a: Table, b: distinct Table)
|
||||
|
||||
# is roughly the same as:
|
||||
|
||||
proc p[Key, Value, KeyB, ValueB](a: Table[Key, Value], b: Table[KeyB, ValueB])
|
||||
|
||||
|
||||
`typedesc` used as a parameter type also introduces an implicit
|
||||
generic. `typedesc` has its own set of rules:
|
||||
|
||||
.. code-block:: nim
|
||||
|
||||
proc p(a: typedesc)
|
||||
|
||||
# is roughly the same as:
|
||||
|
||||
proc p[T](a: typedesc[T])
|
||||
|
||||
|
||||
`typedesc` is a "bind many" type class:
|
||||
|
||||
.. code-block:: nim
|
||||
|
||||
proc p(a, b: typedesc)
|
||||
|
||||
# is roughly the same as:
|
||||
|
||||
proc p[T, T2](a: typedesc[T], b: typedesc[T2])
|
||||
|
||||
|
||||
A parameter of type `typedesc` is itself usable as a type. If it is used
|
||||
as a type, it's the underlying type. (In other words, one level
|
||||
of "typedesc"-ness is stripped off:
|
||||
|
||||
.. code-block:: nim
|
||||
|
||||
proc p(a: typedesc; b: a) = discard
|
||||
|
||||
# is roughly the same as:
|
||||
proc p[T](a: typedesc[T]; b: T) = discard
|
||||
|
||||
# hence this is a valid call:
|
||||
p(int, 4)
|
||||
# as parameter 'a' requires a type, but 'b' requires a value.
|
||||
|
||||
|
||||
Generic inference restrictions
|
||||
@@ -4899,6 +5002,45 @@ no semantics outside of a template definition and cannot be abstracted over:
|
||||
To get rid of hygiene in templates, one can use the `dirty`:idx: pragma for
|
||||
a template. ``inject`` and ``gensym`` have no effect in ``dirty`` templates.
|
||||
|
||||
``gensym``'ed symbols cannot be used as ``field`` in the ``x.field`` syntax.
|
||||
Nor can they be used in the ``ObjectConstruction(field: value)``
|
||||
and ``namedParameterCall(field = value)`` syntactic constructs.
|
||||
|
||||
The reason for this is that code like
|
||||
|
||||
.. code-block:: nim
|
||||
:test: "nim c $1"
|
||||
|
||||
type
|
||||
T = object
|
||||
f: int
|
||||
|
||||
template tmp(x: T) =
|
||||
let f = 34
|
||||
echo x.f, T(f: 4)
|
||||
|
||||
|
||||
should work as expected.
|
||||
|
||||
However, this means that the method call syntax is not available for
|
||||
``gensym``'ed symbols:
|
||||
|
||||
.. code-block:: nim
|
||||
:test: "nim c $1"
|
||||
:status: 1
|
||||
|
||||
template tmp(x) =
|
||||
type
|
||||
T {.gensym.} = int
|
||||
|
||||
echo x.T # invalid: instead use: 'echo T(x)'.
|
||||
|
||||
tmp(12)
|
||||
|
||||
|
||||
**Note**: The Nim compiler prior to version 1 was more lenient about this
|
||||
requirement. Use the ``--useVersion:0.19`` switch for a transition period.
|
||||
|
||||
|
||||
|
||||
Limitations of the method call syntax
|
||||
@@ -6135,6 +6277,17 @@ is particularly useful when the symbol was generated by a macro:
|
||||
implementArithOps(int)
|
||||
echoAdd 3, 5
|
||||
|
||||
``used`` can also be used as a top level statement to mark a module as "used".
This prevents the "Unused import" warning:

.. code-block:: nim

  # module: debughelper.nim
  when defined(nimHasUsed):
    # 'import debughelper' is so useful for debugging
    # that Nim shouldn't produce a warning for that import,
    # even if currently unused:
    {.used.}

experimental pragma
|
||||
|
||||
koch.nim | 13
@@ -176,6 +176,7 @@ proc bundleWinTools(args: string) =
|
||||
nimCompile("tools/nimgrab.nim", options = "-d:ssl " & args)
|
||||
nimCompile("tools/nimgrep.nim", options = args)
|
||||
bundleC2nim(args)
|
||||
nimCompile("testament/testament.nim", options = args)
|
||||
when false:
|
||||
# not yet a tool worth including
|
||||
nimCompile(r"tools\downloader.nim",
|
||||
@@ -217,6 +218,8 @@ proc buildTools(args: string = "") =
|
||||
options = "-d:release " & args)
|
||||
nimCompileFold("Compile nimfind", "tools/nimfind.nim",
|
||||
options = "-d:release " & args)
|
||||
nimCompileFold("Compile testament", "testament/testament.nim",
|
||||
options = "-d:release " & args)
|
||||
|
||||
proc nsis(latest: bool; args: string) =
|
||||
bundleNimbleExe(latest, args)
|
||||
@@ -417,8 +420,8 @@ proc winRelease*() =
|
||||
template `|`(a, b): string = (if a.len > 0: a else: b)
|
||||
|
||||
proc tests(args: string) =
|
||||
nimexec "cc --opt:speed testament/tester"
|
||||
let tester = quoteShell(getCurrentDir() / "testament/tester".exe)
|
||||
nimexec "cc --opt:speed testament/testament"
|
||||
let tester = quoteShell(getCurrentDir() / "testament/testament".exe)
|
||||
let success = tryExec tester & " " & (args|"all")
|
||||
if not success:
|
||||
quit("tests failed", QuitFailure)
|
||||
@@ -482,7 +485,7 @@ proc runCI(cmd: string) =
|
||||
kochExecFold("boot -d:release -d:nimHasLibFFI", "boot -d:release -d:nimHasLibFFI")
|
||||
|
||||
if getEnv("NIM_TEST_PACKAGES", "false") == "true":
|
||||
execFold("Test selected Nimble packages", "nim c -r testament/tester cat nimble-packages")
|
||||
execFold("Test selected Nimble packages", "nim c -r testament/testament cat nimble-packages")
|
||||
else:
|
||||
buildTools() # alternatively, kochExec "tools --toolsNoNimble"
|
||||
|
||||
@@ -490,10 +493,10 @@ proc runCI(cmd: string) =
|
||||
execFold("Test nimscript", "nim e tests/test_nimscript.nims")
|
||||
when defined(windows):
|
||||
# note: will be over-written below
|
||||
execFold("Compile tester", "nim c -d:nimCoroutines --os:genode -d:posix --compileOnly testament/tester")
|
||||
execFold("Compile tester", "nim c -d:nimCoroutines --os:genode -d:posix --compileOnly testament/testament")
|
||||
|
||||
# main bottleneck here
|
||||
execFold("Run tester", "nim c -r -d:nimCoroutines testament/tester --pedantic all -d:nimCoroutines")
|
||||
execFold("Run tester", "nim c -r -d:nimCoroutines testament/testament --pedantic all -d:nimCoroutines")
|
||||
|
||||
execFold("Run nimdoc tests", "nim c -r nimdoc/tester")
|
||||
execFold("Run nimpretty tests", "nim c -r nimpretty/tester.nim")
|
||||
|
||||
@@ -60,11 +60,11 @@ template withLock*(a: Lock, body: untyped) =
|
||||
## Acquires the given lock, executes the statements in body and
|
||||
## releases the lock after the statements finish executing.
|
||||
mixin acquire, release
|
||||
a.acquire()
|
||||
acquire(a)
|
||||
{.locks: [a].}:
|
||||
try:
|
||||
body
|
||||
finally:
|
||||
a.release()
|
||||
release(a)
|
||||
|
||||
{.pop.}
|
||||
|
||||
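The hunk above switches ``withLock`` from method-call syntax to plain calls
(``acquire(a)``, ``release(a)``) combined with ``mixin``, so user-supplied lock
types can provide their own ``acquire``/``release``. A minimal usage sketch,
not part of the diff (and assuming ``--threads:on`` where the platform requires
it for ``Lock``):

```nim
import locks

var
  counterLock: Lock
  counter = 0

initLock(counterLock)

withLock counterLock:
  # the lock is held for the duration of this block and
  # released even if the body raises
  inc counter

deinitLock(counterLock)
```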
@@ -202,7 +202,7 @@ when false:
|
||||
assert i < x.len
|
||||
x.data[i] = y
|
||||
|
||||
proc `@`*[T](elems: openArray[T]): NimSeqV2[T] =
|
||||
proc `@`*[T](elems: sink openArray[T]): NimSeqV2[T] =
|
||||
result.cap = elems.len
|
||||
result.len = elems.len
|
||||
result.data = cast[type(result.data)](alloc(result.cap * sizeof(T)))
|
||||
|
||||
@@ -9,20 +9,6 @@
|
||||
|
||||
## Default new string implementation used by Nim's core.
|
||||
|
||||
when false:
|
||||
# these are to be implemented or changed in the code generator.
|
||||
|
||||
#proc rawNewStringNoInit(space: int): NimString {.compilerproc.}
|
||||
# seems to be unused.
|
||||
proc copyDeepString(src: NimString): NimString {.inline.}
|
||||
# ----------------- sequences ----------------------------------------------
|
||||
|
||||
proc incrSeqV3(s: PGenericSeq, typ: PNimType): PGenericSeq {.compilerproc.}
|
||||
proc setLengthSeqV2(s: PGenericSeq, typ: PNimType, newLen: int): PGenericSeq {.
|
||||
compilerRtl.}
|
||||
proc newSeq(typ: PNimType, len: int): pointer {.compilerRtl.}
|
||||
proc newSeqRC1(typ: PNimType, len: int): pointer {.compilerRtl.}
|
||||
|
||||
import allocators
|
||||
|
||||
type
|
||||
@@ -45,41 +31,6 @@ template frees(s) =
|
||||
if not isLiteral(s):
|
||||
s.p.allocator.dealloc(s.p.allocator, s.p, contentSize(s.p.cap))
|
||||
|
||||
when not defined(nimV2):
|
||||
proc `=destroy`(s: var string) =
|
||||
var a = cast[ptr NimStringV2](addr s)
|
||||
frees(a)
|
||||
a.len = 0
|
||||
a.p = nil
|
||||
|
||||
proc `=sink`(x: var string, y: string) =
|
||||
var a = cast[ptr NimStringV2](addr x)
|
||||
var b = cast[ptr NimStringV2](unsafeAddr y)
|
||||
# we hope this is optimized away for not yet alive objects:
|
||||
if unlikely(a.p == b.p): return
|
||||
frees(a)
|
||||
a.len = b.len
|
||||
a.p = b.p
|
||||
|
||||
proc `=`(x: var string, y: string) =
|
||||
var a = cast[ptr NimStringV2](addr x)
|
||||
var b = cast[ptr NimStringV2](unsafeAddr y)
|
||||
if unlikely(a.p == b.p): return
|
||||
frees(a)
|
||||
a.len = b.len
|
||||
if isLiteral(b):
|
||||
# we can shallow copy literals:
|
||||
a.p = b.p
|
||||
else:
|
||||
let allocator = if a.p != nil and a.p.allocator != nil: a.p.allocator else: getLocalAllocator()
|
||||
# we have to allocate the 'cap' here, consider
|
||||
# 'let y = newStringOfCap(); var x = y'
|
||||
# on the other hand... These get turned into moves now.
|
||||
a.p = cast[ptr NimStrPayload](allocator.alloc(allocator, contentSize(b.len)))
|
||||
a.p.allocator = allocator
|
||||
a.p.cap = b.len
|
||||
copyMem(unsafeAddr a.p.data[0], unsafeAddr b.p.data[0], b.len+1)
|
||||
|
||||
proc resize(old: int): int {.inline.} =
|
||||
if old <= 0: result = 4
|
||||
elif old < 65536: result = old * 2
|
||||
|
||||
@@ -33,46 +33,6 @@ when defined(Windows):
|
||||
stdout.write(prompt)
|
||||
result = readLine(stdin, line)
|
||||
|
||||
import winlean
|
||||
|
||||
const
|
||||
VK_SHIFT* = 16
|
||||
VK_CONTROL* = 17
|
||||
VK_MENU* = 18
|
||||
KEY_EVENT* = 1
|
||||
|
||||
type
|
||||
KEY_EVENT_RECORD = object
|
||||
bKeyDown: WINBOOL
|
||||
wRepeatCount: uint16
|
||||
wVirtualKeyCode: uint16
|
||||
wVirtualScanCode: uint16
|
||||
unicodeChar: uint16
|
||||
dwControlKeyState: uint32
|
||||
INPUT_RECORD = object
|
||||
eventType*: int16
|
||||
reserved*: int16
|
||||
event*: KEY_EVENT_RECORD
|
||||
safetyBuffer: array[0..5, DWORD]
|
||||
|
||||
proc readConsoleInputW*(hConsoleInput: Handle, lpBuffer: var INPUT_RECORD,
|
||||
nLength: uint32,
|
||||
lpNumberOfEventsRead: var uint32): WINBOOL{.
|
||||
stdcall, dynlib: "kernel32", importc: "ReadConsoleInputW".}
|
||||
|
||||
proc getch(): uint16 =
|
||||
let hStdin = getStdHandle(STD_INPUT_HANDLE)
|
||||
var
|
||||
irInputRecord: INPUT_RECORD
|
||||
dwEventsRead: uint32
|
||||
|
||||
while readConsoleInputW(hStdin, irInputRecord, 1, dwEventsRead) != 0:
|
||||
if irInputRecord.eventType == KEY_EVENT and
|
||||
irInputRecord.event.wVirtualKeyCode notin {VK_SHIFT, VK_MENU, VK_CONTROL}:
|
||||
result = irInputRecord.event.unicodeChar
|
||||
discard readConsoleInputW(hStdin, irInputRecord, 1, dwEventsRead)
|
||||
return result
|
||||
|
||||
elif defined(genode):
|
||||
proc readLineFromStdin*(prompt: string): TaintedString {.
|
||||
tags: [ReadIOEffect, WriteIOEffect].} =
|
||||
|
||||
@@ -374,18 +374,6 @@ typedef char* NCSTRING;
|
||||
# define NIM_IMAN 0
|
||||
#endif
|
||||
|
||||
static N_INLINE(NI, float64ToInt32)(double x) {
|
||||
/* nowadays no hack necessary anymore */
|
||||
return x >= 0 ? (NI)(x+0.5) : (NI)(x-0.5);
|
||||
}
|
||||
|
||||
static N_INLINE(NI32, float32ToInt32)(float x) {
|
||||
/* nowadays no hack necessary anymore */
|
||||
return x >= 0 ? (NI32)(x+0.5) : (NI32)(x-0.5);
|
||||
}
|
||||
|
||||
#define float64ToInt64(x) ((NI64) (x))
|
||||
|
||||
#define NIM_STRLIT_FLAG ((NU)(1) << ((NIM_INTBITS) - 2)) /* This has to be the same as system.strlitFlag! */
|
||||
|
||||
#define STRING_LITERAL(name, str, length) \
|
||||
|
||||
@@ -111,6 +111,7 @@
|
||||
# - ARM support for the trampolines
|
||||
# - investigate:
|
||||
# - soon the system module might be importing other modules - the init order...?
|
||||
# (revert https://github.com/nim-lang/Nim/pull/11971 when working on this)
|
||||
# - rethink the closure iterators
|
||||
# - ability to keep old versions of dynamic libraries alive
|
||||
# - because of async server code
|
||||
@@ -421,7 +422,7 @@ when defined(createNimHcr):
|
||||
modules.add(name, newModuleDesc())
|
||||
|
||||
let copiedName = name & ".copy." & dllExt
|
||||
copyFile(name, copiedName)
|
||||
copyFileWithPermissions(name, copiedName)
|
||||
|
||||
let lib = loadLib(copiedName)
|
||||
assert lib != nil
|
||||
|
||||
@@ -372,7 +372,7 @@ when isMainModule:
|
||||
doAssert((a+b) == z)
|
||||
doAssert((a+b) =~ 0.0)
|
||||
doAssert((a/b) == m1)
|
||||
doAssert((1.0/a) == complex(0.2, -0.4))
|
||||
doAssert((1.0/a) =~ complex(0.2, -0.4))
|
||||
doAssert((a*b) == complex(3.0, -4.0))
|
||||
doAssert(10.0*a == tt)
|
||||
doAssert(a*10.0 == tt)
|
||||
|
||||
@@ -49,9 +49,6 @@ type
|
||||
## always have a size of a power of two and can use the ``and``
|
||||
## operator instead of ``mod`` for truncation of the hash value.
|
||||
|
||||
const
|
||||
IntSize = sizeof(int)
|
||||
|
||||
proc `!&`*(h: Hash, val: int): Hash {.inline.} =
|
||||
## Mixes a hash value `h` with `val` to produce a new hash value.
|
||||
##
|
||||
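For context on the ``!&`` mixing operator shown above: the usual pattern for
hashing a user-defined type is to mix the hashes of its fields with ``!&`` and
finish with ``!$``. A small illustrative sketch, not part of the diff:

```nim
import hashes

type Point = object
  x, y: int

proc hash(p: Point): Hash =
  # mix the field hashes, then finalize the combined value
  result = result !& hash(p.x)
  result = result !& hash(p.y)
  result = !$result
```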
@@ -108,13 +105,12 @@ proc hash*(x: pointer): Hash {.inline.} =
|
||||
else:
|
||||
result = cast[Hash](cast[uint](x) shr 3) # skip the alignment
|
||||
|
||||
when not defined(booting):
|
||||
proc hash*[T: proc](x: T): Hash {.inline.} =
|
||||
## Efficient hashing of proc vars. Closures are supported too.
|
||||
when T is "closure":
|
||||
result = hash(rawProc(x)) !& hash(rawEnv(x))
|
||||
else:
|
||||
result = hash(pointer(x))
|
||||
proc hash*[T: proc](x: T): Hash {.inline.} =
|
||||
## Efficient hashing of proc vars. Closures are supported too.
|
||||
when T is "closure":
|
||||
result = hash(rawProc(x)) !& hash(rawEnv(x))
|
||||
else:
|
||||
result = hash(pointer(x))
|
||||
|
||||
proc hash*(x: int): Hash {.inline.} =
|
||||
## Efficient hashing of integers.
|
||||
@@ -151,27 +147,87 @@ proc hash*(x: float): Hash {.inline.} =
|
||||
proc hash*[A](x: openArray[A]): Hash
|
||||
proc hash*[A](x: set[A]): Hash
|
||||
|
||||
template bytewiseHashing(result: Hash, x: typed, start, stop: int) =
|
||||
for i in start .. stop:
|
||||
result = result !& hash(x[i])
|
||||
result = !$result
|
||||
|
||||
template hashImpl(result: Hash, x: typed, start, stop: int) =
|
||||
when defined(JS):
|
||||
proc imul(a, b: uint32): uint32 =
|
||||
# https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/imul
|
||||
let mask = 0xffff'u32
|
||||
var
|
||||
aHi = (a shr 16) and mask
|
||||
aLo = a and mask
|
||||
bHi = (b shr 16) and mask
|
||||
bLo = b and mask
|
||||
result = (aLo * bLo) + (aHi * bLo + aLo * bHi) shl 16
|
||||
else:
|
||||
template imul(a, b: uint32): untyped = a * b
|
||||
|
||||
proc rotl32(x: uint32, r: int): uint32 {.inline.} =
|
||||
(x shl r) or (x shr (32 - r))
|
||||
|
||||
proc murmurHash(x: openArray[byte]): Hash =
|
||||
# https://github.com/PeterScott/murmur3/blob/master/murmur3.c
|
||||
const
|
||||
c1 = 0xcc9e2d51'u32
|
||||
c2 = 0x1b873593'u32
|
||||
n1 = 0xe6546b64'u32
|
||||
m1 = 0x85ebca6b'u32
|
||||
m2 = 0xc2b2ae35'u32
|
||||
let
|
||||
elementSize = sizeof(x[start])
|
||||
stepSize = IntSize div elementSize
|
||||
var i = start
|
||||
while i <= stop+1 - stepSize:
|
||||
var n = 0
|
||||
when nimvm:
|
||||
# we cannot cast in VM, so we do it manually
|
||||
for j in countdown(stepSize-1, 0):
|
||||
n = (n shl (8*elementSize)) or ord(x[i+j])
|
||||
size = len(x)
|
||||
stepSize = 4 # 32-bit
|
||||
n = size div stepSize
|
||||
var
|
||||
h1: uint32
|
||||
i = 0
|
||||
|
||||
# body
|
||||
while i < n * stepSize:
|
||||
var k1: uint32
|
||||
when defined(js):
|
||||
var j = stepSize
|
||||
while j > 0:
|
||||
dec j
|
||||
k1 = (k1 shl 8) or (ord(x[i+j])).uint32
|
||||
else:
|
||||
n = cast[ptr Hash](unsafeAddr x[i])[]
|
||||
result = result !& n
|
||||
i += stepSize
|
||||
bytewiseHashing(result, x, i, stop) # hash the remaining elements and finish
|
||||
k1 = cast[ptr uint32](unsafeAddr x[i])[]
|
||||
inc i, stepSize
|
||||
|
||||
k1 = imul(k1, c1)
|
||||
k1 = rotl32(k1, 15)
|
||||
k1 = imul(k1, c2)
|
||||
|
||||
h1 = h1 xor k1
|
||||
h1 = rotl32(h1, 13)
|
||||
h1 = h1*5 + n1
|
||||
|
||||
# tail
|
||||
var k1: uint32
|
||||
var rem = size mod stepSize
|
||||
while rem > 0:
|
||||
dec rem
|
||||
k1 = (k1 shl 8) or (ord(x[i+rem])).uint32
|
||||
k1 = imul(k1, c1)
|
||||
k1 = rotl32(k1, 15)
|
||||
k1 = imul(k1, c2)
|
||||
h1 = h1 xor k1
|
||||
|
||||
# finalization
|
||||
h1 = h1 xor size.uint32
|
||||
h1 = h1 xor (h1 shr 16)
|
||||
h1 = imul(h1, m1)
|
||||
h1 = h1 xor (h1 shr 13)
|
||||
h1 = imul(h1, m2)
|
||||
h1 = h1 xor (h1 shr 16)
|
||||
return cast[Hash](h1)
|
||||
|
||||
proc hashVmImpl(x: string, sPos, ePos: int): Hash =
|
||||
doAssert false, "implementation override in compiler/vmops.nim"
|
||||
|
||||
proc hashVmImplChar(x: openArray[char], sPos, ePos: int): Hash =
|
||||
doAssert false, "implementation override in compiler/vmops.nim"
|
||||
|
||||
proc hashVmImplByte(x: openArray[byte], sPos, ePos: int): Hash =
|
||||
doAssert false, "implementation override in compiler/vmops.nim"
|
||||
|
||||
proc hash*(x: string): Hash =
|
||||
## Efficient hashing of strings.
|
||||
@@ -182,7 +238,16 @@ proc hash*(x: string): Hash =
|
||||
runnableExamples:
|
||||
doAssert hash("abracadabra") != hash("AbracadabrA")
|
||||
|
||||
hashImpl(result, x, 0, high(x))
|
||||
when not defined(nimToOpenArrayCString):
|
||||
result = 0
|
||||
for c in x:
|
||||
result = result !& ord(c)
|
||||
result = !$result
|
||||
else:
|
||||
when nimvm:
|
||||
result = hashVmImpl(x, 0, high(x))
|
||||
else:
|
||||
result = murmurHash(toOpenArrayByte(x, 0, high(x)))
|
||||
|
||||
proc hash*(x: cstring): Hash =
|
||||
## Efficient hashing of null-terminated strings.
|
||||
@@ -191,7 +256,19 @@ proc hash*(x: cstring): Hash =
|
||||
doAssert hash(cstring"AbracadabrA") == hash("AbracadabrA")
|
||||
doAssert hash(cstring"abracadabra") != hash(cstring"AbracadabrA")
|
||||
|
||||
hashImpl(result, x, 0, high(x))
|
||||
when not defined(nimToOpenArrayCString):
|
||||
result = 0
|
||||
var i = 0
|
||||
while x[i] != '\0':
|
||||
result = result !& ord(x[i])
|
||||
inc i
|
||||
result = !$result
|
||||
else:
|
||||
when not defined(JS) and defined(nimToOpenArrayCString):
|
||||
murmurHash(toOpenArrayByte(x, 0, x.high))
|
||||
else:
|
||||
let xx = $x
|
||||
murmurHash(toOpenArrayByte(xx, 0, high(xx)))
|
||||
|
||||
proc hash*(sBuf: string, sPos, ePos: int): Hash =
|
||||
## Efficient hashing of a string buffer, from starting
|
||||
@@ -202,7 +279,13 @@ proc hash*(sBuf: string, sPos, ePos: int): Hash =
|
||||
var a = "abracadabra"
|
||||
doAssert hash(a, 0, 3) == hash(a, 7, 10)
|
||||
|
||||
hashImpl(result, sBuf, sPos, ePos)
|
||||
when not defined(nimToOpenArrayCString):
|
||||
result = 0
|
||||
for i in sPos..ePos:
|
||||
result = result !& ord(sBuf[i])
|
||||
result = !$result
|
||||
else:
|
||||
murmurHash(toOpenArrayByte(sBuf, sPos, ePos))
|
||||
|
||||
proc hashIgnoreStyle*(x: string): Hash =
|
||||
## Efficient hashing of strings; style is ignored.
|
||||
@@ -300,12 +383,20 @@ proc hash*[T: tuple](x: T): Hash =
|
||||
result = result !& hash(f)
|
||||
result = !$result
|
||||
|
||||
|
||||
proc hash*[A](x: openArray[A]): Hash =
|
||||
## Efficient hashing of arrays and sequences.
|
||||
when A is char|SomeInteger:
|
||||
hashImpl(result, x, 0, x.high)
|
||||
when A is byte:
|
||||
result = murmurHash(x)
|
||||
elif A is char:
|
||||
when nimvm:
|
||||
result = hashVmImplChar(x, 0, x.high)
|
||||
else:
|
||||
result = murmurHash(toOpenArrayByte(x, 0, x.high))
|
||||
else:
|
||||
bytewiseHashing(result, x, 0, x.high)
|
||||
for a in x:
|
||||
result = result !& hash(a)
|
||||
result = !$result
|
||||
|
||||
proc hash*[A](aBuf: openArray[A], sPos, ePos: int): Hash =
|
||||
## Efficient hashing of portions of arrays and sequences, from starting
|
||||
@@ -316,10 +407,20 @@ proc hash*[A](aBuf: openArray[A], sPos, ePos: int): Hash =
|
||||
let a = [1, 2, 5, 1, 2, 6]
|
||||
doAssert hash(a, 0, 1) == hash(a, 3, 4)
|
||||
|
||||
when A is char|SomeInteger:
|
||||
hashImpl(result, aBuf, sPos, ePos)
|
||||
when A is byte:
|
||||
when nimvm:
|
||||
result = hashVmImplByte(aBuf, sPos, ePos)
|
||||
else:
|
||||
result = murmurHash(toOpenArray(aBuf, sPos, ePos))
|
||||
elif A is char:
|
||||
when nimvm:
|
||||
result = hashVmImplChar(aBuf, sPos, ePos)
|
||||
else:
|
||||
result = murmurHash(toOpenArrayByte(aBuf, sPos, ePos))
|
||||
else:
|
||||
bytewiseHashing(result, aBuf, sPos, ePos)
|
||||
for i in sPos .. ePos:
|
||||
result = result !& hash(aBuf[i])
|
||||
result = !$result
|
||||
|
||||
proc hash*[A](x: set[A]): Hash =
|
||||
## Efficient hashing of sets.
|
||||
@@ -334,11 +435,15 @@ when isMainModule:
|
||||
a = ""
|
||||
b = newSeq[char]()
|
||||
c = newSeq[int]()
|
||||
d = cstring""
|
||||
e = "abcd"
|
||||
doAssert hash(a) == 0
|
||||
doAssert hash(b) == 0
|
||||
doAssert hash(c) == 0
|
||||
doAssert hash(d) == 0
|
||||
doAssert hashIgnoreCase(a) == 0
|
||||
doAssert hashIgnoreStyle(a) == 0
|
||||
doAssert hash(e, 3, 2) == 0
|
||||
block sameButDifferent:
|
||||
doAssert hash("aa bb aaaa1234") == hash("aa bb aaaa1234", 0, 13)
|
||||
doAssert hash("aa bb aaaa1234") == hash(cstring"aa bb aaaa1234")
|
||||
@@ -346,14 +451,14 @@ when isMainModule:
|
||||
doAssert hashIgnoreStyle("aa_bb_AAaa1234") == hashIgnoreCase("aaBBAAAa1234")
|
||||
block smallSize: # no multibyte hashing
|
||||
let
|
||||
xx = @['H','e','l','l','o']
|
||||
ii = @[72'i8, 101, 108, 108, 111]
|
||||
ss = "Hello"
|
||||
xx = @['H','i']
|
||||
ii = @[72'u8, 105]
|
||||
ss = "Hi"
|
||||
doAssert hash(xx) == hash(ii)
|
||||
doAssert hash(xx) == hash(ss)
|
||||
doAssert hash(xx) == hash(xx, 0, xx.high)
|
||||
doAssert hash(ss) == hash(ss, 0, ss.high)
|
||||
block largeSize: # longer than 8 characters, should trigger multibyte hashing
|
||||
block largeSize: # longer than 4 characters
|
||||
let
|
||||
xx = @['H','e','l','l','o']
|
||||
xxl = @['H','e','l','l','o','w','e','e','n','s']
|
||||
@@ -362,9 +467,6 @@ when isMainModule:
|
||||
doAssert hash(xxl) == hash(xxl, 0, xxl.high)
|
||||
doAssert hash(ssl) == hash(ssl, 0, ssl.high)
|
||||
doAssert hash(xx) == hash(xxl, 0, 4)
|
||||
block misc:
|
||||
let
|
||||
a = [1'u8, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4]
|
||||
b = [1'i8, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4]
|
||||
doAssert hash(a) == hash(b)
|
||||
doAssert hash(a, 2, 5) == hash(b, 2, 5)
|
||||
doAssert hash(xx) == hash(ssl, 0, 4)
|
||||
doAssert hash(xx, 0, 3) == hash(xxl, 0, 3)
|
||||
doAssert hash(xx, 0, 3) == hash(ssl, 0, 3)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
#
|
||||
# Nim's Runtime Library
|
||||
# (c) Copyright 2016 Dominik Picheta, Andreas Rumpf
|
||||
# (c) Copyright 2019 Nim Contributors
|
||||
#
|
||||
# See the file "copying.txt", included in this
|
||||
# distribution, for details about the copyright.
|
||||
@@ -175,7 +175,7 @@
|
||||
## let client = newHttpClient(maxRedirects = 0)
|
||||
##
|
||||
|
||||
import net, strutils, uri, parseutils, strtabs, base64, os, mimetypes,
|
||||
import net, strutils, uri, parseutils, base64, os, mimetypes,
|
||||
math, random, httpcore, times, tables, streams, std/monotimes
|
||||
import asyncnet, asyncdispatch, asyncfile
|
||||
import nativesockets
|
||||
@@ -276,134 +276,6 @@ proc fileError(msg: string) =
|
||||
e.msg = msg
|
||||
raise e
|
||||
|
||||
proc parseChunks(s: Socket, timeout: int): string =
|
||||
result = ""
|
||||
var ri = 0
|
||||
while true:
|
||||
var chunkSizeStr = ""
|
||||
var chunkSize = 0
|
||||
s.readLine(chunkSizeStr, timeout)
|
||||
var i = 0
|
||||
if chunkSizeStr == "":
|
||||
httpError("Server terminated connection prematurely")
|
||||
while i < chunkSizeStr.len:
|
||||
case chunkSizeStr[i]
|
||||
of '0'..'9':
|
||||
chunkSize = chunkSize shl 4 or (ord(chunkSizeStr[i]) - ord('0'))
|
||||
of 'a'..'f':
|
||||
chunkSize = chunkSize shl 4 or (ord(chunkSizeStr[i]) - ord('a') + 10)
|
||||
of 'A'..'F':
|
||||
chunkSize = chunkSize shl 4 or (ord(chunkSizeStr[i]) - ord('A') + 10)
|
||||
of ';':
|
||||
# http://tools.ietf.org/html/rfc2616#section-3.6.1
|
||||
# We don't care about chunk-extensions.
|
||||
break
|
||||
else:
|
||||
httpError("Invalid chunk size: " & chunkSizeStr)
|
||||
inc(i)
|
||||
if chunkSize <= 0:
|
||||
s.skip(2, timeout) # Skip \c\L
|
||||
break
|
||||
result.setLen(ri+chunkSize)
|
||||
var bytesRead = 0
|
||||
while bytesRead != chunkSize:
|
||||
let ret = recv(s, addr(result[ri]), chunkSize-bytesRead, timeout)
|
||||
ri += ret
|
||||
bytesRead += ret
|
||||
s.skip(2, timeout) # Skip \c\L
|
||||
# Trailer headers will only be sent if the request specifies that we want
|
||||
# them: http://tools.ietf.org/html/rfc2616#section-3.6.1
|
||||
|
||||
proc parseBody(s: Socket, headers: HttpHeaders, httpVersion: string, timeout: int): string =
|
||||
result = ""
|
||||
if headers.getOrDefault"Transfer-Encoding" == "chunked":
|
||||
result = parseChunks(s, timeout)
|
||||
else:
|
||||
# -REGION- Content-Length
|
||||
# (http://tools.ietf.org/html/rfc2616#section-4.4) NR.3
|
||||
var contentLengthHeader = headers.getOrDefault"Content-Length"
|
||||
if contentLengthHeader != "":
|
||||
var length = contentLengthHeader.parseInt()
|
||||
if length > 0:
|
||||
result = newString(length)
|
||||
var received = 0
|
||||
while true:
|
||||
if received >= length: break
|
||||
let r = s.recv(addr(result[received]), length-received, timeout)
|
||||
if r == 0: break
|
||||
received += r
|
||||
if received != length:
|
||||
httpError("Got invalid content length. Expected: " & $length &
|
||||
" got: " & $received)
|
||||
else:
|
||||
# (http://tools.ietf.org/html/rfc2616#section-4.4) NR.4 TODO
|
||||
|
||||
# -REGION- Connection: Close
|
||||
# (http://tools.ietf.org/html/rfc2616#section-4.4) NR.5
|
||||
let implicitConnectionClose =
|
||||
httpVersion == "1.0" or
|
||||
# This doesn't match the HTTP spec, but it fixes issues for non-conforming servers.
|
||||
(httpVersion == "1.1" and headers.getOrDefault"Connection" == "")
|
||||
if headers.getOrDefault"Connection" == "close" or implicitConnectionClose:
|
||||
var buf = ""
|
||||
while true:
|
||||
buf = newString(4000)
|
||||
let r = s.recv(addr(buf[0]), 4000, timeout)
|
||||
if r == 0: break
|
||||
buf.setLen(r)
|
||||
result.add(buf)
|
||||
|
||||
proc parseResponse(s: Socket, getBody: bool, timeout: int): Response =
|
||||
new result
|
||||
var parsedStatus = false
|
||||
var linei = 0
|
||||
var fullyRead = false
|
||||
var line = ""
|
||||
result.headers = newHttpHeaders()
|
||||
while true:
|
||||
line = ""
|
||||
linei = 0
|
||||
s.readLine(line, timeout)
|
||||
if line == "": break # We've been disconnected.
|
||||
if line == "\c\L":
|
||||
fullyRead = true
|
||||
break
|
||||
if not parsedStatus:
|
||||
# Parse HTTP version info and status code.
|
||||
var le = skipIgnoreCase(line, "HTTP/", linei)
|
||||
if le <= 0: httpError("invalid http version")
|
||||
inc(linei, le)
|
||||
le = skipIgnoreCase(line, "1.1", linei)
|
||||
if le > 0: result.version = "1.1"
|
||||
else:
|
||||
le = skipIgnoreCase(line, "1.0", linei)
|
||||
if le <= 0: httpError("unsupported http version")
|
||||
result.version = "1.0"
|
||||
inc(linei, le)
|
||||
# Status code
|
||||
linei.inc skipWhitespace(line, linei)
|
||||
result.status = line[linei .. ^1]
|
||||
parsedStatus = true
|
||||
else:
|
||||
# Parse headers
|
||||
var name = ""
|
||||
var le = parseUntil(line, name, ':', linei)
|
||||
if le <= 0: httpError("invalid headers")
|
||||
inc(linei, le)
|
||||
if line[linei] != ':': httpError("invalid headers")
|
||||
inc(linei) # Skip :
|
||||
|
||||
result.headers.add(name, line[linei.. ^1].strip())
|
||||
# Ensure the server isn't trying to DoS us.
|
||||
if result.headers.len > headerLimit:
|
||||
httpError("too many headers")
|
||||
|
||||
if not fullyRead:
|
||||
httpError("Connection was closed before full request has been made")
|
||||
if getBody:
|
||||
result.body = parseBody(s, result.headers, result.version, timeout)
|
||||
else:
|
||||
result.body = ""
|
||||
|
||||
when not defined(ssl):
|
||||
type SSLContext = ref object
|
||||
|
||||
@@ -1369,11 +1369,20 @@ proc createConstructor(typeSym, jsonNode: NimNode): NimNode =
|
||||
for `forLoopI` in 0 ..< `jsonNode`.len: list[`forLoopI`] =`constructorNode`;
|
||||
list
|
||||
)
|
||||
|
||||
of "tuple":
|
||||
let typeNode = getTypeImpl(typeSym)
|
||||
result = createConstructor(typeNode, jsonNode)
|
||||
else:
|
||||
# Generic type.
|
||||
# Generic type or some `seq[T]` alias
|
||||
let obj = getType(typeSym)
|
||||
result = processType(typeSym, obj, jsonNode, false)
|
||||
case obj.kind
|
||||
of nnkBracketExpr:
|
||||
# probably a `seq[T]` alias
|
||||
let typeNode = getTypeImpl(typeSym)
|
||||
result = createConstructor(typeNode, jsonNode)
|
||||
else:
|
||||
# generic type
|
||||
result = processType(typeSym, obj, jsonNode, false)
|
||||
of nnkSym:
|
||||
# Handle JsonNode.
|
||||
if ($typeSym).cmpIgnoreStyle("jsonnode") == 0:
|
||||
@@ -1385,7 +1394,7 @@ proc createConstructor(typeSym, jsonNode: NimNode): NimNode =
|
||||
if typeNode.typeKind == ntyDistinct:
|
||||
result = createConstructor(typeNode, jsonNode)
|
||||
elif obj.kind == nnkBracketExpr:
|
||||
# When `Sym "Foo"` turns out to be a `ref object`.
|
||||
# When `Sym "Foo"` turns out to be a `ref object` or `tuple`
|
||||
result = createConstructor(obj, jsonNode)
|
||||
else:
|
||||
result = processType(typeSym, obj, jsonNode, false)
|
||||
|
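The json hunk above teaches ``createConstructor`` about plain ``tuple`` types
and ``seq[T]`` aliases. A hedged sketch of the kind of call this change is
meant to allow (the type and field names here are illustrative only):

```nim
import json

type
  Ints = seq[int]                     # a seq alias
  Pair = tuple[name: string, id: int] # a plain tuple

let xs = parseJson("[1, 2, 3]").to(Ints)
let p = parseJson("""{"name": "nim", "id": 1}""").to(Pair)
echo xs, " ", p
```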
@@ -100,7 +100,7 @@ proc fillBaseLexer(L: var BaseLexer, pos: int): int =
|
||||
result = 0
|
||||
|
||||
proc handleCR*(L: var BaseLexer, pos: int): int =
|
||||
## Call this if you scanned over '\c' in the buffer; it returns the the
|
||||
## Call this if you scanned over '\c' in the buffer; it returns the
|
||||
## position to continue the scanning from. `pos` must be the position
|
||||
## of the '\c'.
|
||||
assert(L.buf[pos] == '\c')
|
||||
@@ -111,7 +111,7 @@ proc handleCR*(L: var BaseLexer, pos: int): int =
|
||||
L.lineStart = result
|
||||
|
||||
proc handleLF*(L: var BaseLexer, pos: int): int =
|
||||
## Call this if you scanned over '\L' in the buffer; it returns the the
|
||||
## Call this if you scanned over '\L' in the buffer; it returns the
|
||||
## position to continue the scanning from. `pos` must be the position
|
||||
## of the '\L'.
|
||||
assert(L.buf[pos] == '\L')
|
||||
@@ -120,7 +120,8 @@ proc handleLF*(L: var BaseLexer, pos: int): int =
|
||||
L.lineStart = result
|
||||
|
||||
proc handleRefillChar*(L: var BaseLexer, pos: int): int =
|
||||
## To be documented.
|
||||
## Call this if a terminator character other than a new line is scanned
|
||||
## at `pos`; it returns the position to continue the scanning from.
|
||||
assert(L.buf[pos] in L.refillChars)
|
||||
result = fillBaseLexer(L, pos) #L.lastNL := result-1; // BUGFIX: was: result;
|
||||
|
||||
|
||||
@@ -530,8 +530,12 @@ when defineSsl:
|
||||
##
|
||||
## The last two parameters specify the certificate file path and the key file
|
||||
## path, a server socket will most likely not work without these.
|
||||
##
|
||||
## Certificates can be generated using the following command:
|
||||
## ``openssl req -x509 -nodes -days 365 -newkey rsa:1024 -keyout mycert.pem -out mycert.pem``.
|
||||
## - ``openssl req -x509 -nodes -days 365 -newkey rsa:4096 -keyout mykey.pem -out mycert.pem``
|
||||
## or using ECDSA:
|
||||
## - ``openssl ecparam -out mykey.pem -name secp256k1 -genkey``
|
||||
## - ``openssl req -new -key mykey.pem -x509 -nodes -days 365 -out mycert.pem``
|
||||
var newCTX: SSL_CTX
|
||||
case protVersion
|
||||
of protSSLv23:
|
||||
|
||||
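The net hunk above replaces the weak ``rsa:1024`` example with ``rsa:4096`` and
ECDSA commands for generating a certificate/key pair. A hedged usage sketch of
feeding those files to ``newContext`` (file names match the commands above;
requires building with ``-d:ssl``):

```nim
import net

# wrap a listening socket with the generated certificate and key
let ctx = newContext(certFile = "mycert.pem", keyFile = "mykey.pem")
var server = newSocket()
ctx.wrapSocket(server)
server.bindAddr(Port(8443))
server.listen()
```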
@@ -31,11 +31,6 @@ type
|
||||
## Type that can hold a single Unicode code point.
|
||||
##
|
||||
## A Rune may be composed with other Runes to a character on the screen.
|
||||
Rune16* = distinct int16 ## \
|
||||
## Type that can hold a single UTF-16 encoded character.
|
||||
##
|
||||
## A single Rune16 may not be enough to hold an arbitrary Unicode code point.
|
||||
|
||||
|
||||
template ones(n: untyped): untyped = ((1 shl n)-1)
|
||||
|
||||
@@ -388,7 +383,7 @@ proc runeStrAtPos*(s: string, pos: Natural): string =
|
||||
## * `runeAtPos proc <#runeAtPos,string,int>`_
|
||||
## * `fastRuneAt template <#fastRuneAt.t,string,int,untyped>`_
|
||||
let o = runeOffset(s, pos)
|
||||
s[o.. (o+runeLenAt(s, o)-1)]
|
||||
s[o .. (o+runeLenAt(s, o)-1)]
|
||||
|
||||
proc runeSubStr*(s: string, pos: int, len: int = int.high): string =
|
||||
## Returns the UTF-8 substring starting at code point ``pos``
|
||||
|
||||
@@ -1132,57 +1132,57 @@ proc chr*(u: range[0..255]): char {.magic: "Chr", noSideEffect.}
|
||||
# built-in operators
|
||||
|
||||
when defined(nimNoZeroExtendMagic):
|
||||
proc ze*(x: int8): int =
|
||||
proc ze*(x: int8): int {.deprecated.} =
|
||||
## zero extends a smaller integer type to ``int``. This treats `x` as
|
||||
## unsigned.
|
||||
## **Deprecated since version 0.19.9**: Use unsigned integers instead.
|
||||
|
||||
cast[int](uint(cast[uint8](x)))
|
||||
|
||||
proc ze*(x: int16): int =
|
||||
proc ze*(x: int16): int {.deprecated.} =
|
||||
## zero extends a smaller integer type to ``int``. This treats `x` as
|
||||
## unsigned.
|
||||
## **Deprecated since version 0.19.9**: Use unsigned integers instead.
|
||||
cast[int](uint(cast[uint16](x)))
|
||||
|
||||
proc ze64*(x: int8): int64 =
|
||||
proc ze64*(x: int8): int64 {.deprecated.} =
|
||||
## zero extends a smaller integer type to ``int64``. This treats `x` as
|
||||
## unsigned.
|
||||
## **Deprecated since version 0.19.9**: Use unsigned integers instead.
|
||||
cast[int64](uint64(cast[uint8](x)))
|
||||
|
||||
proc ze64*(x: int16): int64 =
|
||||
proc ze64*(x: int16): int64 {.deprecated.} =
|
||||
## zero extends a smaller integer type to ``int64``. This treats `x` as
|
||||
## unsigned.
|
||||
## **Deprecated since version 0.19.9**: Use unsigned integers instead.
|
||||
cast[int64](uint64(cast[uint16](x)))
|
||||
|
||||
proc ze64*(x: int32): int64 =
|
||||
proc ze64*(x: int32): int64 {.deprecated.} =
|
||||
## zero extends a smaller integer type to ``int64``. This treats `x` as
|
||||
## unsigned.
|
||||
## **Deprecated since version 0.19.9**: Use unsigned integers instead.
|
||||
cast[int64](uint64(cast[uint32](x)))
|
||||
|
||||
proc ze64*(x: int): int64 =
|
||||
proc ze64*(x: int): int64 {.deprecated.} =
|
||||
## zero extends a smaller integer type to ``int64``. This treats `x` as
|
||||
## unsigned. Does nothing if the size of an ``int`` is the same as ``int64``.
|
||||
## (This is the case on 64 bit processors.)
|
||||
## **Deprecated since version 0.19.9**: Use unsigned integers instead.
|
||||
cast[int64](uint64(cast[uint](x)))
|
||||
|
||||
proc toU8*(x: int): int8 =
|
||||
proc toU8*(x: int): int8 {.deprecated.} =
|
||||
## treats `x` as unsigned and converts it to a byte by taking the last 8 bits
|
||||
## from `x`.
|
||||
## **Deprecated since version 0.19.9**: Use unsigned integers instead.
|
||||
cast[int8](x)
|
||||
|
||||
proc toU16*(x: int): int16 =
|
||||
proc toU16*(x: int): int16 {.deprecated.} =
|
||||
## treats `x` as unsigned and converts it to an ``int16`` by taking the last
|
||||
## 16 bits from `x`.
|
||||
## **Deprecated since version 0.19.9**: Use unsigned integers instead.
|
||||
cast[int16](x)
|
||||
|
||||
proc toU32*(x: int64): int32 =
|
||||
proc toU32*(x: int64): int32 {.deprecated.} =
|
||||
## treats `x` as unsigned and converts it to an ``int32`` by taking the
|
||||
## last 32 bits from `x`.
|
||||
## **Deprecated since version 0.19.9**: Use unsigned integers instead.
|
||||
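Since the hunk above only deprecates ``ze``/``ze64``/``toU8``/``toU16``/``toU32``
without showing the replacement, here is a hedged sketch of the recommended
unsigned-integer idiom:

```nim
let x = -1'i8

# what ze(x) computed: zero-extend by going through the unsigned counterpart
doAssert int(cast[uint8](x)) == 255

# what toU8(y) computed: keep only the low 8 bits
let y = 300
doAssert cast[int8](y and 0xff) == cast[int8](y)
```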
@@ -1794,21 +1794,25 @@ proc cmp*(x, y: string): int {.noSideEffect, procvar.}
|
||||
## **Note**: The precise result values depend on the used C runtime library and
|
||||
## can differ between operating systems!
|
||||
|
||||
proc `@`* [IDX, T](a: array[IDX, T]): seq[T] {.
|
||||
magic: "ArrToSeq", noSideEffect.}
|
||||
## Turns an array into a sequence.
|
||||
##
|
||||
## This is most often useful for constructing
|
||||
## sequences with the array constructor: ``@[1, 2, 3]`` has the type
|
||||
## ``seq[int]``, while ``[1, 2, 3]`` has the type ``array[0..2, int]``.
|
||||
##
|
||||
## .. code-block:: Nim
|
||||
## let
|
||||
## a = [1, 3, 5]
|
||||
## b = "foo"
|
||||
##
|
||||
## echo @a # => @[1, 3, 5]
|
||||
## echo @b # => @['f', 'o', 'o']
|
||||
when defined(nimHasDefault):
|
||||
proc `@`* [IDX, T](a: sink array[IDX, T]): seq[T] {.
|
||||
magic: "ArrToSeq", noSideEffect.}
|
||||
## Turns an array into a sequence.
|
||||
##
|
||||
## This is most often useful for constructing
|
||||
## sequences with the array constructor: ``@[1, 2, 3]`` has the type
|
||||
## ``seq[int]``, while ``[1, 2, 3]`` has the type ``array[0..2, int]``.
|
||||
##
|
||||
## .. code-block:: Nim
|
||||
## let
|
||||
## a = [1, 3, 5]
|
||||
## b = "foo"
|
||||
##
|
||||
## echo @a # => @[1, 3, 5]
|
||||
## echo @b # => @['f', 'o', 'o']
|
||||
else:
|
||||
proc `@`* [IDX, T](a: array[IDX, T]): seq[T] {.
|
||||
magic: "ArrToSeq", noSideEffect.}
|
||||
|
||||
when defined(nimHasDefault):
|
||||
proc default*(T: typedesc): T {.magic: "Default", noSideEffect.}
|
||||
@@ -2089,7 +2093,7 @@ when not defined(JS) and not defined(nimscript) and hostOS != "standalone":
|
||||
when not defined(JS) and not defined(nimscript) and hasAlloc and not defined(gcDestructors):
|
||||
proc addChar(s: NimString, c: char): NimString {.compilerproc, benign.}
|
||||
|
||||
when not defined(gcDestructors):
|
||||
when not defined(gcDestructors) or defined(nimscript):
|
||||
proc add*[T](x: var seq[T], y: T) {.magic: "AppendSeqElem", noSideEffect.}
|
||||
## Generic proc for adding a data item `y` to a container `x`.
|
||||
##
|
||||
@@ -2277,8 +2281,7 @@ type # these work for most platforms:
|
||||
PInt64* = ptr int64 ## An alias for ``ptr int64``.
|
||||
PInt32* = ptr int32 ## An alias for ``ptr int32``.
|
||||
|
||||
proc toFloat*(i: int): float {.
|
||||
magic: "ToFloat", noSideEffect, importc: "toFloat".}
|
||||
proc toFloat*(i: int): float {.noSideEffect, inline.} =
|
||||
## Converts an integer `i` into a ``float``.
|
||||
##
|
||||
## If the conversion fails, `ValueError` is raised.
|
||||
@@ -2290,13 +2293,13 @@ proc toFloat*(i: int): float {.
|
||||
## b = 3.7
|
||||
##
|
||||
## echo a.toFloat + b # => 5.7
|
||||
float(i)
|
||||
|
||||
proc toBiggestFloat*(i: BiggestInt): BiggestFloat {.
|
||||
magic: "ToBiggestFloat", noSideEffect, importc: "toBiggestFloat".}
|
||||
proc toBiggestFloat*(i: BiggestInt): BiggestFloat {.noSideEffect, inline.} =
|
||||
## Same as `toFloat <#toFloat,int>`_ but for ``BiggestInt`` to ``BiggestFloat``.
|
||||
BiggestFloat(i)
|
||||
|
||||
proc toInt*(f: float): int {.
|
||||
magic: "ToInt", noSideEffect, importc: "toInt".}
|
||||
proc toInt*(f: float): int {.noSideEffect.} =
|
||||
## Converts a floating point number `f` into an ``int``.
|
||||
##
|
||||
## Conversion rounds `f` half away from 0, see
|
||||
@@ -2310,10 +2313,11 @@ proc toInt*(f: float): int {.
|
||||
## doAssert toInt(0.49) == 0
|
||||
## doAssert toInt(0.5) == 1
|
||||
## doAssert toInt(-0.5) == -1 # rounding is symmetrical
|
||||
if f >= 0: int(f+0.5) else: int(f-0.5)
|
||||
|
||||
proc toBiggestInt*(f: BiggestFloat): BiggestInt {.
|
||||
magic: "ToBiggestInt", noSideEffect, importc: "toBiggestInt".}
|
||||
proc toBiggestInt*(f: BiggestFloat): BiggestInt {.noSideEffect.} =
|
||||
## Same as `toInt <#toInt,float>`_ but for ``BiggestFloat`` to ``BiggestInt``.
|
||||
if f >= 0: BiggestInt(f+0.5) else: BiggestInt(f-0.5)
|
||||
|
||||
proc addQuitProc*(quitProc: proc() {.noconv.}) {.
|
||||
importc: "atexit", header: "<stdlib.h>".}
|
||||
@@ -2677,6 +2681,7 @@ when defined(nimNewRoof):
|
||||
##
|
||||
## for i in countup(2, 9, 3):
|
||||
## echo i # => 2; 5; 8
|
||||
mixin inc
|
||||
when T is IntLikeForCount:
|
||||
var res = int(a)
|
||||
while res <= int(b):
|
||||
@@ -2697,6 +2702,7 @@ when defined(nimNewRoof):
|
||||
## .. code-block:: Nim
|
||||
## for i in 3 .. 7:
|
||||
## echo i # => 3; 4; 5; 6; 7
|
||||
mixin inc
|
||||
when T is IntLikeForCount:
|
||||
var res = int(a)
|
||||
while res <= int(b):
|
||||
@@ -2726,6 +2732,7 @@ when defined(nimNewRoof):
|
||||
dotdotImpl(uint32)
|
||||
|
||||
iterator `..<`*[T](a, b: T): T {.inline.} =
|
||||
mixin inc
|
||||
var i = T(a)
|
||||
while i < b:
|
||||
yield i
|
||||
@@ -2781,6 +2788,7 @@ else:
|
||||
## .. code-block:: Nim
|
||||
## for i in 3 .. 7:
|
||||
## echo i # => 3; 4; 5; 6; 7
|
||||
mixin inc
|
||||
when T is IntLikeForCount:
|
||||
var res = int(a)
|
||||
while res <= int(b):
|
||||
@@ -2793,6 +2801,7 @@ else:
|
||||
inc(res)
|
||||
|
||||
iterator `..<`*[S, T](a: S, b: T): T {.inline.} =
|
||||
mixin inc
|
||||
var i = T(a)
|
||||
while i < b:
|
||||
yield i
|
||||
@@ -3630,10 +3639,6 @@ when not defined(JS): #and not defined(nimscript):
|
||||
if result == 0:
|
||||
result = x.len - y.len
|
||||
|
||||
when not defined(nimscript) and hostOS != "standalone":
|
||||
when defined(endb):
|
||||
proc endbStep()
|
||||
|
||||
when declared(newSeq):
|
||||
proc cstringArrayToSeq*(a: cstringArray, len: Natural): seq[string] =
|
||||
## Converts a ``cstringArray`` to a ``seq[string]``. `a` is supposed to be
|
||||
@@ -3807,9 +3812,6 @@ when not defined(JS): #and not defined(nimscript):
|
||||
currException = exc
|
||||
|
||||
{.push stack_trace: off, profiler:off.}
|
||||
when defined(endb) and not defined(nimscript):
|
||||
include "system/debugger"
|
||||
|
||||
when (defined(profiler) or defined(memProfiler)) and not defined(nimscript):
|
||||
include "system/profiler"
|
||||
{.pop.} # stacktrace
|
||||
@@ -4499,6 +4501,11 @@ when defined(nimconfig):
|
||||
when not defined(js):
|
||||
proc toOpenArray*[T](x: ptr UncheckedArray[T]; first, last: int): openArray[T] {.
|
||||
magic: "Slice".}
|
||||
when defined(nimToOpenArrayCString):
|
||||
proc toOpenArray*(x: cstring; first, last: int): openArray[char] {.
|
||||
magic: "Slice".}
|
||||
proc toOpenArrayByte*(x: cstring; first, last: int): openArray[byte] {.
|
||||
magic: "Slice".}
|
||||
|
||||
proc toOpenArray*[T](x: seq[T]; first, last: int): openArray[T] {.
|
||||
magic: "Slice".}
|
||||
@@ -4508,8 +4515,13 @@ proc toOpenArray*[I, T](x: array[I, T]; first, last: I): openArray[T] {.
|
||||
magic: "Slice".}
|
||||
proc toOpenArray*(x: string; first, last: int): openArray[char] {.
|
||||
magic: "Slice".}
|
||||
|
||||
proc toOpenArrayByte*(x: string; first, last: int): openArray[byte] {.
|
||||
magic: "Slice".}
|
||||
proc toOpenArrayByte*(x: openArray[char]; first, last: int): openArray[byte] {.
|
||||
magic: "Slice".}
|
||||
proc toOpenArrayByte*(x: seq[char]; first, last: int): openArray[byte] {.
|
||||
magic: "Slice".}
|
||||
|
||||
type
|
||||
ForLoopStmt* {.compilerproc.} = object ## \
|
||||
|
||||
@@ -984,7 +984,7 @@ when defined(nimTypeNames):
|
||||
|
||||
# ---------------------- thread memory region -------------------------------
|
||||
|
||||
template instantiateForRegion(allocator: untyped) =
|
||||
template instantiateForRegion(allocator: untyped) {.dirty.} =
|
||||
{.push stackTrace: off.}
|
||||
|
||||
when defined(fulldebug):
|
||||
@@ -1006,8 +1006,8 @@ template instantiateForRegion(allocator: untyped) =
|
||||
proc dealloc(p: pointer) =
|
||||
dealloc(allocator, p)
|
||||
|
||||
proc realloc(p: pointer, newsize: Natural): pointer =
|
||||
result = realloc(allocator, p, newsize)
|
||||
proc realloc(p: pointer, newSize: Natural): pointer =
|
||||
result = realloc(allocator, p, newSize)
|
||||
|
||||
when false:
|
||||
proc countFreeMem(): int =
|
||||
@@ -1054,13 +1054,13 @@ template instantiateForRegion(allocator: untyped) =
|
||||
else:
|
||||
dealloc(p)
|
||||
|
||||
proc reallocShared(p: pointer, newsize: Natural): pointer =
|
||||
proc reallocShared(p: pointer, newSize: Natural): pointer =
|
||||
when hasThreadSupport:
|
||||
acquireSys(heapLock)
|
||||
result = realloc(sharedHeap, p, newsize)
|
||||
result = realloc(sharedHeap, p, newSize)
|
||||
releaseSys(heapLock)
|
||||
else:
|
||||
result = realloc(p, newsize)
|
||||
result = realloc(p, newSize)
|
||||
|
||||
when hasThreadSupport:
|
||||
template sharedMemStatsShared(v: int) =
|
||||
|
||||
@@ -30,7 +30,7 @@ proc c_strcmp*(a, b: cstring): cint {.
|
||||
proc c_strlen*(a: cstring): csize {.
|
||||
importc: "strlen", header: "<string.h>", noSideEffect.}
|
||||
proc c_abort*() {.
|
||||
importc: "abort", header: "<stdlib.h>", noSideEffect.}
|
||||
importc: "abort", header: "<stdlib.h>", noSideEffect, noreturn.}
|
||||
|
||||
|
||||
when defined(linux) and defined(amd64):
|
||||
|
||||
@@ -27,12 +27,12 @@ proc failedAssertImpl*(msg: string) {.raises: [], tags: [].} =
|
||||
Hide(raiseAssert)(msg)
|
||||
|
||||
template assertImpl(cond: bool, msg: string, expr: string, enabled: static[bool]) =
|
||||
const
|
||||
loc = instantiationInfo(fullPaths = compileOption("excessiveStackTrace"))
|
||||
ploc = $loc
|
||||
bind instantiationInfo
|
||||
mixin failedAssertImpl
|
||||
when enabled:
|
||||
const
|
||||
loc = instantiationInfo(fullPaths = compileOption("excessiveStackTrace"))
|
||||
ploc = $loc
|
||||
bind instantiationInfo
|
||||
mixin failedAssertImpl
|
||||
{.line: loc.}:
|
||||
if not cond:
|
||||
failedAssertImpl(ploc & " `" & expr & "` " & msg)
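The template above records the instantiation location in a compile-time constant and attaches it to the failure via the `{.line.}` pragma, so the reported position is the `assert` call site rather than the template body. A rough sketch of the same pattern for a user-defined check (the `myCheck` name is illustrative, not part of this patch):

```nim
template myCheck(cond: untyped) =
  const loc = instantiationInfo()
  {.line: loc.}:
    if not cond:
      raise newException(AssertionError, $loc & ": check failed")

myCheck(1 + 1 == 2)
```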
|
||||
|
||||
@@ -1,303 +0,0 @@
|
||||
#
|
||||
#
|
||||
# Nim's Runtime Library
|
||||
# (c) Copyright 2013 Andreas Rumpf
|
||||
#
|
||||
# See the file "copying.txt", included in this
|
||||
# distribution, for details about the copyright.
|
||||
#
|
||||
|
||||
## This file implements basic features for any debugger.
|
||||
|
||||
type
|
||||
VarSlot* {.compilerproc, final.} = object ## a slot in a frame
|
||||
address*: pointer ## the variable's address
|
||||
typ*: PNimType ## the variable's type
|
||||
name*: cstring ## the variable's name; for globals this is "module.name"
|
||||
|
||||
PExtendedFrame = ptr ExtendedFrame
|
||||
ExtendedFrame = object # If the debugger is enabled the compiler
|
||||
# provides an extended frame. Of course
|
||||
# only slots that are
|
||||
# needed are allocated and not 10_000,
|
||||
# except for the global data description.
|
||||
f: TFrame
|
||||
slots: array[0..10_000, VarSlot]
|
||||
|
||||
var
|
||||
dbgGlobalData: ExtendedFrame # this reserves much space, but
|
||||
# for now it is the most practical way
|
||||
|
||||
proc dbgRegisterGlobal(name: cstring, address: pointer,
|
||||
typ: PNimType) {.compilerproc.} =
|
||||
let i = dbgGlobalData.f.len
|
||||
if i >= high(dbgGlobalData.slots):
|
||||
#debugOut("[Warning] cannot register global ")
|
||||
return
|
||||
dbgGlobalData.slots[i].name = name
|
||||
dbgGlobalData.slots[i].typ = typ
|
||||
dbgGlobalData.slots[i].address = address
|
||||
inc(dbgGlobalData.f.len)
|
||||
|
||||
proc getLocal*(frame: PFrame; slot: int): VarSlot {.inline.} =
|
||||
## retrieves the meta data for the local variable at `slot`. CAUTION: An
|
||||
## invalid `slot` value causes a corruption!
|
||||
result = cast[PExtendedFrame](frame).slots[slot]
|
||||
|
||||
proc getGlobalLen*(): int {.inline.} =
|
||||
## gets the number of registered globals.
|
||||
result = dbgGlobalData.f.len
|
||||
|
||||
proc getGlobal*(slot: int): VarSlot {.inline.} =
|
||||
## retrieves the meta data for the global variable at `slot`. CAUTION: An
|
||||
## invalid `slot` value causes a corruption!
|
||||
result = dbgGlobalData.slots[slot]
|
||||
|
||||
# ------------------- breakpoint support ------------------------------------
|
||||
|
||||
type
|
||||
Breakpoint* = object ## represents a break point
|
||||
low*, high*: int ## range from low to high; if disabled
|
||||
## both low and high are set to their negative values
|
||||
filename*: cstring ## the filename of the breakpoint
|
||||
|
||||
var
|
||||
dbgBP: array[0..127, Breakpoint] # breakpoints
|
||||
dbgBPlen: int
|
||||
dbgBPbloom: int64 # we use a bloom filter to speed up breakpoint checking
|
||||
|
||||
dbgFilenames*: array[0..300, cstring] ## registered filenames;
|
||||
## 'nil' terminated
|
||||
dbgFilenameLen: int
|
||||
|
||||
proc dbgRegisterFilename(filename: cstring) {.compilerproc.} =
|
||||
# XXX we could check for duplicates here for DLL support
|
||||
dbgFilenames[dbgFilenameLen] = filename
|
||||
inc dbgFilenameLen
|
||||
|
||||
proc dbgRegisterBreakpoint(line: int,
|
||||
filename, name: cstring) {.compilerproc.} =
|
||||
let x = dbgBPlen
|
||||
if x >= high(dbgBP):
|
||||
#debugOut("[Warning] cannot register breakpoint")
|
||||
return
|
||||
inc(dbgBPlen)
|
||||
dbgBP[x].filename = filename
|
||||
dbgBP[x].low = line
|
||||
dbgBP[x].high = line
|
||||
dbgBPbloom = dbgBPbloom or line
|
||||
|
||||
proc addBreakpoint*(filename: cstring, lo, hi: int): bool =
|
||||
let x = dbgBPlen
|
||||
if x >= high(dbgBP): return false
|
||||
inc(dbgBPlen)
|
||||
result = true
|
||||
dbgBP[x].filename = filename
|
||||
dbgBP[x].low = lo
|
||||
dbgBP[x].high = hi
|
||||
for line in lo..hi: dbgBPbloom = dbgBPbloom or line
|
||||
|
||||
const
|
||||
FileSystemCaseInsensitive = defined(windows) or defined(dos) or defined(os2)
|
||||
|
||||
proc fileMatches(c, bp: cstring): bool =
|
||||
# bp = breakpoint filename
|
||||
# c = current filename
|
||||
# we consider it a match if bp is a suffix of c
# and the character just before that suffix either
# does not exist or is one of: \ / :
# depending on the OS, case does not matter
|
||||
var blen: int = bp.len
|
||||
var clen: int = c.len
|
||||
if blen > clen: return false
|
||||
# check for \ / :
|
||||
if clen-blen-1 >= 0 and c[clen-blen-1] notin {'\\', '/', ':'}:
|
||||
return false
|
||||
var i = 0
|
||||
while i < blen:
|
||||
var x = bp[i]
|
||||
var y = c[i+clen-blen]
|
||||
when FileSystemCaseInsensitive:
|
||||
if x >= 'A' and x <= 'Z': x = chr(ord(x) - ord('A') + ord('a'))
|
||||
if y >= 'A' and y <= 'Z': y = chr(ord(y) - ord('A') + ord('a'))
|
||||
if x != y: return false
|
||||
inc(i)
|
||||
return true
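In other words, `fileMatches(c, bp)` is a (possibly case-insensitive) suffix comparison that only accepts the suffix at a path-component boundary. A few illustrative calls (the paths are made up, shown only to pin down the behaviour described above):

```nim
doAssert fileMatches("/home/user/proj/module.nim", "module.nim")        # suffix starts right after '/'
doAssert fileMatches("module.nim", "module.nim")                        # exact match, nothing precedes it
doAssert not fileMatches("/home/user/proj/mymodule.nim", "module.nim")  # 'y' precedes the suffix
```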
|
||||
|
||||
proc canonFilename*(filename: cstring): cstring =
|
||||
## returns 'nil' if the filename cannot be found.
|
||||
for i in 0 .. dbgFilenameLen-1:
|
||||
result = dbgFilenames[i]
|
||||
if fileMatches(result, filename): return result
|
||||
result = nil
|
||||
|
||||
iterator listBreakpoints*(): ptr Breakpoint =
|
||||
## lists all breakpoints.
|
||||
for i in 0..dbgBPlen-1: yield addr(dbgBP[i])
|
||||
|
||||
proc isActive*(b: ptr Breakpoint): bool = b.low > 0
|
||||
proc flip*(b: ptr Breakpoint) =
|
||||
## enables or disables 'b' depending on its current state.
|
||||
b.low = -b.low; b.high = -b.high
|
||||
|
||||
proc checkBreakpoints*(filename: cstring, line: int): ptr Breakpoint =
|
||||
## returns the breakpoint (if any) we are currently in.
|
||||
if (dbgBPbloom and line) != line: return nil
|
||||
for b in listBreakpoints():
|
||||
if line >= b.low and line <= b.high and filename == b.filename: return b
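`dbgBPbloom` acts as a very cheap Bloom-style filter: every registered breakpoint line is OR-ed into it, so a line whose bits are not all present in the filter cannot be a breakpoint and the linear scan is skipped. A standalone sketch of the idea (the names here are illustrative, not the debugger globals):

```nim
var filter: int64 = 0
for registeredLine in [10, 42, 97]:
  filter = filter or registeredLine    # same update as dbgRegisterBreakpoint

proc mightBeBreakpoint(line: int): bool =
  # false means "definitely no breakpoint on this line";
  # true only means "possibly", so the breakpoint list must still be scanned
  result = (filter and line) == line

doAssert mightBeBreakpoint(42)
doAssert not mightBeBreakpoint(16384)  # no registered line has bit 14 set
```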
|
||||
|
||||
# ------------------- watchpoint support ------------------------------------
|
||||
|
||||
type
|
||||
Hash = int
|
||||
Watchpoint {.pure, final.} = object
|
||||
name: cstring
|
||||
address: pointer
|
||||
typ: PNimType
|
||||
oldValue: Hash
|
||||
|
||||
var
|
||||
watchpoints: array[0..99, Watchpoint]
|
||||
watchpointsLen: int
|
||||
|
||||
proc `!&`(h: Hash, val: int): Hash {.inline.} =
|
||||
result = h +% val
|
||||
result = result +% result shl 10
|
||||
result = result xor (result shr 6)
|
||||
|
||||
proc `!$`(h: Hash): Hash {.inline.} =
|
||||
result = h +% h shl 3
|
||||
result = result xor (result shr 11)
|
||||
result = result +% result shl 15
|
||||
|
||||
proc hash(data: pointer, size: int): Hash =
|
||||
var h: Hash = 0
|
||||
var p = cast[cstring](data)
|
||||
var i = 0
|
||||
var s = size
|
||||
while s > 0:
|
||||
h = h !& ord(p[i])
|
||||
inc(i)
|
||||
dec(s)
|
||||
result = !$h
|
||||
|
||||
proc hashGcHeader(data: pointer): Hash =
|
||||
const headerSize = sizeof(int)*2
|
||||
result = hash(cast[pointer](cast[int](data) -% headerSize), headerSize)
|
||||
|
||||
proc genericHashAux(dest: pointer, mt: PNimType, shallow: bool,
|
||||
h: Hash): Hash
|
||||
proc genericHashAux(dest: pointer, n: ptr TNimNode, shallow: bool,
|
||||
h: Hash): Hash =
|
||||
var d = cast[ByteAddress](dest)
|
||||
case n.kind
|
||||
of nkSlot:
|
||||
result = genericHashAux(cast[pointer](d +% n.offset), n.typ, shallow, h)
|
||||
of nkList:
|
||||
result = h
|
||||
for i in 0..n.len-1:
|
||||
result = result !& genericHashAux(dest, n.sons[i], shallow, result)
|
||||
of nkCase:
|
||||
result = h !& hash(cast[pointer](d +% n.offset), n.typ.size)
|
||||
var m = selectBranch(dest, n)
|
||||
if m != nil: result = genericHashAux(dest, m, shallow, result)
|
||||
of nkNone: sysAssert(false, "genericHashAux")
|
||||
|
||||
proc genericHashAux(dest: pointer, mt: PNimType, shallow: bool,
|
||||
h: Hash): Hash =
|
||||
sysAssert(mt != nil, "genericHashAux 2")
|
||||
case mt.kind
|
||||
of tyString:
|
||||
var x = cast[PPointer](dest)[]
|
||||
result = h
|
||||
if x != nil:
|
||||
let s = cast[NimString](x)
|
||||
when defined(trackGcHeaders):
|
||||
result = result !& hashGcHeader(x)
|
||||
else:
|
||||
result = result !& hash(x, s.len)
|
||||
of tySequence:
|
||||
var x = cast[PPointer](dest)
|
||||
var dst = cast[ByteAddress](cast[PPointer](dest)[])
|
||||
result = h
|
||||
if dst != 0:
|
||||
when defined(trackGcHeaders):
|
||||
result = result !& hashGcHeader(cast[PPointer](dest)[])
|
||||
else:
|
||||
for i in 0..cast[PGenericSeq](dst).len-1:
|
||||
result = result !& genericHashAux(
|
||||
cast[pointer](dst +% i*% mt.base.size +% GenericSeqSize),
|
||||
mt.base, shallow, result)
|
||||
of tyObject, tyTuple:
|
||||
# we don't need to copy m_type field for tyObject, as they are equal anyway
|
||||
result = genericHashAux(dest, mt.node, shallow, h)
|
||||
of tyArray, tyArrayConstr:
|
||||
let d = cast[ByteAddress](dest)
|
||||
result = h
|
||||
for i in 0..(mt.size div mt.base.size)-1:
|
||||
result = result !& genericHashAux(cast[pointer](d +% i*% mt.base.size),
|
||||
mt.base, shallow, result)
|
||||
of tyRef:
|
||||
when defined(trackGcHeaders):
|
||||
var s = cast[PPointer](dest)[]
|
||||
if s != nil:
|
||||
result = result !& hashGcHeader(s)
|
||||
else:
|
||||
if shallow:
|
||||
result = h !& hash(dest, mt.size)
|
||||
else:
|
||||
result = h
|
||||
var s = cast[PPointer](dest)[]
|
||||
if s != nil:
|
||||
result = result !& genericHashAux(s, mt.base, shallow, result)
|
||||
else:
|
||||
result = h !& hash(dest, mt.size) # hash raw bits
|
||||
|
||||
proc genericHash(dest: pointer, mt: PNimType): int =
|
||||
result = genericHashAux(dest, mt, false, 0)
|
||||
|
||||
proc dbgRegisterWatchpoint(address: pointer, name: cstring,
|
||||
typ: PNimType) {.compilerproc.} =
|
||||
let L = watchPointsLen
|
||||
for i in 0 .. pred(L):
|
||||
if watchPoints[i].name == name:
|
||||
# address may have changed:
|
||||
watchPoints[i].address = address
|
||||
return
|
||||
if L >= watchPoints.high:
|
||||
#debugOut("[Warning] cannot register watchpoint")
|
||||
return
|
||||
watchPoints[L].name = name
|
||||
watchPoints[L].address = address
|
||||
watchPoints[L].typ = typ
|
||||
watchPoints[L].oldValue = genericHash(address, typ)
|
||||
inc watchPointsLen
|
||||
|
||||
proc dbgUnregisterWatchpoints*() =
|
||||
watchPointsLen = 0
|
||||
|
||||
var
|
||||
dbgLineHook*: proc () {.nimcall.}
|
||||
## set this variable to provide a procedure that should be called before
|
||||
## each executed instruction. This should only be used by debuggers!
|
||||
## Only code compiled with the ``debugger:on`` switch calls this hook.
|
||||
|
||||
dbgWatchpointHook*: proc (watchpointName: cstring) {.nimcall.}
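These two hooks are how an external frontend observes execution. A minimal sketch of a tracing line hook, assuming the program is built with `--debugger:on` so the hook is actually invoked (in practice the hook's own module should avoid line tracing so it does not re-enter itself):

```nim
proc myLineHook() {.nimcall.} =
  # invoked before every instrumented line
  let f = getFrame()
  if f != nil:
    echo f.filename, "(", f.line, ") ", f.procname

dbgLineHook = myLineHook
```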
|
||||
|
||||
proc checkWatchpoints =
|
||||
let L = watchPointsLen
|
||||
for i in 0 .. pred(L):
|
||||
let newHash = genericHash(watchPoints[i].address, watchPoints[i].typ)
|
||||
if newHash != watchPoints[i].oldValue:
|
||||
dbgWatchpointHook(watchPoints[i].name)
|
||||
watchPoints[i].oldValue = newHash
|
||||
|
||||
proc endb(line: int, file: cstring) {.compilerproc, noinline.} =
|
||||
# This proc is called before every Nim code line!
|
||||
if framePtr == nil: return
|
||||
if dbgWatchpointHook != nil: checkWatchpoints()
|
||||
framePtr.line = line # this is done here for smaller code size!
|
||||
framePtr.filename = file
|
||||
if dbgLineHook != nil: dbgLineHook()
|
||||
|
||||
include "system/endb"
|
||||
@@ -1,579 +0,0 @@
|
||||
#
|
||||
#
|
||||
# Nim's Runtime Library
|
||||
# (c) Copyright 2013 Andreas Rumpf
|
||||
#
|
||||
# See the file "copying.txt", included in this
|
||||
# distribution, for details about the copyright.
|
||||
#
|
||||
|
||||
# This file implements the embedded debugger that can be linked
|
||||
# with the application. Mostly we do not use dynamic memory here as that
|
||||
# would interfere with the GC and trigger ON/OFF errors if the
|
||||
# user program corrupts memory. Unfortunately, for displaying
|
||||
# variables we use the ``system.repr()`` proc which uses Nim
|
||||
# strings and thus allocates memory from the heap. Pity, but
|
||||
# I do not want to implement ``repr()`` twice.
|
||||
|
||||
const
|
||||
EndbBeg = "*** endb"
|
||||
EndbEnd = "***\n"
|
||||
|
||||
type
|
||||
StaticStr = object
|
||||
len: int
|
||||
data: array[0..100, char]
|
||||
|
||||
BreakpointFilename = object
|
||||
b: ptr Breakpoint
|
||||
filename: StaticStr
|
||||
|
||||
DbgState = enum
|
||||
dbOff, # debugger is turned off
|
||||
dbStepInto, # debugger is in tracing mode
|
||||
dbStepOver,
|
||||
dbSkipCurrent,
|
||||
dbQuiting, # debugger wants to quit
|
||||
dbBreakpoints # debugger is only interested in breakpoints
|
||||
|
||||
var
|
||||
dbgUser: StaticStr # buffer for user input; first command is ``step_into``
|
||||
# needs to be global because we store the last command
|
||||
# in it
|
||||
dbgState: DbgState # state of debugger
|
||||
dbgSkipToFrame: PFrame # frame to be skipped to
|
||||
|
||||
maxDisplayRecDepth: int = 5 # do not display too much data!
|
||||
|
||||
brkPoints: array[0..127, BreakpointFilename]
|
||||
|
||||
proc setLen(s: var StaticStr, newLen=0) =
|
||||
s.len = newLen
|
||||
s.data[newLen] = '\0'
|
||||
|
||||
proc add(s: var StaticStr, c: char) =
|
||||
if s.len < high(s.data)-1:
|
||||
s.data[s.len] = c
|
||||
s.data[s.len+1] = '\0'
|
||||
inc s.len
|
||||
|
||||
proc add(s: var StaticStr, c: cstring) =
|
||||
var i = 0
|
||||
while c[i] != '\0':
|
||||
add s, c[i]
|
||||
inc i
|
||||
|
||||
proc assign(s: var StaticStr, c: cstring) =
|
||||
setLen(s)
|
||||
add s, c
|
||||
|
||||
proc `==`(a, b: StaticStr): bool =
|
||||
if a.len == b.len:
|
||||
for i in 0 .. a.len-1:
|
||||
if a.data[i] != b.data[i]: return false
|
||||
return true
|
||||
|
||||
proc `==`(a: StaticStr, b: cstring): bool =
|
||||
result = c_strcmp(unsafeAddr a.data, b) == 0
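`StaticStr` is a small fixed-capacity, NUL-terminated string kept out of the GC heap so the debugger can build commands and messages even when the user program has corrupted memory. A minimal sketch of how the helpers above combine:

```nim
var cmd: StaticStr
cmd.assign("break ")            # setLen(0) followed by add
cmd.add("42")
doAssert cmd.len == 8
doAssert cmd == cstring("break 42")
```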
|
||||
|
||||
proc write(f: CFilePtr, s: cstring) = c_fputs(s, f)
|
||||
proc writeLine(f: CFilePtr, s: cstring) =
|
||||
c_fputs(s, f)
|
||||
c_fputs("\n", f)
|
||||
|
||||
proc write(f: CFilePtr, s: StaticStr) =
|
||||
write(f, cstring(unsafeAddr s.data))
|
||||
|
||||
proc write(f: CFilePtr, i: int) =
|
||||
when sizeof(int) == 8:
|
||||
discard c_fprintf(f, "%lld", i)
|
||||
else:
|
||||
discard c_fprintf(f, "%ld", i)
|
||||
|
||||
proc close(f: CFilePtr): cint {.
|
||||
importc: "fclose", header: "<stdio.h>", discardable.}
|
||||
|
||||
proc c_fgetc(stream: CFilePtr): cint {.
|
||||
importc: "fgetc", header: "<stdio.h>".}
|
||||
proc c_ungetc(c: cint, f: CFilePtr): cint {.
|
||||
importc: "ungetc", header: "<stdio.h>", discardable.}
|
||||
|
||||
var
|
||||
cstdin* {.importc: "stdin", header: "<stdio.h>".}: CFilePtr
|
||||
|
||||
proc listBreakPoints() =
|
||||
write(cstdout, EndbBeg)
|
||||
write(cstdout, "| Breakpoints:\n")
|
||||
for b in listBreakpoints():
|
||||
write(cstdout, abs(b.low))
|
||||
if b.high != b.low:
|
||||
write(cstdout, "..")
|
||||
write(cstdout, abs(b.high))
|
||||
write(cstdout, " ")
|
||||
write(cstdout, b.filename)
|
||||
if b.isActive:
|
||||
write(cstdout, " [disabled]\n")
|
||||
else:
|
||||
write(cstdout, "\n")
|
||||
write(cstdout, EndbEnd)
|
||||
|
||||
proc openAppend(filename: cstring): CFilePtr =
|
||||
proc fopen(filename, mode: cstring): CFilePtr {.importc: "fopen", header: "<stdio.h>".}
|
||||
|
||||
result = fopen(filename, "ab")
|
||||
if result != nil:
|
||||
write(result, "----------------------------------------\n")
|
||||
|
||||
proc dbgRepr(p: pointer, typ: PNimType): string =
|
||||
var cl: ReprClosure
|
||||
initReprClosure(cl)
|
||||
cl.recDepth = maxDisplayRecDepth
|
||||
# locks for the GC turned out to be a bad idea...
|
||||
# inc(recGcLock)
|
||||
result = ""
|
||||
reprAux(result, p, typ, cl)
|
||||
# dec(recGcLock)
|
||||
deinitReprClosure(cl)
|
||||
|
||||
proc writeVariable(stream: CFilePtr, slot: VarSlot) =
|
||||
write(stream, slot.name)
|
||||
write(stream, " = ")
|
||||
writeLine(stream, dbgRepr(slot.address, slot.typ))
|
||||
|
||||
proc listFrame(stream: CFilePtr, f: PFrame) =
|
||||
write(stream, EndbBeg)
|
||||
write(stream, "| Frame (")
|
||||
write(stream, f.len)
|
||||
write(stream, " slots):\n")
|
||||
for i in 0 .. f.len-1:
|
||||
writeLine(stream, getLocal(f, i).name)
|
||||
write(stream, EndbEnd)
|
||||
|
||||
proc listLocals(stream: CFilePtr, f: PFrame) =
|
||||
write(stream, EndbBeg)
|
||||
write(stream, "| Frame (")
|
||||
write(stream, f.len)
|
||||
write(stream, " slots):\n")
|
||||
for i in 0 .. f.len-1:
|
||||
writeVariable(stream, getLocal(f, i))
|
||||
write(stream, EndbEnd)
|
||||
|
||||
proc listGlobals(stream: CFilePtr) =
|
||||
write(stream, EndbBeg)
|
||||
write(stream, "| Globals:\n")
|
||||
for i in 0 .. getGlobalLen()-1:
|
||||
writeLine(stream, getGlobal(i).name)
|
||||
write(stream, EndbEnd)
|
||||
|
||||
proc debugOut(msg: cstring) =
|
||||
# the *** *** markers are for easy recognition of debugger
|
||||
# output for external frontends.
|
||||
write(cstdout, EndbBeg)
|
||||
write(cstdout, "| ")
|
||||
write(cstdout, msg)
|
||||
write(cstdout, EndbEnd)
|
||||
|
||||
proc dbgFatal(msg: cstring) =
|
||||
debugOut(msg)
|
||||
dbgAborting = true # the debugger wants to abort
|
||||
quit(1)
|
||||
|
||||
proc dbgShowCurrentProc(dbgFramePointer: PFrame) =
|
||||
if dbgFramePointer != nil:
|
||||
write(cstdout, "*** endb| now in proc: ")
|
||||
write(cstdout, dbgFramePointer.procname)
|
||||
write(cstdout, " ***\n")
|
||||
else:
|
||||
write(cstdout, "*** endb| (proc name not available) ***\n")
|
||||
|
||||
proc dbgShowExecutionPoint() =
|
||||
write(cstdout, "*** endb| ")
|
||||
write(cstdout, framePtr.filename)
|
||||
write(cstdout, "(")
|
||||
write(cstdout, framePtr.line)
|
||||
write(cstdout, ") ")
|
||||
write(cstdout, framePtr.procname)
|
||||
write(cstdout, " ***\n")
|
||||
|
||||
proc scanAndAppendWord(src: cstring, a: var StaticStr, start: int): int =
|
||||
result = start
|
||||
# skip whitespace:
|
||||
while src[result] in {'\t', ' '}: inc(result)
|
||||
while true:
|
||||
case src[result]
|
||||
of 'a'..'z', '0'..'9': add(a, src[result])
|
||||
of '_': discard # just skip it
|
||||
of 'A'..'Z': add(a, chr(ord(src[result]) - ord('A') + ord('a')))
|
||||
else: break
|
||||
inc(result)
|
||||
|
||||
proc scanWord(src: cstring, a: var StaticStr, start: int): int =
|
||||
setlen(a)
|
||||
result = scanAndAppendWord(src, a, start)
|
||||
|
||||
proc scanFilename(src: cstring, a: var StaticStr, start: int): int =
|
||||
result = start
|
||||
setLen a
|
||||
while src[result] in {'\t', ' '}: inc(result)
|
||||
while src[result] notin {'\t', ' ', '\0'}:
|
||||
add(a, src[result])
|
||||
inc(result)
|
||||
|
||||
proc scanNumber(src: cstring, a: var int, start: int): int =
|
||||
result = start
|
||||
a = 0
|
||||
while src[result] in {'\t', ' '}: inc(result)
|
||||
while true:
|
||||
case src[result]
|
||||
of '0'..'9': a = a * 10 + ord(src[result]) - ord('0')
|
||||
of '_': discard # skip underscores (nice for long line numbers)
|
||||
else: break
|
||||
inc(result)
|
||||
|
||||
proc dbgHelp() =
|
||||
debugOut("""
|
||||
list of commands (see the manual for further help):
|
||||
GENERAL
|
||||
h, help display this help message
|
||||
q, quit quit the debugger and the program
|
||||
<ENTER> repeat the previous debugger command
|
||||
EXECUTING
|
||||
s, step single step, stepping into routine calls
|
||||
n, next single step, without stepping into routine calls
|
||||
f, skipcurrent continue execution until the current routine finishes
|
||||
c, continue, r, run continue execution until the next breakpoint
|
||||
i, ignore continue execution, ignore all breakpoints
|
||||
BREAKPOINTS
|
||||
b, break [fromline [toline]] [file]
|
||||
set a new breakpoint for line and file
|
||||
if line or file is omitted the current one is used
|
||||
breakpoints display the entire breakpoint list
|
||||
toggle fromline [file] enable or disable a breakpoint
|
||||
filenames list all valid filenames
|
||||
DATA DISPLAY
|
||||
e, eval <expr> evaluate the expression <expr>
|
||||
o, out <file> <expr> evaluate <expr> and write it to <file>
|
||||
w, where display the current execution point
|
||||
stackframe [file] display current stack frame [and write it to file]
|
||||
u, up go up in the call stack
|
||||
d, down go down in the call stack
|
||||
bt, backtrace display the entire call stack
|
||||
l, locals display available local variables
|
||||
g, globals display available global variables
|
||||
maxdisplay <integer> set the display's recursion maximum
|
||||
""")
|
||||
|
||||
proc invalidCommand() =
|
||||
debugOut("[Warning] invalid command ignored (type 'h' for help) ")
|
||||
|
||||
proc hasExt(s: cstring): bool =
|
||||
# returns true if s has a filename extension
|
||||
var i = 0
|
||||
while s[i] != '\0':
|
||||
if s[i] == '.': return true
|
||||
inc i
|
||||
|
||||
proc parseBreakpoint(s: cstring, start: int): Breakpoint =
|
||||
var dbgTemp: StaticStr
|
||||
var i = scanNumber(s, result.low, start)
|
||||
if result.low == 0: result.low = framePtr.line
|
||||
i = scanNumber(s, result.high, i)
|
||||
if result.high == 0: result.high = result.low
|
||||
i = scanFilename(s, dbgTemp, i)
|
||||
if dbgTemp.len != 0:
|
||||
if not hasExt(addr dbgTemp.data): add(dbgTemp, ".nim")
|
||||
result.filename = canonFilename(addr dbgTemp.data)
|
||||
if result.filename.isNil:
|
||||
debugOut("[Warning] no breakpoint could be set; unknown filename ")
|
||||
return
|
||||
else:
|
||||
result.filename = framePtr.filename
|
||||
|
||||
proc createBreakPoint(s: cstring, start: int) =
|
||||
let br = parseBreakpoint(s, start)
|
||||
if not br.filename.isNil:
|
||||
if not addBreakpoint(br.filename, br.low, br.high):
|
||||
debugOut("[Warning] no breakpoint could be set; out of breakpoint space ")
|
||||
|
||||
proc breakpointToggle(s: cstring, start: int) =
|
||||
var a = parseBreakpoint(s, start)
|
||||
if not a.filename.isNil:
|
||||
var b = checkBreakpoints(a.filename, a.low)
|
||||
if not b.isNil: b.flip
|
||||
else: debugOut("[Warning] unknown breakpoint ")
|
||||
|
||||
proc dbgEvaluate(stream: CFilePtr, s: cstring, start: int, f: PFrame) =
|
||||
var dbgTemp: StaticStr
|
||||
var i = scanWord(s, dbgTemp, start)
|
||||
while s[i] in {' ', '\t'}: inc(i)
|
||||
var v: VarSlot
|
||||
if s[i] == '.':
|
||||
inc(i)
|
||||
add(dbgTemp, '.')
|
||||
i = scanAndAppendWord(s, dbgTemp, i)
|
||||
for i in 0 .. getGlobalLen()-1:
|
||||
let v = getGlobal(i)
|
||||
if c_strcmp(v.name, addr dbgTemp.data) == 0:
|
||||
writeVariable(stream, v)
|
||||
else:
|
||||
for i in 0 .. f.len-1:
|
||||
let v = getLocal(f, i)
|
||||
if c_strcmp(v.name, addr dbgTemp.data) == 0:
|
||||
writeVariable(stream, v)
|
||||
|
||||
proc dbgOut(s: cstring, start: int, currFrame: PFrame) =
|
||||
var dbgTemp: StaticStr
|
||||
var i = scanFilename(s, dbgTemp, start)
|
||||
if dbgTemp.len == 0:
|
||||
invalidCommand()
|
||||
return
|
||||
var stream = openAppend(addr dbgTemp.data)
|
||||
if stream == nil:
|
||||
debugOut("[Warning] could not open or create file ")
|
||||
return
|
||||
dbgEvaluate(stream, s, i, currFrame)
|
||||
close(stream)
|
||||
|
||||
proc dbgStackFrame(s: cstring, start: int, currFrame: PFrame) =
|
||||
var dbgTemp: StaticStr
|
||||
var i = scanFilename(s, dbgTemp, start)
|
||||
if dbgTemp.len == 0:
|
||||
# just write it to cstdout:
|
||||
listFrame(cstdout, currFrame)
|
||||
else:
|
||||
var stream = openAppend(addr dbgTemp.data)
|
||||
if stream == nil:
|
||||
debugOut("[Warning] could not open or create file ")
|
||||
return
|
||||
listFrame(stream, currFrame)
|
||||
close(stream)
|
||||
|
||||
proc readLine(f: CFilePtr, line: var StaticStr): bool =
|
||||
while true:
|
||||
var c = c_fgetc(f)
|
||||
if c < 0'i32:
|
||||
if line.len > 0: break
|
||||
else: return false
|
||||
if c == 10'i32: break # LF
|
||||
if c == 13'i32: # CR
|
||||
c = c_fgetc(f) # is the next char LF?
|
||||
if c != 10'i32: discard c_ungetc(c, f) # no, put the character back
|
||||
break
|
||||
add line, chr(int(c))
|
||||
result = true
|
||||
|
||||
proc listFilenames() =
|
||||
write(cstdout, EndbBeg)
|
||||
write(cstdout, "| Files:\n")
|
||||
var i = 0
|
||||
while true:
|
||||
let x = dbgFilenames[i]
|
||||
if x.isNil: break
|
||||
write(cstdout, x)
|
||||
write(cstdout, "\n")
|
||||
inc i
|
||||
write(cstdout, EndbEnd)
|
||||
|
||||
proc dbgWriteStackTrace(f: PFrame)
|
||||
proc commandPrompt() =
|
||||
# if we return from this routine, user code executes again
|
||||
var
|
||||
again = true
|
||||
dbgFramePtr = framePtr # for going down and up the stack
|
||||
dbgDown = 0 # how often we did go down
|
||||
dbgTemp: StaticStr
|
||||
|
||||
while again:
|
||||
write(cstdout, "*** endb| >>")
|
||||
let oldLen = dbgUser.len
|
||||
dbgUser.len = 0
|
||||
if not readLine(cstdin, dbgUser): break
|
||||
if dbgUser.len == 0: dbgUser.len = oldLen
|
||||
# now look what we have to do:
|
||||
var i = scanWord(addr dbgUser.data, dbgTemp, 0)
|
||||
template `?`(x: untyped): untyped = dbgTemp == cstring(x)
|
||||
if ?"s" or ?"step":
|
||||
dbgState = dbStepInto
|
||||
again = false
|
||||
elif ?"n" or ?"next":
|
||||
dbgState = dbStepOver
|
||||
dbgSkipToFrame = framePtr
|
||||
again = false
|
||||
elif ?"f" or ?"skipcurrent":
|
||||
dbgState = dbSkipCurrent
|
||||
dbgSkipToFrame = framePtr.prev
|
||||
again = false
|
||||
elif ?"c" or ?"continue" or ?"r" or ?"run":
|
||||
dbgState = dbBreakpoints
|
||||
again = false
|
||||
elif ?"i" or ?"ignore":
|
||||
dbgState = dbOff
|
||||
again = false
|
||||
elif ?"h" or ?"help":
|
||||
dbgHelp()
|
||||
elif ?"q" or ?"quit":
|
||||
dbgState = dbQuiting
|
||||
dbgAborting = true
|
||||
again = false
|
||||
quit(1) # BUGFIX: quit with error code > 0
|
||||
elif ?"e" or ?"eval":
|
||||
var
|
||||
prevState = dbgState
|
||||
prevSkipFrame = dbgSkipToFrame
|
||||
dbgState = dbSkipCurrent
|
||||
dbgEvaluate(cstdout, addr dbgUser.data, i, dbgFramePtr)
|
||||
dbgState = prevState
|
||||
dbgSkipToFrame = prevSkipFrame
|
||||
elif ?"o" or ?"out":
|
||||
dbgOut(addr dbgUser.data, i, dbgFramePtr)
|
||||
elif ?"stackframe":
|
||||
dbgStackFrame(addr dbgUser.data, i, dbgFramePtr)
|
||||
elif ?"w" or ?"where":
|
||||
dbgShowExecutionPoint()
|
||||
elif ?"l" or ?"locals":
|
||||
var
|
||||
prevState = dbgState
|
||||
prevSkipFrame = dbgSkipToFrame
|
||||
dbgState = dbSkipCurrent
|
||||
listLocals(cstdout, dbgFramePtr)
|
||||
dbgState = prevState
|
||||
dbgSkipToFrame = prevSkipFrame
|
||||
elif ?"g" or ?"globals":
|
||||
var
|
||||
prevState = dbgState
|
||||
prevSkipFrame = dbgSkipToFrame
|
||||
dbgState = dbSkipCurrent
|
||||
listGlobals(cstdout)
|
||||
dbgState = prevState
|
||||
dbgSkipToFrame = prevSkipFrame
|
||||
elif ?"u" or ?"up":
|
||||
if dbgDown <= 0:
|
||||
debugOut("[Warning] cannot go up any further ")
|
||||
else:
|
||||
dbgFramePtr = framePtr
|
||||
for j in 0 .. dbgDown-2: # BUGFIX
|
||||
dbgFramePtr = dbgFramePtr.prev
|
||||
dec(dbgDown)
|
||||
dbgShowCurrentProc(dbgFramePtr)
|
||||
elif ?"d" or ?"down":
|
||||
if dbgFramePtr != nil:
|
||||
inc(dbgDown)
|
||||
dbgFramePtr = dbgFramePtr.prev
|
||||
dbgShowCurrentProc(dbgFramePtr)
|
||||
else:
|
||||
debugOut("[Warning] cannot go down any further ")
|
||||
elif ?"bt" or ?"backtrace":
|
||||
dbgWriteStackTrace(framePtr)
|
||||
elif ?"b" or ?"break":
|
||||
createBreakPoint(addr dbgUser.data, i)
|
||||
elif ?"breakpoints":
|
||||
listBreakPoints()
|
||||
elif ?"toggle":
|
||||
breakpointToggle(addr dbgUser.data, i)
|
||||
elif ?"filenames":
|
||||
listFilenames()
|
||||
elif ?"maxdisplay":
|
||||
var parsed: int
|
||||
i = scanNumber(addr dbgUser.data, parsed, i)
|
||||
if dbgUser.data[i-1] in {'0'..'9'}:
|
||||
if parsed == 0: maxDisplayRecDepth = -1
|
||||
else: maxDisplayRecDepth = parsed
|
||||
else:
|
||||
invalidCommand()
|
||||
else: invalidCommand()
|
||||
|
||||
proc endbStep() =
|
||||
# we end up here if an unhandled exception has been raised
|
||||
# XXX: do not allow the user to run the program any further?
|
||||
# XXX: BUG: the frame is lost here!
|
||||
dbgShowExecutionPoint()
|
||||
commandPrompt()
|
||||
|
||||
proc dbgWriteStackTrace(f: PFrame) =
|
||||
const
|
||||
firstCalls = 32
|
||||
var
|
||||
it = f
|
||||
i = 0
|
||||
total = 0
|
||||
tempFrames: array[0..127, PFrame]
|
||||
# setup long head:
|
||||
while it != nil and i <= high(tempFrames)-firstCalls:
|
||||
tempFrames[i] = it
|
||||
inc(i)
|
||||
inc(total)
|
||||
it = it.prev
|
||||
# go up the stack to count 'total':
|
||||
var b = it
|
||||
while it != nil:
|
||||
inc(total)
|
||||
it = it.prev
|
||||
var skipped = 0
|
||||
if total > len(tempFrames):
|
||||
# skip N
|
||||
skipped = total-i-firstCalls+1
|
||||
for j in 1..skipped:
|
||||
if b != nil: b = b.prev
|
||||
# create '...' entry:
|
||||
tempFrames[i] = nil
|
||||
inc(i)
|
||||
# setup short tail:
|
||||
while b != nil and i <= high(tempFrames):
|
||||
tempFrames[i] = b
|
||||
inc(i)
|
||||
b = b.prev
|
||||
for j in countdown(i-1, 0):
|
||||
if tempFrames[j] == nil:
|
||||
write(cstdout, "(")
|
||||
write(cstdout, skipped)
|
||||
write(cstdout, " calls omitted) ...")
|
||||
else:
|
||||
write(cstdout, tempFrames[j].filename)
|
||||
if tempFrames[j].line > 0:
|
||||
write(cstdout, "(")
|
||||
write(cstdout, tempFrames[j].line)
|
||||
write(cstdout, ")")
|
||||
write(cstdout, " ")
|
||||
write(cstdout, tempFrames[j].procname)
|
||||
write(cstdout, "\n")
|
||||
|
||||
proc checkForBreakpoint =
|
||||
let b = checkBreakpoints(framePtr.filename, framePtr.line)
|
||||
if b != nil:
|
||||
write(cstdout, "*** endb| reached ")
|
||||
write(cstdout, framePtr.filename)
|
||||
write(cstdout, "(")
|
||||
write(cstdout, framePtr.line)
|
||||
write(cstdout, ") ")
|
||||
write(cstdout, framePtr.procname)
|
||||
write(cstdout, " ***\n")
|
||||
commandPrompt()
|
||||
|
||||
proc lineHookImpl() {.nimcall.} =
|
||||
case dbgState
|
||||
of dbStepInto:
|
||||
# we really want the command prompt here:
|
||||
dbgShowExecutionPoint()
|
||||
commandPrompt()
|
||||
of dbSkipCurrent, dbStepOver: # skip current routine
|
||||
if framePtr == dbgSkipToFrame:
|
||||
dbgShowExecutionPoint()
|
||||
commandPrompt()
|
||||
else:
|
||||
# breakpoints are wanted though (I guess)
|
||||
checkForBreakpoint()
|
||||
of dbBreakpoints:
|
||||
# debugger is only interested in breakpoints
|
||||
checkForBreakpoint()
|
||||
else: discard
|
||||
|
||||
proc watchpointHookImpl(name: cstring) {.nimcall.} =
|
||||
dbgWriteStackTrace(framePtr)
|
||||
debugOut(name)
|
||||
|
||||
proc initDebugger {.inline.} =
|
||||
dbgState = dbStepInto
|
||||
dbgUser.len = 1
|
||||
dbgUser.data[0] = 's'
|
||||
dbgWatchpointHook = watchpointHookImpl
|
||||
dbgLineHook = lineHookImpl
|
||||
@@ -38,10 +38,7 @@ proc showErrorMessage(data: cstring) {.gcsafe.} =
|
||||
writeToStdErr(data)
|
||||
|
||||
proc quitOrDebug() {.inline.} =
|
||||
when defined(endb):
|
||||
endbStep() # call the debugger
|
||||
else:
|
||||
quit(1)
|
||||
quit(1)
|
||||
|
||||
proc chckIndx(i, a, b: int): int {.inline, compilerproc, benign.}
|
||||
proc chckRange(i, a, b: int): int {.inline, compilerproc, benign.}
|
||||
@@ -469,10 +466,6 @@ proc nimFrame(s: PFrame) {.compilerRtl, inl.} =
|
||||
framePtr = s
|
||||
if s.calldepth == nimCallDepthLimit: callDepthLimitReached()
|
||||
|
||||
when defined(endb):
|
||||
var
|
||||
dbgAborting: bool # whether the debugger wants to abort
|
||||
|
||||
when defined(cpp) and appType != "lib" and
|
||||
not defined(js) and not defined(nimscript) and
|
||||
hostOS != "standalone" and not defined(noCppExceptions):
|
||||
@@ -515,8 +508,6 @@ when not defined(noSignalHandler) and not defined(useNimRtl):
|
||||
elif s == SIGSEGV:
|
||||
action("SIGSEGV: Illegal storage access. (Attempt to read from nil?)\n")
|
||||
elif s == SIGABRT:
|
||||
when defined(endb):
|
||||
if dbgAborting: return # the debugger wants to abort
|
||||
action("SIGABRT: Abnormal termination.\n")
|
||||
elif s == SIGFPE: action("SIGFPE: Arithmetic error.\n")
|
||||
elif s == SIGILL: action("SIGILL: Illegal operation.\n")
|
||||
@@ -546,7 +537,6 @@ when not defined(noSignalHandler) and not defined(useNimRtl):
|
||||
msg = y
|
||||
processSignal(sign, asgn)
|
||||
showErrorMessage(msg)
|
||||
when defined(endb): dbgAborting = true
|
||||
quit(1) # always quit when SIGABRT
|
||||
|
||||
proc registerSignalHandler() =
|
||||
|
||||
@@ -511,7 +511,7 @@ when not defined(useNimRtl):
|
||||
gch.tracing = true
|
||||
|
||||
proc GC_fullCollect() =
|
||||
var oldThreshold = gch.cycleThreshold
|
||||
let oldThreshold = gch.cycleThreshold
|
||||
gch.cycleThreshold = 0 # forces cycle collection
|
||||
collectCT(gch, 0)
|
||||
gch.cycleThreshold = oldThreshold
|
||||
|
||||
@@ -85,7 +85,7 @@ iterator pairs*[T](a: openArray[T]): tuple[key: int, val: T] {.inline.} =
|
||||
yield (i, a[i])
|
||||
inc(i)
|
||||
|
||||
iterator mpairs*[T](a: var openArray[T]): tuple[key:int, val:var T]{.inline.} =
|
||||
iterator mpairs*[T](a: var openArray[T]): tuple[key: int, val: var T]{.inline.} =
|
||||
## Iterates over each item of `a`. Yields ``(index, a[index])`` pairs.
|
||||
## ``a[index]`` can be modified.
|
||||
var i = 0
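As the doc comment says, `mpairs` yields the index together with a mutable view of each element. A minimal usage sketch:

```nim
var xs = @[1, 2, 3]
for i, x in mpairs(xs):
  x = x * 10 + i            # writes through to xs[i]
doAssert xs == @[10, 21, 32]
```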
|
||||
@@ -102,7 +102,7 @@ iterator pairs*[IX, T](a: array[IX, T]): tuple[key: IX, val: T] {.inline.} =
|
||||
if i >= high(IX): break
|
||||
inc(i)
|
||||
|
||||
iterator mpairs*[IX, T](a:var array[IX, T]):tuple[key:IX,val:var T] {.inline.} =
|
||||
iterator mpairs*[IX, T](a: var array[IX, T]): tuple[key: IX, val: var T] {.inline.} =
|
||||
## Iterates over each item of `a`. Yields ``(index, a[index])`` pairs.
|
||||
## ``a[index]`` can be modified.
|
||||
var i = low(IX)
|
||||
@@ -179,7 +179,6 @@ iterator mpairs*(a: var cstring): tuple[key: int, val: var char] {.inline.} =
|
||||
yield (i, a[i])
|
||||
inc(i)
|
||||
|
||||
|
||||
iterator items*[T](a: seq[T]): T {.inline.} =
|
||||
## Iterates over each item of `a`.
|
||||
var i = 0
|
||||
@@ -224,7 +223,7 @@ iterator fields*[T: tuple|object](x: T): RootObj {.
|
||||
## **Warning**: This really transforms the 'for' and unrolls the loop.
|
||||
## The current implementation also has a bug
|
||||
## that affects symbol binding in the loop body.
|
||||
iterator fields*[S:tuple|object, T:tuple|object](x: S, y: T): tuple[a,b: untyped] {.
|
||||
iterator fields*[S:tuple|object, T:tuple|object](x: S, y: T): tuple[a, b: RootObj] {.
|
||||
magic: "Fields", noSideEffect.}
|
||||
## Iterates over every field of `x` and `y`.
|
||||
##
|
||||
@@ -266,7 +265,7 @@ iterator fieldPairs*[T: tuple|object](x: T): RootObj {.
|
||||
## loop body.
|
||||
|
||||
iterator fieldPairs*[S: tuple|object, T: tuple|object](x: S, y: T): tuple[
|
||||
a, b: untyped] {.
|
||||
a, b: RootObj] {.
|
||||
magic: "FieldPairs", noSideEffect.}
|
||||
## Iterates over every field of `x` and `y`.
|
||||
##
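`fieldPairs` (and `fields`) are unrolled by the compiler into one loop body per field. A minimal usage sketch on a plain object:

```nim
type Person = object
  name: string
  age: int

let p = Person(name: "Ada", age: 36)
for key, value in fieldPairs(p):
  echo key, ": ", value     # prints "name: Ada" and then "age: 36"
```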
|
||||
|
||||
@@ -717,3 +717,15 @@ proc processCategory(r: var TResults, cat: Category,
|
||||
inc testsRun
|
||||
if testsRun == 0:
|
||||
echo "[Warning] - Invalid category specified \"", cat.string, "\", no tests were run"
|
||||
|
||||
proc processPattern(r: var TResults, pattern, options: string; simulate: bool) =
|
||||
var testsRun = 0
|
||||
for name in walkPattern(pattern):
|
||||
if simulate:
|
||||
echo "Detected test: ", name
|
||||
else:
|
||||
var test = makeTest(name, options, Category"pattern")
|
||||
testSpec r, test
|
||||
inc testsRun
|
||||
if testsRun == 0:
|
||||
echo "no tests were found for pattern: ", pattern
|
||||
|
||||
@@ -7,7 +7,8 @@ var packages*: seq[tuple[name, cmd: string; hasDeps: bool; url: string]] = @[]
|
||||
|
||||
pkg "argparse"
|
||||
pkg "arraymancer", "nim c -r src/arraymancer.nim", true
|
||||
pkg "ast_pattern_matching", "nim c -r tests/test1.nim"
|
||||
pkg "ast_pattern_matching", "nim c -r --useVersion=0.19 tests/test1.nim"
|
||||
pkg "bigints"
|
||||
pkg "binaryheap", "nim c -r binaryheap.nim"
|
||||
pkg "blscurve", "", true
|
||||
pkg "bncurve", "", true
|
||||
@@ -56,12 +57,12 @@ pkg "nimquery"
|
||||
pkg "nimsl", "", true
|
||||
pkg "nimsvg"
|
||||
pkg "nimx", "nim c --threads:on test/main.nim", true
|
||||
pkg "norm", "nim c -r tests/testsqlite.nim", true
|
||||
pkg "norm", "nim c -r tests/tsqlite.nim", true
|
||||
pkg "npeg"
|
||||
pkg "ormin", "nim c -o:orminn ormin.nim", true
|
||||
pkg "parsetoml"
|
||||
pkg "patty"
|
||||
pkg "plotly", "nim c examples/all.nim", true
|
||||
pkg "plotly", "nim c --useVersion:0.19 examples/all.nim", true
|
||||
pkg "protobuf", "nim c -o:protobuff -r src/protobuf.nim", true
|
||||
pkg "regex", "nim c src/regex", true
|
||||
pkg "result", "nim c -r result.nim"
|
||||
@@ -71,7 +72,7 @@ pkg "sdl2_nim", "nim c -r sdl2/sdl.nim"
|
||||
pkg "snip", "", false, "https://github.com/genotrance/snip"
|
||||
pkg "stint", "nim c -o:stintt -r stint.nim"
|
||||
pkg "strunicode", "nim c -r src/strunicode.nim", true
|
||||
pkg "telebot", "nim c -o:tbot -r telebot.nim", true
|
||||
pkg "telebot", "nim c -o:tbot --useVersion:0.19 -r telebot.nim", true
|
||||
pkg "tiny_sqlite"
|
||||
pkg "unicodedb"
|
||||
pkg "unicodeplus", "", true
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#
|
||||
#
|
||||
# Nim Tester
|
||||
# Nim Testament
|
||||
# (c) Copyright 2017 Andreas Rumpf
|
||||
#
|
||||
# See the file "copying.txt", included in this
|
||||
@@ -25,9 +25,10 @@ const
|
||||
resultsFile = "testresults.html"
|
||||
#jsonFile = "testresults.json" # not used
|
||||
Usage = """Usage:
|
||||
tester [options] command [arguments]
|
||||
testament [options] command [arguments]
|
||||
|
||||
Command:
|
||||
p|pat|pattern <glob> run all the tests matching the given pattern
|
||||
all run all tests
|
||||
c|cat|category <category> run all the tests of a certain category
|
||||
r|run <test> run single test file
|
||||
@@ -435,6 +436,10 @@ proc testSpecHelper(r: var TResults, test: TTest, expected: TSpec, target: TTarg
|
||||
if isJsTarget:
|
||||
exeCmd = nodejs
|
||||
args = concat(@[exeFile], args)
|
||||
elif defined(posix) and not exeFile.contains('/'):
|
||||
# "security" in Posix is actually just a euphemism
|
||||
# for "unproductive arbitrary shit"
|
||||
exeCmd = "./" & exeFile
|
||||
else:
|
||||
exeCmd = exeFile
|
||||
var (_, buf, exitCode) = execCmdEx2(exeCmd, args, input = expected.input)
|
||||
@@ -594,7 +599,7 @@ proc main() =
|
||||
|
||||
var p = initOptParser()
|
||||
p.next()
|
||||
while p.kind == cmdLongoption:
|
||||
while p.kind in {cmdLongoption, cmdShortOption}:
|
||||
case p.key.string.normalize
|
||||
of "print", "verbose": optPrintResults = true
|
||||
of "failing": optFailing = true
|
||||
@@ -646,7 +651,7 @@ proc main() =
|
||||
of "all":
|
||||
#processCategory(r, Category"megatest", p.cmdLineRest.string, testsDir, runJoinableTests = false)
|
||||
|
||||
var myself = quoteShell(findExe("testament" / "tester"))
|
||||
var myself = quoteShell(findExe("testament" / "testament"))
|
||||
if targetsStr.len > 0:
|
||||
myself &= " " & quoteShell("--targets:" & targetsStr)
|
||||
|
||||
@@ -694,6 +699,11 @@ proc main() =
|
||||
var cat = Category(p.key)
|
||||
p.next
|
||||
processCategory(r, cat, p.cmdLineRest.string, testsDir, runJoinableTests = false)
|
||||
of "p", "pat", "pattern":
|
||||
skips = loadSkipFrom(skipFrom)
|
||||
let pattern = p.key
|
||||
p.next
|
||||
processPattern(r, pattern, p.cmdLineRest.string, simulate)
|
||||
of "r", "run":
|
||||
# at least one directory is required in the path, to use as a category name
|
||||
let pathParts = split(p.key.string, {DirSep, AltSep})
|
||||
@@ -143,5 +143,5 @@ block tsubrange:
|
||||
var level: n16 = 1
|
||||
let maxLevel: n16 = 1
|
||||
|
||||
level = min(level + 2, maxLevel)
|
||||
level = min(level + 2, maxLevel).n16
|
||||
doAssert level == 1
|
||||
|
||||
@@ -6,6 +6,74 @@ action: compile
|
||||
|
||||
import ../ast_pattern_matching
|
||||
|
||||
template expectNimNode(arg: untyped): NimNode = arg
|
||||
## This template is only injected by `myquote` so that a nice
## error message appears when the captured symbols are not of
## type `NimNode`.
|
||||
|
||||
proc substitudeComments(symbols, values, n: NimNode): NimNode =
|
||||
## substitutes all nodes of kind nnkCommentStmt with parameter
|
||||
## symbols. Consumes the argument `n`.
|
||||
if n.kind == nnkCommentStmt:
|
||||
values.add newCall(bindSym"newCommentStmtNode", newLit(n.strVal))
|
||||
# Gensym doesn't work for parameters. These identifiers won't
|
||||
# clash unless an argument is constructed to clash here.
|
||||
symbols.add ident("comment" & $values.len & "_XObBdOnh6meCuJK2smZV")
|
||||
return symbols[^1]
|
||||
for i in 0 ..< n.len:
|
||||
n[i] = substitudeComments(symbols, values, n[i])
|
||||
return n
|
||||
|
||||
macro myquote*(args: varargs[untyped]): untyped =
|
||||
expectMinLen(args, 1)
|
||||
|
||||
# This is a workaround for #10430 where comments are removed in
|
||||
# template expansions. This workaround lifts all comment
|
||||
# statements to be arguments of the temporary template.
|
||||
|
||||
let extraCommentSymbols = newNimNode(nnkBracket)
|
||||
let extraCommentGenExpr = newNimNode(nnkBracket)
|
||||
let body = substitudeComments(
|
||||
extraCommentSymbols, extraCommentGenExpr, args[^1]
|
||||
)
|
||||
|
||||
let formalParams = nnkFormalParams.newTree(ident"untyped")
|
||||
for i in 0 ..< args.len-1:
|
||||
formalParams.add nnkIdentDefs.newTree(
|
||||
args[i], ident"untyped", newEmptyNode()
|
||||
)
|
||||
for sym in extraCommentSymbols:
|
||||
formalParams.add nnkIdentDefs.newTree(
|
||||
sym, ident"untyped", newEmptyNode()
|
||||
)
|
||||
|
||||
let templateSym = genSym(nskTemplate)
|
||||
let templateDef = nnkTemplateDef.newTree(
|
||||
templateSym,
|
||||
newEmptyNode(),
|
||||
newEmptyNode(),
|
||||
formalParams,
|
||||
nnkPragma.newTree(ident"dirty"),
|
||||
newEmptyNode(),
|
||||
args[^1]
|
||||
)
|
||||
|
||||
let templateCall = newCall(templateSym)
|
||||
for i in 0 ..< args.len-1:
|
||||
let symName = args[i]
|
||||
# identifiers and quoted identifiers are allowed.
|
||||
if symName.kind == nnkAccQuoted:
|
||||
symName.expectLen 1
|
||||
symName[0].expectKind nnkIdent
|
||||
else:
|
||||
symName.expectKind nnkIdent
|
||||
templateCall.add newCall(bindSym"expectNimNode", symName)
|
||||
for expr in extraCommentGenExpr:
|
||||
templateCall.add expr
|
||||
let getAstCall = newCall(bindSym"getAst", templateCall)
|
||||
result = newStmtList(templateDef, getAstCall)
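The macro builds a throwaway `{.dirty.}` template whose parameters are the captured identifiers (plus one parameter per lifted comment) and immediately expands it with `getAst`, which is essentially how `quote do` works. A minimal usage sketch, assuming `macros` is imported, `myquote` is in scope, and it is called from inside another macro where the captured name is already a `NimNode`:

```nim
import macros

macro demo(): untyped =
  let x = newLit(123)       # the NimNode to splice into the quoted code
  result = myquote(x):
    echo x                  # expands to `echo 123`

demo()
```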
|
||||
|
||||
|
||||
macro testAddrAst(arg: typed): bool =
|
||||
arg.expectKind nnkStmtListExpr
|
||||
arg[0].expectKind(nnkVarSection)
|
||||
@@ -49,34 +117,35 @@ static:
|
||||
echo "OK"
|
||||
|
||||
|
||||
testPattern nnkIntLit(intVal = 42) , 42
|
||||
testPattern nnkInt8Lit(intVal = 42) , 42'i8
|
||||
testPattern nnkInt16Lit(intVal = 42) , 42'i16
|
||||
testPattern nnkInt32Lit(intVal = 42) , 42'i32
|
||||
testPattern nnkInt64Lit(intVal = 42) , 42'i64
|
||||
testPattern nnkUInt8Lit(intVal = 42) , 42'u8
|
||||
testPattern nnkUInt16Lit(intVal = 42) , 42'u16
|
||||
testPattern nnkUInt32Lit(intVal = 42) , 42'u32
|
||||
testPattern nnkUInt64Lit(intVal = 42) , 42'u64
|
||||
#testPattern nnkFloat64Lit(floatVal = 42.0) , 42.0
|
||||
testPattern nnkFloat32Lit(floatVal = 42.0) , 42.0'f32
|
||||
#testPattern nnkFloat64Lit(floatVal = 42.0) , 42.0'f64
|
||||
testPattern nnkStrLit(strVal = "abc") , "abc"
|
||||
testPattern nnkRStrLit(strVal = "abc") , r"abc"
|
||||
testPattern nnkTripleStrLit(strVal = "abc") , """abc"""
|
||||
testPattern nnkCharLit(intVal = 32) , ' '
|
||||
testPattern nnkNilLit() , nil
|
||||
testPattern nnkIdent(strVal = "myIdentifier") , myIdentifier
|
||||
testPattern nnkIntLit(intVal = 42), 42
|
||||
testPattern nnkInt8Lit(intVal = 42), 42'i8
|
||||
testPattern nnkInt16Lit(intVal = 42), 42'i16
|
||||
testPattern nnkInt32Lit(intVal = 42), 42'i32
|
||||
testPattern nnkInt64Lit(intVal = 42), 42'i64
|
||||
testPattern nnkUInt8Lit(intVal = 42), 42'u8
|
||||
testPattern nnkUInt16Lit(intVal = 42), 42'u16
|
||||
testPattern nnkUInt32Lit(intVal = 42), 42'u32
|
||||
testPattern nnkUInt64Lit(intVal = 42), 42'u64
|
||||
#testPattern nnkFloat64Lit(floatVal = 42.0), 42.0
|
||||
testPattern nnkFloat32Lit(floatVal = 42.0), 42.0'f32
|
||||
#testPattern nnkFloat64Lit(floatVal = 42.0), 42.0'f64
|
||||
testPattern nnkStrLit(strVal = "abc"), "abc"
|
||||
testPattern nnkRStrLit(strVal = "abc"), r"abc"
|
||||
testPattern nnkTripleStrLit(strVal = "abc"), """abc"""
|
||||
testPattern nnkCharLit(intVal = 32), ' '
|
||||
testPattern nnkNilLit(), nil
|
||||
testPattern nnkIdent(strVal = "myIdentifier"), myIdentifier
|
||||
|
||||
testPatternFail nnkInt8Lit(intVal = 42), 42'i16
|
||||
testPatternFail nnkInt16Lit(intVal = 42), 42'i8
|
||||
|
||||
testPatternFail nnkInt8Lit(intVal = 42) , 42'i16
|
||||
testPatternFail nnkInt16Lit(intVal = 42) , 42'i8
|
||||
|
||||
|
||||
# this should be just `block` but it doesn't work that way anymore because of VM.
|
||||
macro scope(arg: untyped): untyped =
|
||||
let procSym = genSym(nskProc)
|
||||
result = quote do:
|
||||
proc `procSym`(): void {.compileTime.} =
|
||||
proc `procSym`() {.compileTime.} =
|
||||
`arg`
|
||||
|
||||
`procSym`()
|
||||
@@ -85,7 +154,7 @@ static:
|
||||
## Command call
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
echo "abc", "xyz"
|
||||
|
||||
ast.matchAst:
|
||||
@@ -95,7 +164,7 @@ static:
|
||||
## Call with ``()``
|
||||
|
||||
scope:
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
echo("abc", "xyz")
|
||||
|
||||
ast.matchAst:
|
||||
@@ -140,7 +209,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
? "xyz"
|
||||
|
||||
ast.matchAst(err):
|
||||
@@ -155,7 +224,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
proc identifier*
|
||||
|
||||
ast[0].matchAst(err):
|
||||
@@ -185,7 +254,7 @@ static:
|
||||
|
||||
## Call with raw string literal
|
||||
scope:
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
echo"abc"
|
||||
|
||||
|
||||
@@ -230,7 +299,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
cast[T](x)
|
||||
|
||||
ast.matchAst:
|
||||
@@ -242,7 +311,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
x.y
|
||||
|
||||
ast.matchAst:
|
||||
@@ -264,7 +333,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
(1, 2, (3))
|
||||
|
||||
ast.matchAst:
|
||||
@@ -276,7 +345,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
{1, 2, 3}
|
||||
|
||||
ast.matchAst:
|
||||
@@ -285,7 +354,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
{a: 3, b: 5}
|
||||
|
||||
ast.matchAst:
|
||||
@@ -300,7 +369,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
[1, 2, 3]
|
||||
|
||||
ast.matchAst:
|
||||
@@ -312,7 +381,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
1..3
|
||||
|
||||
ast.matchAst:
|
||||
@@ -328,7 +397,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
if cond1: expr1 elif cond2: expr2 else: expr3
|
||||
|
||||
ast.matchAst:
|
||||
@@ -343,7 +412,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
## This is a comment
|
||||
## This is part of the first comment
|
||||
stmt1
|
||||
@@ -357,12 +426,12 @@ static:
|
||||
):
|
||||
echo "ok"
|
||||
else:
|
||||
echo "NOT OK!!!"
|
||||
echo "warning!"
|
||||
echo ast.treeRepr
|
||||
echo "TEST causes no fail, because of a regression in Nim."
|
||||
|
||||
scope:
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
{.emit: "#include <stdio.h>".}
|
||||
|
||||
ast.matchAst:
|
||||
@@ -375,7 +444,7 @@ static:
|
||||
echo "ok"
|
||||
|
||||
scope:
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
{.pragma: cdeclRename, cdecl.}
|
||||
|
||||
ast.matchAst:
|
||||
@@ -391,7 +460,7 @@ static:
|
||||
|
||||
|
||||
scope:
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
if cond1:
|
||||
stmt1
|
||||
elif cond2:
|
||||
@@ -413,7 +482,7 @@ static:
|
||||
|
||||
|
||||
scope:
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
x = 42
|
||||
|
||||
ast.matchAst:
|
||||
@@ -423,7 +492,7 @@ static:
|
||||
|
||||
|
||||
scope:
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
stmt1
|
||||
stmt2
|
||||
stmt3
|
||||
@@ -439,7 +508,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
case expr1
|
||||
of expr2, expr3..expr4:
|
||||
stmt1
|
||||
@@ -464,7 +533,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
while expr1:
|
||||
stmt1
|
||||
|
||||
@@ -477,7 +546,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
for ident1, ident2 in expr1:
|
||||
stmt1
|
||||
|
||||
@@ -490,7 +559,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
try:
|
||||
stmt1
|
||||
except e1, e2:
|
||||
@@ -517,7 +586,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
return expr1
|
||||
|
||||
ast.matchAst:
|
||||
@@ -528,7 +597,7 @@ static:
|
||||
## Continue statement
|
||||
|
||||
scope:
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
continue
|
||||
|
||||
ast.matchAst:
|
||||
@@ -539,7 +608,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
break otherLocation
|
||||
|
||||
ast.matchAst:
|
||||
@@ -550,10 +619,12 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
template blockStatement {.dirty.} =
|
||||
block name:
|
||||
discard
|
||||
|
||||
let ast = getAst(blockStatement())
|
||||
|
||||
ast.matchAst:
|
||||
of nnkBlockStmt(ident"name", nnkStmtList):
|
||||
echo "ok"
|
||||
@@ -562,7 +633,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
asm """some asm"""
|
||||
|
||||
ast.matchAst:
|
||||
@@ -576,7 +647,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
import math
|
||||
|
||||
ast.matchAst:
|
||||
@@ -585,7 +656,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
import math except pow
|
||||
|
||||
ast.matchAst:
|
||||
@@ -594,7 +665,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
import strutils as su
|
||||
|
||||
ast.matchAst:
|
||||
@@ -611,7 +682,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
from math import pow
|
||||
|
||||
ast.matchAst:
|
||||
@@ -622,7 +693,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
export unsigned
|
||||
|
||||
ast.matchAst:
|
||||
@@ -631,7 +702,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
export math except pow # we're going to implement our own exponentiation
|
||||
|
||||
ast.matchAst:
|
||||
@@ -642,7 +713,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
include blocks
|
||||
|
||||
ast.matchAst:
|
||||
@@ -653,7 +724,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
var a = 3
|
||||
|
||||
ast.matchAst:
|
||||
@@ -670,7 +741,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
let a = 3
|
||||
|
||||
ast.matchAst:
|
||||
@@ -687,7 +758,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
const a = 3
|
||||
|
||||
ast.matchAst:
|
||||
@@ -704,7 +775,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
type A = int
|
||||
|
||||
ast.matchAst:
|
||||
@@ -719,7 +790,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
type MyInt = distinct int
|
||||
|
||||
ast.peelOff({nnkTypeSection}).matchAst:
|
||||
@@ -735,7 +806,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
type A[T] = expr1
|
||||
|
||||
ast.matchAst:
|
||||
@@ -757,7 +828,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
type IO = object of RootObj
|
||||
|
||||
ast.peelOff(nnkTypeSection).matchAst:
|
||||
@@ -840,7 +911,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
type X = enum
|
||||
First
|
||||
|
||||
@@ -853,7 +924,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
type Con = concept x,y,z
|
||||
(x & y & z) is string
|
||||
|
||||
@@ -865,7 +936,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let astX = quote do:
|
||||
let astX = myquote:
|
||||
type
|
||||
A[T: static[int]] = object
|
||||
|
||||
@@ -880,7 +951,7 @@ static:
|
||||
|
||||
|
||||
scope:
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
type MyProc[T] = proc(x: T)
|
||||
|
||||
ast.peelOff({nnkStmtList, nnkTypeSection}).matchAst(err):
|
||||
@@ -952,7 +1023,7 @@ static:
|
||||
proc hello*[T: SomeInteger](x: int = 3, y: float32): int {.inline.} = discard
|
||||
|
||||
scope:
|
||||
var ast = quote do:
|
||||
var ast = myquote:
|
||||
proc foobar(a, b: int): void
|
||||
|
||||
ast = ast[3]
|
||||
@@ -971,7 +1042,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
proc hello(): var int
|
||||
|
||||
ast[3].matchAst: # subAst
|
||||
@@ -986,7 +1057,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
iterator nonsense[T](x: seq[T]): float {.closure.} =
|
||||
discard
|
||||
|
||||
@@ -998,7 +1069,7 @@ static:
|
||||
|
||||
scope:
|
||||
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
converter toBool(x: float): bool
|
||||
|
||||
ast.matchAst:
|
||||
@@ -1008,7 +1079,7 @@ static:
|
||||
## Template declaration
|
||||
|
||||
scope:
|
||||
let ast = quote do:
|
||||
let ast = myquote:
|
||||
template optOpt{expr1}(a: int): int
|
||||
|
||||
ast.matchAst:
|
||||
|
||||
@@ -12,4 +12,7 @@ proc f3(_: typedesc) = discard
|
||||
f3(typeof(nil))
|
||||
|
||||
proc f4[T](_: T) = discard
|
||||
f4(nil)
|
||||
f4(nil)
|
||||
|
||||
proc f5(): typeof(nil) = nil
|
||||
discard f5()
|
||||
|
||||
18
tests/destructor/t12037.nim
Normal file
18
tests/destructor/t12037.nim
Normal file
@@ -0,0 +1,18 @@
|
||||
discard """
|
||||
cmd: '''nim c --newruntime $file'''
|
||||
output: '''
|
||||
showing original type, length, and contents seq[int] 1 @[42]
|
||||
copy length and contents 1 @[42]
|
||||
'''
|
||||
"""
|
||||
|
||||
proc test() =
|
||||
var sq1 = @[42]
|
||||
echo "showing original type, length, and contents ", sq1.typeof, " ", sq1.len, " ", sq1
|
||||
doAssert cast[int](sq1[0].unsafeAddr) != 0
|
||||
var sq2 = sq1 # copy of original
|
||||
echo "copy length and contents ", sq2.len, " ", sq2
|
||||
doAssert cast[int](sq2[0].unsafeAddr) != 0
|
||||
doAssert cast[int](sq1[0].unsafeAddr) != 0
|
||||
|
||||
test()
|
||||
14
tests/destructor/tdestructor_too_late.nim
Normal file
@@ -0,0 +1,14 @@
discard """
errmsg: "cannot bind another '=destroy' to: Obj; previous declaration was constructed here implicitly: tdestructor_too_late.nim(7, 16)"
"""
type Obj* = object
v*: int
proc something(this: sink Obj) =
discard
proc `=destroy`(this: var Obj) =
echo "igotdestroyed"
this.v = -1
var test* = Obj(v: 42)
@@ -175,4 +175,19 @@ proc myfuncLoop(x: int): MySeqNonCopyable =
var cc = newMySeq(i, 5.0)
result = cc
discard myfuncLoop(3)
discard myfuncLoop(3)
#------------------------------------------------------------
# Move into table via openarray
#------------------------------------------------------------
type
TableNonCopyable = object
x: seq[(string, MySeqNonCopyable)]
proc toTable(pairs: sink openArray[(string, MySeqNonCopyable)]): TableNonCopyable =
discard
let mytable = {"a": newMySeq(2, 5.0)}.toTable
@@ -75,5 +75,13 @@ proc selfAssign =
selfAssign()
# bug #11833
type FooAt = object
proc testWrongAt() =
var x = @[@[FooAt()]]
testWrongAt()
let (a, d) = allocCounters()
discard cprintf("%ld new: %ld\n", a - unpairedEnvAllocs() - d, allocs)
@@ -2,7 +2,6 @@ discard """
cmd: '''nim c --newruntime $file'''
output: '''@[1]
@[116, 101, 115, 116]
test
@[1953719668, 875770417]'''
"""
@@ -13,7 +12,7 @@ echo cast[seq[uint8]](@[1])
echo cast[seq[uint8]]("test")
discard cast[string](@[116'u8, 101, 115, 116])
echo cast[string](@[116'u8, 101, 115, 116])
#echo cast[string](@[116'u8, 101, 115, 116, 0])
var a = cast[seq[uint32]]("test1234")
a.setLen(2)
echo a
@@ -13,3 +13,18 @@ echo a.str
beforeCodeReload:
echo " 2: before!"
# testing a construct of 2 functions in the same module which reference each other
# https://github.com/nim-lang/Nim/issues/11608
proc rec_1(depth: int)
proc rec_2(depth: int) =
rec_1(depth + 1)
proc rec_1(depth: int) =
if depth < 3:
rec_2(depth)
else:
echo("max mutual recursion reached!")
# https://github.com/nim-lang/Nim/issues/11996
let rec_2_func_ref = rec_2
rec_2_func_ref(0)
@@ -29,6 +29,7 @@ main: hasAnyModuleChanged? true
0: before - improved!
main: before
2: random string
max mutual recursion reached!
1
bar
0: after - closure iterator: 0
17
tests/errmsgs/t5870.nim
Normal file
@@ -0,0 +1,17 @@
discard """
errormsg: "invalid type for const: seq[SomeRefObj]"
line: 14
"""
# bug #5870
type SomeRefObj = ref object of RootObj
someIntMember: int
proc createSomeRefObj(v: int): SomeRefObj=
result.new()
result.someIntMember = v
const compileTimeSeqOfRefObjs = @[createSomeRefObj(100500), createSomeRefObj(2)]
for i in 0..1:
echo compileTimeSeqOfRefObjs[i].someIntMember
@@ -36,7 +36,7 @@ tsigmatch.nim(143, 13) Error: type mismatch: got <array[0..0, proc (x: int){.gcs
but expected one of:
proc takesFuncs(fs: openArray[proc (x: int) {.gcsafe, locks: 0.}])
first type mismatch at position: 1
required type for fs: openarray[proc (x: int){.closure, gcsafe, locks: 0.}]
required type for fs: openArray[proc (x: int){.closure, gcsafe, locks: 0.}]
but expression '[proc (x: int) {.gcsafe, locks: 0.} = echo [x]]' is of type: array[0..0, proc (x: int){.gcsafe, locks: 0.}]
expression: takesFuncs([proc (x: int) {.gcsafe, locks: 0.} = echo [x]])
@@ -6,11 +6,11 @@ twrong_at_operator.nim(22, 30) Error: type mismatch: got <array[0..0, type int]>
but expected one of:
proc `@`[T](a: openArray[T]): seq[T]
first type mismatch at position: 1
required type for a: openarray[T]
required type for a: openArray[T]
but expression '[int]' is of type: array[0..0, type int]
proc `@`[IDX, T](a: array[IDX, T]): seq[T]
proc `@`[IDX, T](a: sink array[IDX, T]): seq[T]
first type mismatch at position: 1
required type for a: array[IDX, T]
required type for a: sink array[IDX, T]
but expression '[int]' is of type: array[0..0, type int]
expression: @[int]
@@ -27,13 +27,11 @@ method eval(e: ref TPlusExpr): int =
proc newLit(x: int): ref TLiteral =
new(result)
{.watchpoint: result.}
result.x = x
result.op1 = $getOccupiedMem()
proc newPlus(a, b: ref TExpr): ref TPlusExpr =
new(result)
{.watchpoint: result.}
result.a = a
result.b = b
result.op2 = $getOccupiedMem()
@@ -110,7 +110,7 @@ proc setNestingLevel(self: ref SimpleLoop, level: int) =
self.nestingLevel = level
if level == 0: self.isRoot = true
var loop_counter: int = 0
var loopCounter: int = 0
type
Lsg = object
@@ -119,8 +119,8 @@ type
proc createNewLoop(self: var Lsg): ref SimpleLoop =
result = newSimpleLoop()
loop_counter += 1
result.counter = loop_counter
loopCounter += 1
result.counter = loopCounter
proc addLoop(self: var Lsg, l: ref SimpleLoop) =
self.loops.add l
@@ -170,13 +170,13 @@ proc union(self: ref UnionFindNode, unionFindNode: ref UnionFindNode) =
const
BB_TOP = 0 # uninitialized
BB_NONHEADER = 1 # a regular BB
BB_REDUCIBLE = 2 # reducible loop
BB_SELF = 3 # single BB loop
BB_IRREDUCIBLE = 4 # irreducible loop
BB_DEAD = 5 # a dead BB
BB_LAST = 6 # Sentinel
BB_TOP = 0 # uninitialized
BB_NONHEADER = 1 # a regular BB
BB_REDUCIBLE = 2 # reducible loop
BB_SELF = 3 # single BB loop
BB_IRREDUCIBLE = 4 # irreducible loop
BB_DEAD = 5 # a dead BB
BB_LAST = 6 # Sentinel
# # Marker for uninitialized nodes.
UNVISITED = -1
@@ -196,7 +196,9 @@ proc newHavlakLoopFinder(cfg: Cfg, lsg: Lsg): HavlakLoopFinder =
proc isAncestor(w: int, v: int, last: seq[int]): bool =
w <= v and v <= last[w]
proc dfs(currentNode: ref BasicBlock, nodes: var seq[ref UnionFindNode], number: var Table[ref BasicBlock, int], last: var seq[int], current: int): int =
proc dfs(currentNode: ref BasicBlock, nodes: var seq[ref UnionFindNode],
number: var Table[ref BasicBlock, int],
last: var seq[int], current: int): int =
var stack = @[(currentNode, current)]
while stack.len > 0:
let (currentNode, current) = stack.pop()
@@ -215,13 +217,13 @@ proc findLoops(self: var HavlakLoopFinder): int =
if startNode == nil: return 0
var size = self.cfg.getNumNodes
var nonBackPreds = newSeq[HashSet[int]]()
var backPreds = newSeq[seq[int]]()
var number = initTable[ref BasicBlock, int]()
var header = newSeq[int](size)
var types = newSeq[int](size)
var last = newSeq[int](size)
var nodes = newSeq[ref UnionFindNode]()
var nonBackPreds = newSeq[HashSet[int]]()
var backPreds = newSeq[seq[int]]()
var number = initTable[ref BasicBlock, int]()
var header = newSeq[int](size)
var types = newSeq[int](size)
var last = newSeq[int](size)
var nodes = newSeq[ref UnionFindNode]()
for i in 1..size:
nonBackPreds.add initSet[int](1)
@@ -27,3 +27,62 @@ for a in items(arr):
echo ""
#--------------------------------------------------------------------
# Lent iterators
#--------------------------------------------------------------------
type
NonCopyable = object
x: int
proc `=destroy`(o: var NonCopyable) =
discard
proc `=copy`(dst: var NonCopyable, src: NonCopyable) {.error.}
proc `=sink`(dst: var NonCopyable, src: NonCopyable) =
dst.x = src.x
iterator lentItems[T](a: openarray[T]): lent T =
for i in 0..a.high:
yield a[i]
iterator lentPairs[T](a: array[0..1, T]): tuple[key: int, val: lent T] =
for i in 0..a.high:
yield (i, a[i])
let arr1 = [1, 2, 3]
let arr2 = @["a", "b", "c"]
let arr3 = [NonCopyable(x: 1), NonCopyable(x: 2)]
let arr4 = @[(1, "a"), (2, "b"), (3, "c")]
var accum: string
for x in lentItems(arr1):
accum &= $x
doAssert(accum == "123")
accum = ""
for x in lentItems(arr2):
accum &= $x
doAssert(accum == "abc")
accum = ""
for val in lentItems(arr3):
accum &= $val.x
doAssert(accum == "12")
accum = ""
for i, val in lentPairs(arr3):
accum &= $i & "-" & $val.x & " "
doAssert(accum == "0-1 1-2 ")
accum = ""
for i, val in lentItems(arr4):
accum &= $i & "-" & $val & " "
doAssert(accum == "1-a 2-b 3-c ")
accum = ""
for (i, val) in lentItems(arr4):
accum &= $i & "-" & $val & " "
doAssert(accum == "1-a 2-b 3-c ")
@@ -48,3 +48,27 @@ doAssert hasDefault2(int) == "int"
doAssert hasDefault2(string) == "string"
doAssert hasDefault2() == "string"
# bug #9195
type
Error = enum
erA, erB, erC
Result[T, U] = object
x: T
u: U
PB = object
proc decodeUVarint*(itzzz: typedesc[SomeUnsignedInt],
data: openArray[char]): Result[itzzz, Error] =
result = Result[itzzz, Error](x: 0, u: erC)
discard decodeUVarint(uint32, "abc")
type
X = object
Y[T] = object
proc testObj(typ: typedesc[object]): Y[typ] =
discard
discard testObj(X)
@@ -14,3 +14,32 @@ block: # isNamedTuple
doAssert not Foo3.isNamedTuple
doAssert not Foo4.isNamedTuple
doAssert not (1,).type.isNamedTuple
proc typeToString*(t: typedesc, prefer = "preferTypeName"): string {.magic: "TypeTrait".}
## Returns the name of the given type, with more flexibility than `name`,
## and avoiding the potential clash with a variable named `name`.
## prefer = "preferResolved" will resolve type aliases recursively.
# Move to typetraits.nim once api stabilized.
block: # typeToString
type MyInt = int
type
C[T0, T1] = object
type C2=C # alias => will resolve as C
type C2b=C # alias => will resolve as C (recursively)
type C3[U,V] = C[V,U]
type C4[X] = C[X,X]
template name2(T): string = typeToString(T, "preferResolved")
doAssert MyInt.name2 == "int"
doAssert C3[MyInt, C2b].name2 == "C3[int, C]"
# C3 doesn't get resolved to C, not an alias (nor does C4)
doAssert C2b[MyInt, C4[cstring]].name2 == "C[int, C4[cstring]]"
doAssert C4[MyInt].name2 == "C4[int]"
when BiggestFloat is float and cint is int:
doAssert C2b[cint, BiggestFloat].name2 == "C3[int, C3[float, int32]]"
template name3(T): string = typeToString(T, "preferMixed")
doAssert MyInt.name3 == "MyInt{int}"
doAssert (tuple[a: MyInt, b: float]).name3 == "tuple[a: MyInt{int}, b: float]"
doAssert (tuple[a: C2b[MyInt, C4[cstring]], b: cint, c: float]).name3 ==
"tuple[a: C2b{C}[MyInt{int}, C4[cstring]], b: cint{int32}, c: float]"
@@ -48,7 +48,6 @@ echo(["a", "b", "c", "d"].len)
for x in items(["What's", "your", "name", "?", ]):
echo(x)
var `name` = readLine(stdin)
{.breakpoint.}
echo("Hi " & thallo.name & "!\n")
debug(name)
@@ -1,5 +1,7 @@
discard """
output: '''(x: "string here", a: 1)'''
output: '''(x: "string here", a: 1)
b is 5
x is 12'''
"""
proc simple[T](a: T) =
@@ -28,3 +30,35 @@ proc test(baz: int, qux: var int): int =
var x1 = 456
discard test(123, x1)
# bug #11958
proc foo() =
var a = 5
proc bar() {.nimcall.} =
var b = 5
for k, v in fieldpairs(locals()):
echo k, " is ", v
bar()
foo()
proc foo2() =
var a = 5
proc bar2() {.nimcall.} =
for k, v in fieldpairs(locals()):
echo k, " is ", v
bar2()
foo2()
proc foo3[T](y: T) =
var a = 5
proc bar2[T](x: T) {.nimcall.} =
for k, v in fieldpairs(locals()):
echo k, " is ", v
bar2(y)
foo3(12)
@@ -1,4 +1,5 @@
discard """
targets: "c cpp"
output: '''
body executed
body executed
@@ -7,6 +8,14 @@ macros api OK
'''
"""
# This is for travis. The keyword ``alignof`` only exists in ``c++11``
# and newer. On travis gcc does not default to c++11 yet.
when defined(cpp) and not defined(windows):
{.passC: "-std=c++11".}
# Object offsets are different for inheritance objects when compiling
# to c++.
type
TMyEnum = enum
tmOne, tmTwo, tmThree, tmFour
@@ -143,6 +152,14 @@ type
ValueA
ValueB
# Must have more than 32 elements so that set[MyEnum33] will become compile to an int64.
MyEnum33 {.pure.} = enum
Value1, Value2, Value3, Value4, Value5, Value6,
Value7, Value8, Value9, Value10, Value11, Value12,
Value13, Value14, Value15, Value16, Value17, Value18,
Value19, Value20, Value21, Value22, Value23, Value24,
Value25, Value26, Value27, Value28, Value29, Value30,
Value31, Value32, Value33
proc transformObjectconfigPacked(arg: NimNode): NimNode =
let debug = arg.kind == nnkPragmaExpr
@@ -296,6 +313,10 @@ testinstance:
b: int8
c: int8
PaddingOfSetEnum33 = object
cause: int8
theSet: set[MyEnum33]
Bazing {.objectconfig.} = object of RootObj
a: int64
# TODO test on 32 bit system
@@ -328,6 +349,7 @@ testinstance:
var g : RecursiveStuff
var ro : RootObj
var go : GenericObject[int64]
var po : PaddingOfSetEnum33
var
e1: Enum1
@@ -346,16 +368,16 @@ testinstance:
else:
doAssert sizeof(SimpleAlignment) > 10
testSizeAlignOf(t,a,b,c,d,e,f,g,ro,go, e1, e2, e4, e8, eoa, eob)
testSizeAlignOf(t,a,b,c,d,e,f,g,ro,go,po, e1, e2, e4, e8, eoa, eob)
when not defined(cpp):
type
WithBitsize {.objectconfig.} = object
bitfieldA {.bitsize: 16.}: uint32
bitfieldB {.bitsize: 16.}: uint32
var wbs: WithBitsize
testSize(wbs)
type
WithBitsize {.objectconfig.} = object
bitfieldA {.bitsize: 16.}: uint32
bitfieldB {.bitsize: 16.}: uint32
var wbs: WithBitsize
testSize(wbs)
testOffsetOf(TrivialType, x)
testOffsetOf(TrivialType, y)
@@ -383,11 +405,13 @@ testinstance:
testOffsetOf(Foobar, c)
when not defined(cpp):
testOffsetOf(Bazing, a)
testOffsetOf(InheritanceA, a)
testOffsetOf(InheritanceB, b)
testOffsetOf(InheritanceC, c)
testOffsetOf(PaddingOfSetEnum33, cause)
testOffsetOf(PaddingOfSetEnum33, theSet)
testOffsetOf(Bazing, a)
testOffsetOf(InheritanceA, a)
testOffsetOf(InheritanceB, b)
testOffsetOf(InheritanceC, c)
testOffsetOf(EnumObjectA, a)
testOffsetOf(EnumObjectA, b)
@@ -619,9 +643,6 @@ doAssert offsetof(MyPackedCaseObject, val3) == 13
doAssert offsetof(MyPackedCaseObject, val4) == 9
doAssert offsetof(MyPackedCaseObject, val5) == 13
reject:
const off4 = offsetof(MyPackedCaseObject, val1)
reject:
const off5 = offsetof(MyPackedCaseObject, val2)
@@ -17,7 +17,6 @@ block tobject2:
z: int # added a field
proc getPoint( p: var TPoint2d) =
{.breakpoint.}
writeLine(stdout, p.x)
var p: TPoint3d
@@ -1,5 +1,5 @@
discard """
errormsg: "invalid type: 'openarray[int]' for result"
errormsg: "invalid type: 'openArray[int]' for result"
line: 6
"""
@@ -1,6 +1,7 @@
discard """
output: '''[1, 2, 3, 4]
3
['1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C']
OK
'''
"""
@@ -66,4 +67,18 @@ var
doAssert y1 == ([1, 2], 3)
doAssert y2 == [1, 2, 3, 4]
template newOpenArray(x: var string, size: int): openArray[char] =
var z = 1
toOpenArray(x, z, size)
template doSomethingAndCreate(x: var string): openArray[char] =
let size = 12
newOpenArray(x, size)
proc sinkk(x: openArray[char]) =
echo x
var xArrayDeref = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
sinkk doSomethingAndCreate(xArrayDeref)
echo "OK"
@@ -9,9 +9,9 @@ proc term(k: float): float = 4 * math.pow(-1, k) / (2*k + 1)
proc piU(n: int): float =
var ch = newSeq[FlowVar[float]](n+1)
for k in 0..n:
for k in 0..ch.high:
ch[k] = spawn term(float(k))
for k in 0..n:
for k in 0..ch.high:
result += ^ch[k]
proc piS(n: int): float =
@@ -1,11 +1,12 @@
discard """
output: '''ob @[]
ob3 @[]
ob2 @[]
3
output: '''ob2 @[]
ob @[]
ob3 @[]
ob2 @[]'''
3
ob2 @[]
ob @[]
ob3 @[]
'''
cmd: "nim c -r --threads:on $file"
"""
@@ -1,8 +1,11 @@
discard """
cmd: "nim check $file"
errmsg: ""
nimout: '''tillegalreturntype.nim(8, 11) Error: return type 'typed' is only valid for macros and templates
tillegalreturntype.nim(11, 11) Error: return type 'untyped' is only valid for macros and templates'''
nimout: '''
tillegalreturntype.nim(11, 11) Error: return type 'typed' is only valid for macros and templates
tillegalreturntype.nim(14, 11) Error: return type 'untyped' is only valid for macros and templates
tillegalreturntype.nim(17, 41) Error: return type 'auto' cannot be used in forward declarations
'''
"""
proc x(): typed =
@@ -10,3 +13,9 @@ proc x(): typed =
proc y(): untyped =
discard
proc test_proc[T, U](arg1: T, arg2: U): auto
proc test_proc[T, U](arg1: T, arg2: U): auto =
echo "Proc has been called"
return arg1 / arg2
@@ -118,3 +118,21 @@ block:
x3 = R32(4)
doAssert $x1 & $x2 & $x3 == "444"
block:
var x1: range[0'f..1'f] = 1
const x2: range[0'f..1'f] = 1
var x3: range[0'u8..1'u8] = 1
const x4: range[0'u8..1'u8] = 1
var x5: range[0'f32..1'f32] = 1'f64
const x6: range[0'f32..1'f32] = 1'f64
reject:
var x09: range[0'i8..1'i8] = 1.int
reject:
var x10: range[0'i64..1'i64] = 1'u64
const x11: range[0'f..1'f] = 2'f
reject:
const x12: range[0'f..1'f] = 2
Some files were not shown because too many files have changed in this diff