Merge pull request #1363 from Araq/devel

Merge devel into master
This commit is contained in:
Andreas Rumpf
2014-07-15 01:42:19 +02:00
199 changed files with 8268 additions and 4793 deletions

564
.gitignore vendored
View File

@@ -1,36 +1,27 @@
*
!**/
!*.*
nimcache/
*.o
!/icons/*.o
*.exe
*.so
*.dylib
*.zip
*.iss
nimcache
lib/nimcache
tools/nimcache
tools/nimweb
tests/nimcache
tests/accept/run/nimcache
tests/accept/compile/nimcache
tests/reject/nimcache
compiler/nimcache
compiler/c2nim/nimcache
compiler/pas2nim/nimcache
misc
mapping.txt
tags
install.sh
deinstall.sh
doc/*.html
doc/*.pdf
doc/*.idx
/web/upload
koch
compiler/nimrod*
build/[0-9]_[0-9]
bin/nimrod
examples/cross_calculator/nimrod_commandline/nimcalculator
examples/cross_todo/nimrod_backend/*.html
examples/cross_todo/nimrod_backend/backend
examples/cross_todo/nimrod_backend/testbackend
examples/cross_todo/nimrod_backend/todo.sqlite3
examples/cross_todo/nimrod_commandline/nimtodo
install.sh
deinstall.sh
build/*
bin/*
# iOS specific wildcards.
*.mode1v3
@@ -42,534 +33,11 @@ deinstall.sh
project.xcworkspace/
xcuserdata/
# iOS specific absolute paths
examples/cross_calculator/ios/resources/ui/*.m
examples/cross_calculator/ios/tags
# Android specific absolute paths.
examples/cross_calculator/android/bin/
examples/cross_calculator/android/gen/
examples/cross_calculator/android/jni/backend-jni.h
examples/cross_calculator/android/libs/
examples/cross_calculator/android/local.properties
examples/cross_calculator/android/obj/
examples/cross_calculator/android/tags
# Generated files.
/compile.json
/compiler/c2nim/c2nim
/compiler/nimrod.dot
/compiler/pas2nim/pas2nim
/compiler/service
/examples/allany
/examples/cairoex
/examples/cgiex
/examples/cgi/cgi_stacktrace
/examples/cgi/example
/examples/curlex
/examples/debugging
/examples/docstrings
/examples/filterex
/examples/fizzbuzz
/examples/gtk/ex1
/examples/gtk/ex2
/examples/gtk/ex3
/examples/gtk/ex4
/examples/gtk/ex5
/examples/gtk/ex6
/examples/gtk/ex7
/examples/gtk/ex8
/examples/gtk/ex9
/examples/hallo
/examples/htmlrefs
/examples/htmltitle
/examples/httpserver2
/examples/iupex1
/examples/keyval
/examples/keyval2
/examples/luaex
/examples/maximum
/examples/parsecfgex
/examples/pythonex
/examples/sdlex
/examples/statcsv
/examples/talk/dsl
/examples/talk/formatoptimizer
/examples/talk/hoisting
/examples/talk/lazyeval
/examples/talk/quasiquote
/examples/talk/tags
/examples/tclex
/examples/transff
/examples/tunit
/examples/wingui
/examples/x11ex
/lib/libnimrtl.dylib
/lib/libserver.dylib
/lib/packages/docutils/highlite
/lib/pure/actors
/lib/pure/algorithm
/lib/pure/asyncio
/lib/pure/base64
/lib/pure/basic2d
/lib/pure/basic3d
/lib/pure/browsers
/lib/pure/cgi
/lib/pure/collections/sequtils
/lib/pure/collections/tables
/lib/pure/colors
/lib/pure/complex
/lib/pure/cookies
/lib/pure/dynlib
/lib/pure/encodings
/lib/pure/endians
/lib/pure/events
/lib/pure/fsmonitor
/lib/pure/ftpclient
/lib/pure/gentabs
/lib/pure/hashes
/lib/pure/htmlgen
/lib/pure/htmlparser
/lib/pure/httpclient
/lib/pure/httpserver
/lib/pure/irc
/lib/pure/json
/lib/pure/lexbase
/lib/pure/lib/
/lib/pure/marshal
/lib/pure/matchers
/lib/pure/math
/lib/pure/md5
/lib/pure/memfiles
/lib/pure/mersenne
/lib/pure/mimetypes
/lib/pure/nimprof
/lib/pure/numeric
/lib/pure/oids
/lib/pure/os
/lib/pure/osproc
/lib/pure/parsecfg
/lib/pure/parsecsv
/lib/pure/parseopt
/lib/pure/parseopt2
/lib/pure/parsesql
/lib/pure/parseurl
/lib/pure/parseutils
/lib/pure/parsexml
/lib/pure/pegs
/lib/pure/poly
/lib/pure/redis
/lib/pure/romans
/lib/pure/ropes
/lib/pure/scgi
/lib/pure/smtp
/lib/pure/sockets
/lib/pure/streams
/lib/pure/strtabs
/lib/pure/strutils
/lib/pure/subexes
/lib/pure/terminal
/lib/pure/times
/lib/pure/typetraits
/lib/pure/unicode
/lib/pure/unittest
/lib/pure/uri
/lib/pure/xmldom
/lib/pure/xmldomparser
/lib/pure/xmlparser
/lib/pure/xmltree
/lib/system/sysio
/lib/weird
/lib/wrappers/gtk/gtk2
/reject.json
/run.json
/testresults.html
/testresults.json
/tests/caas/absurd_nesting
/tests/caas/completion_dot_syntax_main
/tests/caasdriver
/tests/caas/forward_declarations
/tests/caas/idetools_api
/tests/caas/imported
/tests/caas/issue_416_template_shift
/tests/caas/issue_452_export_shift
/tests/caas/issue_477_dynamic_dispatch
/tests/caas/its_full_of_procs
/tests/caas/main
/tests/caas/SymbolProcRun.*/
/tests/ccg/tmissingbracket
/tests/compile/talias
/tests/compile/tambsym2
/tests/compile/tarrindx
/tests/compile/tassign
/tests/compile/tbindoverload
/tests/compile/tcan_alias_generic
/tests/compile/tcan_alias_specialised_generic
/tests/compile/tcan_inherit_generic
/tests/compile/tcan_specialise_generic
/tests/compile/tccgen1
/tests/compile/tclosure4
/tests/compile/tclosurebug2
/tests/compile/tcmdline
/tests/compile/tcodegenbug1
/tests/compile/tcolonisproc
/tests/compile/tcolors
/tests/compile/tcommontype
/tests/compile/tcompiles
/tests/compile/tcomputedgoto
/tests/compile/tconsteval
/tests/compile/tconstraints
/tests/compile/tconvcolors
/tests/compile/tcputime
/tests/compile/tdefaultprocparam
/tests/compile/tdictdestruct
/tests/compile/tdiscardable
/tests/compile/tdllvar
/tests/compile/tdumpast
/tests/compile/tdumpast2
/tests/compile/techo
/tests/compile/teffects1
/tests/compile/temptyecho
/tests/compile/tendian
/tests/compile/tenum
/tests/compile/tenum2
/tests/compile/tenum3
/tests/compile/teval1
/tests/compile/texport
/tests/compile/tfib
/tests/compile/tforwardgeneric
/tests/compile/tforwty
/tests/compile/tforwty2
/tests/compile/tgeneric
/tests/compile/tgeneric2
/tests/compile/tgeneric3
/tests/compile/tgeneric4
/tests/compile/tgenericmatcher
/tests/compile/tgenericmatcher2
/tests/compile/tgenericprocvar
/tests/compile/tgenericprop
/tests/compile/tgenericrefs
/tests/compile/tgenerictmpl
/tests/compile/tgenericvariant
/tests/compile/tgensymgeneric
/tests/compile/tgetstartmilsecs
/tests/compile/tglobalforvar
/tests/compile/thallo
/tests/compile/theaproots
/tests/compile/thexrange
/tests/compile/thygienictempl
/tests/compile/tident
/tests/compile/timplicititems
/tests/compile/timplictderef
/tests/compile/tinheritref
/tests/compile/tio
/tests/compile/tircbot
/tests/compile/titer
/tests/compile/titer2
/tests/compile/titer_no_tuple_unpack
/tests/compile/titerovl
/tests/compile/tlastmod
/tests/compile/tlinearscanend
/tests/compile/tloops
/tests/compile/tmacro1
/tests/compile/tmacro2
/tests/compile/tmacroaspragma
/tests/compile/tmacrostmt
/tests/compile/tmandelbrot
/tests/compile/tmarshal
/tests/compile/tmath
/tests/compile/tmatrix1
/tests/compile/tmatrix2
/tests/compile/tmongo
/tests/compile/tnamedparamanonproc
/tests/compile/tnamedparams
/tests/compile/tnestedproc
/tests/compile/tnew
/tests/compile/tnewsets
/tests/compile/tnewuns
/tests/compile/tnoargopenarray
/tests/compile/tnoforward
/tests/compile/tobjconstr2
/tests/compile/tobjcov
/tests/compile/tobject2
/tests/compile/tobject3
/tests/compile/tobjects
/tests/compile/toop
/tests/compile/toptions
/tests/compile/tos
/tests/compile/toverprc
/tests/compile/tparedef
/tests/compile/tparscfg
/tests/compile/tparsefloat
/tests/compile/tparsopt
/tests/compile/tposix
/tests/compile/tprep
/tests/compile/tprocvars
/tests/compile/tpush
/tests/compile/tquicksort
/tests/compile/tquit
/tests/compile/tradix
/tests/compile/treadln
/tests/compile/treadx
/tests/compile/trecmod
/tests/compile/trecmod2
/tests/compile/trectuple
/tests/compile/trectuples
/tests/compile/tredef
/tests/compile/trepr
/tests/compile/tsecondarrayproperty
/tests/compile/tseq2
/tests/compile/tseqcon2
/tests/compile/tshadow_magic_type
/tests/compile/tsizeof
/tests/compile/tslurp
/tests/compile/tsockets
/tests/compile/tsortcall
/tests/compile/tspecialised_is_equivalent
/tests/compile/tstrace
/tests/compile/tstrdesc
/tests/compile/tstrdist
/tests/compile/tstreams
/tests/compile/tstrset
/tests/compile/tstrtabs
/tests/compile/ttableconstr
/tests/compile/ttempl
/tests/compile/ttempl3
/tests/compile/ttempl4
/tests/compile/ttempl5
/tests/compile/ttemplreturntype
/tests/compile/tthread_generic
/tests/compile/ttime
/tests/compile/ttuple1
/tests/compile/ttypeclasses
/tests/compile/ttypeconverter1
/tests/compile/tuserpragma
/tests/compile/tvoid
/tests/compile/twalker
/tests/compile/typalias
/tests/dll/client
/tests/gc/closureleak
/tests/gc/cycleleak
/tests/gc/gcbench
/tests/gc/gcleak
/tests/gc/gcleak2
/tests/gc/gcleak3
/tests/gc/gctest
/tests/gc/weakrefs
/tests/manyloc/argument_parser/ex_wget
/tests/manyloc/nake/nakefile
/tests/manyloc/packages/noconflicts
/tests/manyloc/standalone/barebone
/tests/patterns/targlist
/tests/patterns/tcse
/tests/patterns/thoist
/tests/patterns/tmatrix
/tests/patterns/tnoalias
/tests/patterns/tnoendlessrec
/tests/patterns/tor
/tests/patterns/tpartial
/tests/patterns/tstar
/tests/patterns/tstmtlist
/tests/reject/t99bott
/tests/reject/tcheckedfield1
/tests/reject/tdeprecated
/tests/reject/tdisallowif
/tests/reject/tuninit1
/tests/rodfiles/aconv
/tests/rodfiles/bconv
/tests/rodfiles/bmethods
/tests/rodfiles/bmethods2
/tests/rodfiles/deada
/tests/rodfiles/deada2
/tests/rodfiles/hallo
/tests/rodfiles/hallo2
/tests/rodfiles/tgeneric1
/tests/rodfiles/tgeneric2
/tests/run/tack
/tests/run/tactiontable
/tests/run/tambsym2
/tests/run/tambsys
/tests/run/tanontuples
/tests/run/tarray
/tests/run/tarray2
/tests/run/tarray3
/tests/run/tarraycons
/tests/run/tassert
/tests/run/tastoverload1
/tests/run/tasynciossl
/tests/run/tasyncudp
/tests/run/tbind1
/tests/run/tbind3
/tests/run/tbintre2
/tests/run/tbintree
/tests/run/tborrow
/tests/run/tbug499771
/tests/run/tbug511622
/tests/run/tcase_setconstr
/tests/run/tcasestm
/tests/run/tcgbug
/tests/run/tclosure2
/tests/run/tclosure3
/tests/run/tcnstseq
/tests/run/tcnstseq2
/tests/run/tcnstseq3
/tests/run/tconcat
/tests/run/tconstr2
/tests/run/tcontinue
/tests/run/tcontinuexc
/tests/run/tcopy
/tests/run/tcountup
/tests/run/tcritbits
/tests/run/tcurrncy
/tests/run/tdestructor
/tests/run/tdrdobbs_examples
/tests/run/temit
/tests/run/tenumhole
/tests/run/tenumitems
/tests/run/teventemitter
/tests/run/tevents
/tests/run/texceptions
/tests/run/texcpt1
/tests/run/texcsub
/tests/run/texplicitgeneric1
/tests/run/tfieldindex
/tests/run/tfielditerator
/tests/run/tfielditerator2
/tests/run/tfilter
/tests/run/tfinally
/tests/run/tfinally2
/tests/run/tfinally3
/tests/run/tfinalobj
/tests/run/tfloat1
/tests/run/tfloat2
/tests/run/tfloat3
/tests/run/tformat
/tests/run/tfriends
/tests/run/tgenericassign
/tests/run/tgenericassigntuples
/tests/run/tgenericconverter
/tests/run/tgenericprocvar
/tests/run/tgenerics1
/tests/run/tgensym
/tests/run/tglobal
/tests/run/thashes
/tests/run/thexlit
/tests/run/thintoff
/tests/run/tidgen
/tests/run/tindent1
/tests/run/tinit
/tests/run/tinterf
/tests/run/tints
/tests/run/tisopr
/tests/run/titer3
/tests/run/titer5
/tests/run/titer6
/tests/run/titer7
/tests/run/titer8
/tests/run/titer9
/tests/run/titerslice
/tests/run/titervaropenarray
/tests/run/tkoeniglookup
/tests/run/tlet
/tests/run/tlists
/tests/run/tlocals
/tests/run/tlowhigh
/tests/run/tmacro2
/tests/run/tmacro3
/tests/run/tmacro4
/tests/run/tmacros1
/tests/run/tmath
/tests/run/tmatrix
/tests/run/tmemoization
/tests/run/tmethods1
/tests/run/tmixin
/tests/run/tmoditer
/tests/run/tmultim1
/tests/run/tmultim2
/tests/run/tmultim3
/tests/run/tmultim4
/tests/run/tmultim6
/tests/run/tnamedenumfields
/tests/run/tnestif
/tests/run/tnestprc
/tests/run/tnewderef
/tests/run/tnodeadlocks
/tests/run/tobjasgn
/tests/run/tobjconstr
/tests/run/tobject
/tests/run/tofopr
/tests/run/tonraise
/tests/run/toop1
/tests/run/topenarrayrepr
/tests/run/topenlen
/tests/run/toprprec
/tests/run/toverflw
/tests/run/toverflw2
/tests/run/toverl2
/tests/run/toverl3
/tests/run/toverwr
/tests/run/tovfint
/tests/run/tpatterns
/tests/run/tpegs
/tests/run/tpos
/tests/run/tprecedence
/tests/run/tprintf
/tests/run/tprocvar
/tests/run/tquotewords
/tests/run/tregex
/tests/run/treguse
/tests/run/trepr
/tests/run/treraise
/tests/run/tromans
/tests/run/tseqcon
/tests/run/tseqtuple
/tests/run/tsequtils
/tests/run/tsets
/tests/run/tsets2
/tests/run/tsidee2
/tests/run/tsidee3
/tests/run/tsimmeth
/tests/run/tsimplesort
/tests/run/tslices
/tests/run/tsortdev
/tests/run/tsplit
/tests/run/tstempl
/tests/run/tstmtexprs
/tests/run/tstrange
/tests/run/tstringinterp
/tests/run/tstrlits
/tests/run/tstrutil
/tests/run/tsubrange
/tests/run/tsubrange2
/tests/run/ttables
/tests/run/ttables2
/tests/run/ttoseq
/tests/run/ttypedesc1
/tests/run/tunhandledexc
/tests/run/tunidecode
/tests/run/tunittests
/tests/run/tuserassert
/tests/run/tvarargs_vs_generic
/tests/run/tvardecl
/tests/run/tvariantasgn
/tests/run/tvariantstack
/tests/run/tvarious1
/tests/run/tvarnums
/tests/run/tvarres1
/tests/run/tvarres2
/tests/run/tvartup
/tests/run/tvtable
/tests/run/twrongexc
/tests/run/txmlgen
/tests/run/txmltree
/tests/run/tzeroarray
/tests/system/helpers/readall_echo
/tests/system/io
/tests/system/params
/tests/tester
/tests/threads/tactors
/tests/threads/tactors2
/tests/threads/tthreadanalysis2
/tests/threads/tthreadanalysis3
/tests/threads/tthreadheapviolation1
/tools/nimgrep
testament.db

View File

@@ -510,6 +510,7 @@ const
tfUncheckedArray* = tfVarargs
tfUnion* = tfNoSideEffect
tfGcSafe* = tfThread
tfObjHasKids* = tfEnumHasHoles
skError* = skUnknown
# type flags that are essential for type equality:
@@ -549,7 +550,7 @@ type
mFields, mFieldPairs, mOmpParFor,
mAppendStrCh, mAppendStrStr, mAppendSeqElem,
mInRange, mInSet, mRepr, mExit, mSetLengthStr, mSetLengthSeq,
mIsPartOf, mAstToStr, mRand,
mIsPartOf, mAstToStr, mParallel,
mSwap, mIsNil, mArrToSeq, mCopyStr, mCopyStrLast,
mNewString, mNewStringOfCap,
mReset,
@@ -560,7 +561,7 @@ type
mFloat, mFloat32, mFloat64, mFloat128,
mBool, mChar, mString, mCstring,
mPointer, mEmptySet, mIntSetBaseType, mNil, mExpr, mStmt, mTypeDesc,
mVoidType, mPNimrodNode, mShared, mGuarded, mLock, mSpawn,
mVoidType, mPNimrodNode, mShared, mGuarded, mLock, mSpawn, mDeepCopy,
mIsMainModule, mCompileDate, mCompileTime, mNimrodVersion, mNimrodMajor,
mNimrodMinor, mNimrodPatch, mCpuEndian, mHostOS, mHostCPU, mAppType,
mNaN, mInf, mNegInf,
@@ -597,18 +598,17 @@ const
mIntToStr, mInt64ToStr, mFloatToStr, mCStrToStr, mStrToStr, mEnumToStr,
mAnd, mOr, mEqStr, mLeStr, mLtStr, mEqSet, mLeSet, mLtSet, mMulSet,
mPlusSet, mMinusSet, mSymDiffSet, mConStrStr, mConArrArr, mConArrT,
mConTArr, mConTT, mSlice,
mConTArr, mConTT,
mAppendStrCh, mAppendStrStr, mAppendSeqElem,
mInRange, mInSet, mRepr,
mRand,
mCopyStr, mCopyStrLast}
# magics that require special semantic checking and
# thus cannot be overloaded (also documented in the spec!):
SpecialSemMagics* = {
mDefined, mDefinedInScope, mCompiles, mLow, mHigh, mSizeOf, mIs, mOf,
mEcho, mShallowCopy, mExpandToAst}
mEcho, mShallowCopy, mExpandToAst, mParallel, mSpawn}
type
type
PNode* = ref TNode
TNodeSeq* = seq[PNode]
PType* = ref TType
@@ -873,7 +873,7 @@ const
skMacro, skTemplate, skConverter, skEnumField, skLet, skStub}
PersistentNodeFlags*: TNodeFlags = {nfBase2, nfBase8, nfBase16,
nfDotSetter, nfDotField,
nfAllConst,nfIsRef}
nfIsRef}
namePos* = 0
patternPos* = 1 # empty except for term rewriting macros
genericParamsPos* = 2
@@ -886,6 +886,8 @@ const
nkCallKinds* = {nkCall, nkInfix, nkPrefix, nkPostfix,
nkCommand, nkCallStrLit, nkHiddenCallConv}
nkIdentKinds* = {nkIdent, nkSym, nkAccQuoted, nkOpenSymChoice,
nkClosedSymChoice}
nkLiterals* = {nkCharLit..nkTripleStrLit}
nkLambdaKinds* = {nkLambda, nkDo}
@@ -1046,7 +1048,7 @@ proc discardSons(father: PNode) =
father.sons = nil
when defined(useNodeIds):
const nodeIdToDebug* = 482228 # 612794
const nodeIdToDebug* = 310841 # 612794
#612840 # 612905 # 614635 # 614637 # 614641
# 423408
#429107 # 430443 # 441048 # 441090 # 441153

View File

@@ -19,7 +19,7 @@
import
os, llstream, renderer, clex, idents, strutils, pegs, ast, astalgo, msgs,
options, strtabs
options, strtabs, hashes, algorithm
type
TParserFlag = enum
@@ -63,6 +63,15 @@ type
ERetryParsing = object of ESynch
proc addTypeDef(section, name, t: PNode)
proc parseStruct(p: var TParser, stmtList: PNode, isUnion: bool): PNode
proc parseStructBody(p: var TParser, stmtList: PNode, isUnion: bool,
kind: TNodeKind = nkRecList): PNode
proc newParserOptions*(): PParserOptions =
new(result)
result.prefixes = @[]
@@ -682,24 +691,6 @@ proc parseField(p: var TParser, kind: TNodeKind): PNode =
else: result = mangledIdent(p.tok.s, p)
getTok(p, result)
proc parseStructBody(p: var TParser, isUnion: bool,
kind: TNodeKind = nkRecList): PNode =
result = newNodeP(kind, p)
eat(p, pxCurlyLe, result)
while p.tok.xkind notin {pxEof, pxCurlyRi}:
var baseTyp = typeAtom(p)
while true:
var def = newNodeP(nkIdentDefs, p)
var t = pointer(p, baseTyp)
var i = parseField(p, kind)
t = parseTypeSuffix(p, t)
addSon(def, i, t, ast.emptyNode)
addSon(result, def)
if p.tok.xkind != pxComma: break
getTok(p, def)
eat(p, pxSemicolon, lastSon(result))
eat(p, pxCurlyRi, result)
proc structPragmas(p: TParser, name: PNode, origName: string): PNode =
assert name.kind == nkIdent
result = newNodeP(nkPragmaExpr, p)
@@ -712,6 +703,75 @@ proc structPragmas(p: TParser, name: PNode, origName: string): PNode =
if pragmas.len > 0: addSon(result, pragmas)
else: addSon(result, ast.emptyNode)
proc hashPosition(p: TParser): string =
let lineInfo = parLineInfo(p)
let fileInfo = fileInfos[lineInfo.fileIndex]
result = $hash(fileInfo.shortName & "_" & $lineInfo.line & "_" & $lineInfo.col).uint
proc parseInnerStruct(p: var TParser, stmtList: PNode, isUnion: bool): PNode =
getTok(p, nil)
if p.tok.xkind != pxCurlyLe:
parMessage(p, errUser, "Expected '{' but found '" & $(p.tok[]) & "'")
let structName = if isUnion: "INNER_C_UNION_" & p.hashPosition
else: "INNER_C_STRUCT_" & p.hashPosition
let typeSection = newNodeP(nkTypeSection, p)
let newStruct = newNodeP(nkObjectTy, p)
var pragmas = ast.emptyNode
if isUnion:
pragmas = newNodeP(nkPragma, p)
addSon(pragmas, newIdentNodeP("union", p))
addSon(newStruct, pragmas, ast.emptyNode) # no inheritance
result = newNodeP(nkIdent, p)
result.ident = getIdent(structName)
let struct = parseStructBody(p, stmtList, isUnion)
let defName = newNodeP(nkIdent, p)
defName.ident = getIdent(structName)
addSon(newStruct, struct)
addTypeDef(typeSection, structPragmas(p, defName, "no_name"), newStruct)
addSon(stmtList, typeSection)
proc parseStructBody(p: var TParser, stmtList: PNode, isUnion: bool,
kind: TNodeKind = nkRecList): PNode =
result = newNodeP(kind, p)
eat(p, pxCurlyLe, result)
while p.tok.xkind notin {pxEof, pxCurlyRi}:
skipConst(p)
var baseTyp: PNode
if p.tok.xkind == pxSymbol and (p.tok.s == "struct" or p.tok.s == "union"):
let gotUnion = if p.tok.s == "union": true else: false
saveContext(p)
getTok(p, nil)
if p.tok.xkind == pxSymbol:
backtrackContext(p)
baseTyp = typeAtom(p)
else:
backtrackContext(p)
baseTyp = parseInnerStruct(p, stmtList, gotUnion)
if p.tok.xkind == pxSemiColon:
let def = newNodeP(nkIdentDefs, p)
var t = pointer(p, baseTyp)
let i = fieldIdent("ano_" & p.hashPosition, p)
t = parseTypeSuffix(p, t)
addSon(def, i, t, ast.emptyNode)
addSon(result, def)
getTok(p, nil)
continue
else:
baseTyp = typeAtom(p)
while true:
var def = newNodeP(nkIdentDefs, p)
var t = pointer(p, baseTyp)
var i = parseField(p, kind)
t = parseTypeSuffix(p, t)
addSon(def, i, t, ast.emptyNode)
addSon(result, def)
if p.tok.xkind != pxComma: break
getTok(p, def)
eat(p, pxSemicolon, lastSon(result))
eat(p, pxCurlyRi, result)
proc enumPragmas(p: TParser, name: PNode): PNode =
result = newNodeP(nkPragmaExpr, p)
addSon(result, name)
@@ -722,7 +782,7 @@ proc enumPragmas(p: TParser, name: PNode): PNode =
addSon(pragmas, e)
addSon(result, pragmas)
proc parseStruct(p: var TParser, isUnion: bool): PNode =
proc parseStruct(p: var TParser, stmtList: PNode, isUnion: bool): PNode =
result = newNodeP(nkObjectTy, p)
var pragmas = ast.emptyNode
if isUnion:
@@ -730,7 +790,7 @@ proc parseStruct(p: var TParser, isUnion: bool): PNode =
addSon(pragmas, newIdentNodeP("union", p))
addSon(result, pragmas, ast.emptyNode) # no inheritance
if p.tok.xkind == pxCurlyLe:
addSon(result, parseStructBody(p, isUnion))
addSon(result, parseStructBody(p, stmtList, isUnion))
else:
addSon(result, newNodeP(nkRecList, p))
@@ -855,9 +915,28 @@ proc parseTrailingDefinedTypes(p: var TParser, section, typ: PNode) =
newTyp = parseTypeSuffix(p, newTyp)
addTypeDef(section, newName, newTyp)
proc enumFields(p: var TParser): PNode =
proc createConst(name, typ, val: PNode, p: TParser): PNode =
result = newNodeP(nkConstDef, p)
addSon(result, name, typ, val)
proc exprToNumber(n: PNode not nil): tuple[succ: bool, val: BiggestInt] =
result = (false, 0.BiggestInt)
case n.kind:
of nkPrefix:
# Check for negative/positive numbers -3 or +6
if n.sons.len == 2 and n.sons[0].kind == nkIdent and n.sons[1].kind == nkIntLit:
let pre = n.sons[0]
let num = n.sons[1]
if pre.ident.s == "-": result = (true, - num.intVal)
elif pre.ident.s == "+": result = (true, num.intVal)
else: discard
proc enumFields(p: var TParser, constList: PNode): PNode =
result = newNodeP(nkEnumTy, p)
addSon(result, ast.emptyNode) # enum does not inherit from anything
var i: BiggestInt = 0
var field: tuple[id: BiggestInt, isNumber: bool, node: PNode]
var fields = newSeq[type(field)]()
while true:
var e = skipIdent(p)
if p.tok.xkind == pxAsgn:
@@ -867,17 +946,59 @@ proc enumFields(p: var TParser): PNode =
e = newNodeP(nkEnumFieldDef, p)
addSon(e, a, c)
skipCom(p, e)
addSon(result, e)
if c.kind == nkIntLit:
i = c.intVal
field.isNumber = true
else:
var (success, number) = exprToNumber(c)
if success:
i = number
field.isNumber = true
else:
field.isNumber = false
else:
inc(i)
field.isNumber = true
field.id = i
field.node = e
fields.add(field)
if p.tok.xkind != pxComma: break
getTok(p, e)
# allow trailing comma:
if p.tok.xkind == pxCurlyRi: break
fields.sort do (x, y: type(field)) -> int:
cmp(x.id, y.id)
var lastId: BiggestInt
var lastIdent: PNode
for count, f in fields:
if not f.isNumber:
addSon(result, f.node)
elif f.id == lastId and count > 0:
var currentIdent: PNode
case f.node.kind:
of nkEnumFieldDef:
if f.node.sons.len > 0 and f.node.sons[0].kind == nkIdent:
currentIdent = f.node.sons[0]
else: parMessage(p, errGenerated, "Warning: When sorting enum fields an expected nkIdent was not found. Check the fields!")
of nkIdent: currentIdent = f.node
else: parMessage(p, errGenerated, "Warning: When sorting enum fields an expected nkIdent was not found. Check the fields!")
var constant = createConst( currentIdent, ast.emptyNode, lastIdent, p)
constList.addSon(constant)
else:
addSon(result, f.node)
lastId = f.id
case f.node.kind:
of nkEnumFieldDef:
if f.node.sons.len > 0 and f.node.sons[0].kind == nkIdent:
lastIdent = f.node.sons[0]
else: parMessage(p, errGenerated, "Warning: When sorting enum fields an expected nkIdent was not found. Check the fields!")
of nkIdent: lastIdent = f.node
else: parMessage(p, errGenerated, "Warning: When sorting enum fields an expected nkIdent was not found. Check the fields!")
proc parseTypedefStruct(p: var TParser, result: PNode, isUnion: bool) =
proc parseTypedefStruct(p: var TParser, result: PNode, stmtList: PNode, isUnion: bool) =
getTok(p, result)
if p.tok.xkind == pxCurlyLe:
var t = parseStruct(p, isUnion)
var t = parseStruct(p, stmtList, isUnion)
var origName = p.tok.s
markTypeIdent(p, nil)
var name = skipIdent(p)
@@ -890,7 +1011,7 @@ proc parseTypedefStruct(p: var TParser, result: PNode, isUnion: bool) =
var nameOrType = skipIdent(p)
case p.tok.xkind
of pxCurlyLe:
var t = parseStruct(p, isUnion)
var t = parseStruct(p, stmtList, isUnion)
if p.tok.xkind == pxSymbol:
# typedef struct tagABC {} abc, *pabc;
# --> abc is a better type name than tagABC!
@@ -914,11 +1035,11 @@ proc parseTypedefStruct(p: var TParser, result: PNode, isUnion: bool) =
else:
expectIdent(p)
proc parseTypedefEnum(p: var TParser, result: PNode) =
proc parseTypedefEnum(p: var TParser, result, constSection: PNode) =
getTok(p, result)
if p.tok.xkind == pxCurlyLe:
getTok(p, result)
var t = enumFields(p)
var t = enumFields(p, constSection)
eat(p, pxCurlyRi, t)
var origName = p.tok.s
markTypeIdent(p, nil)
@@ -933,7 +1054,7 @@ proc parseTypedefEnum(p: var TParser, result: PNode) =
case p.tok.xkind
of pxCurlyLe:
getTok(p, result)
var t = enumFields(p)
var t = enumFields(p, constSection)
eat(p, pxCurlyRi, t)
if p.tok.xkind == pxSymbol:
# typedef enum tagABC {} abc, *pabc;
@@ -960,27 +1081,36 @@ proc parseTypedefEnum(p: var TParser, result: PNode) =
expectIdent(p)
proc parseTypeDef(p: var TParser): PNode =
result = newNodeP(nkTypeSection, p)
result = newNodeP(nkStmtList, p)
var typeSection = newNodeP(nkTypeSection, p)
var afterStatements = newNodeP(nkStmtList, p)
while p.tok.xkind == pxSymbol and p.tok.s == "typedef":
getTok(p, result)
getTok(p, typeSection)
inc(p.inTypeDef)
expectIdent(p)
case p.tok.s
of "struct": parseTypedefStruct(p, result, isUnion=false)
of "union": parseTypedefStruct(p, result, isUnion=true)
of "enum": parseTypedefEnum(p, result)
of "struct": parseTypedefStruct(p, typeSection, result, isUnion=false)
of "union": parseTypedefStruct(p, typeSection, result, isUnion=true)
of "enum":
var constSection = newNodeP(nkConstSection, p)
parseTypedefEnum(p, typeSection, constSection)
addSon(afterStatements, constSection)
of "class":
if pfCpp in p.options.flags:
parseTypedefStruct(p, result, isUnion=false)
parseTypedefStruct(p, typeSection, result, isUnion=false)
else:
var t = typeAtom(p)
otherTypeDef(p, result, t)
otherTypeDef(p, typeSection, t)
else:
var t = typeAtom(p)
otherTypeDef(p, result, t)
otherTypeDef(p, typeSection, t)
eat(p, pxSemicolon)
dec(p.inTypeDef)
addSon(result, typeSection)
for s in afterStatements:
addSon(result, s)
proc skipDeclarationSpecifiers(p: var TParser) =
while p.tok.xkind == pxSymbol:
case p.tok.s
@@ -1092,10 +1222,6 @@ proc declaration(p: var TParser): PNode =
result = parseVarDecl(p, baseTyp, rettyp, origName)
assert result != nil
proc createConst(name, typ, val: PNode, p: TParser): PNode =
result = newNodeP(nkConstDef, p)
addSon(result, name, typ, val)
proc enumSpecifier(p: var TParser): PNode =
saveContext(p)
getTok(p, nil) # skip "enum"
@@ -1141,12 +1267,16 @@ proc enumSpecifier(p: var TParser): PNode =
closeContext(p)
var name = result
# create a type section containing the enum
result = newNodeP(nkTypeSection, p)
result = newNodeP(nkStmtList, p)
var tSection = newNodeP(nkTypeSection, p)
var t = newNodeP(nkTypeDef, p)
getTok(p, t)
var e = enumFields(p)
var constSection = newNodeP(nkConstSection, p)
var e = enumFields(p, constSection)
addSon(t, exportSym(p, name, origName), ast.emptyNode, e)
addSon(result, t)
addSon(tSection, t)
addSon(result, tSection)
addSon(result, constSection)
eat(p, pxCurlyRi, result)
eat(p, pxSemicolon)
of pxSemicolon:
@@ -1608,8 +1738,8 @@ proc declarationOrStatement(p: var TParser): PNode =
result = expressionStatement(p)
assert result != nil
proc parseTuple(p: var TParser, isUnion: bool): PNode =
result = parseStructBody(p, isUnion, nkTupleTy)
proc parseTuple(p: var TParser, statements: PNode, isUnion: bool): PNode =
parseStructBody(p, statements, isUnion, nkTupleTy)
proc parseTrailingDefinedIdents(p: var TParser, result, baseTyp: PNode) =
var varSection = newNodeP(nkVarSection, p)
@@ -1640,13 +1770,13 @@ proc parseStandaloneStruct(p: var TParser, isUnion: bool): PNode =
if p.tok.xkind in {pxCurlyLe, pxSemiColon}:
if origName.len > 0:
var name = mangledIdent(origName, p)
var t = parseStruct(p, isUnion)
var t = parseStruct(p, result, isUnion)
var typeSection = newNodeP(nkTypeSection, p)
addTypeDef(typeSection, structPragmas(p, name, origName), t)
addSon(result, typeSection)
parseTrailingDefinedIdents(p, result, name)
else:
var t = parseTuple(p, isUnion)
var t = parseTuple(p, result, isUnion)
parseTrailingDefinedIdents(p, result, t)
else:
backtrackContext(p)
@@ -2034,7 +2164,7 @@ proc parseStandaloneClass(p: var TParser, isStruct: bool): PNode =
addTypeDef(typeSection, structPragmas(p, name, origName), t)
parseTrailingDefinedIdents(p, result, name)
else:
var t = parseTuple(p, isUnion=false)
var t = parseTuple(p, result, isUnion=false)
parseTrailingDefinedIdents(p, result, t)
else:
backtrackContext(p)

View File

@@ -0,0 +1,40 @@
enum vehicles
{
car = 0x10,
truck,
boat = 0x01,
ship = 1,
speedboat = 1,
bicycle = 4,
bobycar
};
enum
{
red = 4,
green = 2,
blue
};
typedef enum food
{
bread = 4,
toast = 4,
bun = 0x04,
cucumber = 2,
chocolate = 6
};
typedef enum numbers
{
one = 1,
two,
nten = - 10,
nnine,
four = 4,
three = + 3,
positivenine = + 9,
nfour = - 4,
negativeten = -10
};

View File

@@ -0,0 +1,27 @@
struct normal{
int a;
int b;
};
typedef struct outerStruct {
struct normal a_nomal_one;
int a;
struct {
union {
int b;
} a_union_in_the_struct;
int c;
};
union {
int d;
struct {
int e;
} a_struct_in_the_union;
} a_union;
};

View File

@@ -77,18 +77,38 @@ proc isInCurrentFrame(p: BProc, n: PNode): bool =
proc openArrayLoc(p: BProc, n: PNode): PRope =
var a: TLoc
initLocExpr(p, n, a)
case skipTypes(a.t, abstractVar).kind
of tyOpenArray, tyVarargs:
result = ropef("$1, $1Len0", [rdLoc(a)])
of tyString, tySequence:
if skipTypes(n.typ, abstractInst).kind == tyVar:
result = ropef("(*$1)->data, (*$1)->$2", [a.rdLoc, lenField()])
else:
result = ropef("$1->data, $1->$2", [a.rdLoc, lenField()])
of tyArray, tyArrayConstr:
result = ropef("$1, $2", [rdLoc(a), toRope(lengthOrd(a.t))])
else: internalError("openArrayLoc: " & typeToString(a.t))
let q = skipConv(n)
if getMagic(q) == mSlice:
# magic: pass slice to openArray:
var b, c: TLoc
initLocExpr(p, q[1], a)
initLocExpr(p, q[2], b)
initLocExpr(p, q[3], c)
let fmt =
case skipTypes(a.t, abstractVar+{tyPtr}).kind
of tyOpenArray, tyVarargs, tyArray, tyArrayConstr:
"($1)+($2), ($3)-($2)+1"
of tyString, tySequence:
if skipTypes(n.typ, abstractInst).kind == tyVar:
"(*$1)->data+($2), ($3)-($2)+1"
else:
"$1->data+($2), ($3)-($2)+1"
else: (internalError("openArrayLoc: " & typeToString(a.t)); "")
result = ropef(fmt, [rdLoc(a), rdLoc(b), rdLoc(c)])
else:
initLocExpr(p, n, a)
case skipTypes(a.t, abstractVar).kind
of tyOpenArray, tyVarargs:
result = ropef("$1, $1Len0", [rdLoc(a)])
of tyString, tySequence:
if skipTypes(n.typ, abstractInst).kind == tyVar:
result = ropef("(*$1)->data, (*$1)->$2", [a.rdLoc, lenField()])
else:
result = ropef("$1->data, $1->$2", [a.rdLoc, lenField()])
of tyArray, tyArrayConstr:
result = ropef("$1, $2", [rdLoc(a), toRope(lengthOrd(a.t))])
else: internalError("openArrayLoc: " & typeToString(a.t))
proc genArgStringToCString(p: BProc,
n: PNode): PRope {.inline.} =

View File

@@ -484,7 +484,7 @@ proc unaryArithOverflow(p: BProc, e: PNode, d: var TLoc, m: TMagic) =
opr: array[mUnaryMinusI..mAbsI64, string] = [
mUnaryMinusI: "((NI$2)-($1))",
mUnaryMinusI64: "-($1)",
mAbsI: "(NI$2)abs($1)",
mAbsI: "($1 > 0? ($1) : -($1))",
mAbsI64: "($1 > 0? ($1) : -($1))"]
var
a: TLoc
@@ -714,11 +714,12 @@ proc genFieldCheck(p: BProc, e: PNode, obj: PRope, field: PSym) =
assert(it.sons[0].kind == nkSym)
let op = it.sons[0].sym
if op.magic == mNot: it = it.sons[1]
assert(it.sons[2].kind == nkSym)
let disc = it.sons[2].skipConv
assert(disc.kind == nkSym)
initLoc(test, locNone, it.typ, OnStack)
initLocExpr(p, it.sons[1], u)
initLoc(v, locExpr, it.sons[2].typ, OnUnknown)
v.r = ropef("$1.$2", [obj, it.sons[2].sym.loc.r])
initLoc(v, locExpr, disc.typ, OnUnknown)
v.r = ropef("$1.$2", [obj, disc.sym.loc.r])
genInExprAux(p, it, u, v, test)
let id = nodeTableTestOrSet(p.module.dataCache,
newStrNode(nkStrLit, field.name.s), gBackendId)
@@ -1144,6 +1145,24 @@ proc genNewFinalize(p: BProc, e: PNode) =
genObjectInit(p, cpsStmts, bt, a, false)
gcUsage(e)
proc genOfHelper(p: BProc; dest: PType; a: PRope): PRope =
# unfortunately 'genTypeInfo' sets tfObjHasKids as a side effect, so we
# have to call it here first:
let ti = genTypeInfo(p.module, dest)
if tfFinal in dest.flags or (p.module.objHasKidsValid and
tfObjHasKids notin dest.flags):
result = ropef("$1.m_type == $2", a, ti)
else:
discard cgsym(p.module, "TNimType")
inc p.module.labels
let cache = con("Nim_OfCheck_CACHE", p.module.labels.toRope)
appf(p.module.s[cfsVars], "static TNimType* $#[2];$n", cache)
result = rfmt(p.module, "#isObjWithCache($#.m_type, $#, $#)", a, ti, cache)
when false:
# former version:
result = rfmt(p.module, "#isObj($1.m_type, $2)",
a, genTypeInfo(p.module, dest))
proc genOf(p: BProc, x: PNode, typ: PType, d: var TLoc) =
var a: TLoc
initLocExpr(p, x, a)
@@ -1163,11 +1182,9 @@ proc genOf(p: BProc, x: PNode, typ: PType, d: var TLoc) =
globalError(x.info, errGenerated,
"no 'of' operator available for pure objects")
if nilCheck != nil:
r = rfmt(p.module, "(($1) && #isObj($2.m_type, $3))",
nilCheck, r, genTypeInfo(p.module, dest))
r = rfmt(p.module, "(($1) && ($2))", nilCheck, genOfHelper(p, dest, r))
else:
r = rfmt(p.module, "#isObj($1.m_type, $2)",
r, genTypeInfo(p.module, dest))
r = rfmt(p.module, "($1)", genOfHelper(p, dest, r))
putIntoDest(p, d, getSysType(tyBool), r)
proc genOf(p: BProc, n: PNode, d: var TLoc) =
@@ -1382,10 +1399,10 @@ proc genSetOp(p: BProc, e: PNode, d: var TLoc, op: TMagic) =
of mIncl:
var ts = "NI" & $(size * 8)
binaryStmtInExcl(p, e, d,
"$1 |=((" & ts & ")(1)<<(($2)%(sizeof(" & ts & ")*8)));$n")
"$1 |= ((" & ts & ")1)<<(($2)%(sizeof(" & ts & ")*8));$n")
of mExcl:
var ts = "NI" & $(size * 8)
binaryStmtInExcl(p, e, d, "$1 &= ~((" & ts & ")(1) << (($2) % (sizeof(" &
binaryStmtInExcl(p, e, d, "$1 &= ~(((" & ts & ")1) << (($2) % (sizeof(" &
ts & ")*8)));$n")
of mCard:
if size <= 4: unaryExprChar(p, e, d, "#countBits32($1)")
@@ -1623,7 +1640,7 @@ proc genMagicExpr(p: BProc, e: PNode, d: var TLoc, op: TMagic) =
of mIncl, mExcl, mCard, mLtSet, mLeSet, mEqSet, mMulSet, mPlusSet, mMinusSet,
mInSet:
genSetOp(p, e, d, op)
of mNewString, mNewStringOfCap, mCopyStr, mCopyStrLast, mExit, mRand:
of mNewString, mNewStringOfCap, mCopyStr, mCopyStrLast, mExit:
var opr = e.sons[0].sym
if lfNoDecl notin opr.loc.flags:
discard cgsym(p.module, opr.loc.r.ropeToStr)
@@ -1636,7 +1653,10 @@ proc genMagicExpr(p: BProc, e: PNode, d: var TLoc, op: TMagic) =
of mSlurp..mQuoteAst:
localError(e.info, errXMustBeCompileTime, e.sons[0].sym.name.s)
of mSpawn:
let n = lowerings.wrapProcForSpawn(p.module.module, e.sons[1])
let n = lowerings.wrapProcForSpawn(p.module.module, e, e.typ, nil, nil)
expr(p, n, d)
of mParallel:
let n = semparallel.liftParallel(p.module.module, e)
expr(p, n, d)
else: internalError(e.info, "genMagicExpr: " & $op)

View File

@@ -593,6 +593,7 @@ proc genStringCase(p: BProc, t: PNode, d: var TLoc) =
else:
# else statement: nothing to do yet
# but we reserved a label, which we use later
discard
linefmt(p, cpsStmts, "switch (#hashString($1) & $2) {$n",
rdLoc(a), toRope(bitMask))
for j in countup(0, high(branches)):

View File

@@ -11,49 +11,10 @@
# ------------------------- Name Mangling --------------------------------
proc mangleField(name: string): string =
case name[0]
of 'a'..'z':
result = ""
add(result, chr(ord(name[0]) - ord('a') + ord('A')))
of '0'..'9', 'A'..'Z':
result = ""
add(result, name[0])
else: result = "HEX" & toHex(ord(name[0]), 2)
for i in countup(1, len(name) - 1):
case name[i]
of 'A'..'Z':
add(result, chr(ord(name[i]) - ord('A') + ord('a')))
of '_':
discard
of 'a'..'z', '0'..'9':
add(result, name[i])
else:
add(result, "HEX")
add(result, toHex(ord(name[i]), 2))
proc mangle(name: string): string =
when false:
case name[0]
of 'a'..'z':
result = ""
add(result, chr(ord(name[0]) - ord('a') + ord('A')))
of '0'..'9', 'A'..'Z':
result = ""
add(result, name[0])
else: result = "HEX" & toHex(ord(name[0]), 2)
result = ""
for i in countup(0, len(name) - 1):
case name[i]
of 'A'..'Z':
add(result, chr(ord(name[i]) - ord('A') + ord('a')))
of '_':
discard
of 'a'..'z', '0'..'9':
add(result, name[i])
else:
add(result, "HEX")
add(result, toHex(ord(name[i]), 2))
proc mangleField(name: string): string =
result = mangle(name)
result[0] = result[0].toUpper # Mangling makes everything lowercase,
# but some identifiers are C keywords
proc isKeyword(w: PIdent): bool =
# nimrod and C++ share some keywords
@@ -835,6 +796,11 @@ proc genObjectInfo(m: BModule, typ: PType, name: PRope) =
var tmp = getNimNode(m)
genObjectFields(m, typ, typ.n, tmp)
appf(m.s[cfsTypeInit3], "$1.node = &$2;$n", [name, tmp])
var t = typ.sons[0]
while t != nil:
t = t.skipTypes(abstractInst)
t.flags.incl tfObjHasKids
t = t.sons[0]
proc genTupleInfo(m: BModule, typ: PType, name: PRope) =
genTypeInfoAuxBase(m, typ, name, toRope("0"))

View File

@@ -161,6 +161,30 @@ proc makeSingleLineCString*(s: string): string =
result.add(c.toCChar)
result.add('\"')
proc mangle*(name: string): string =
## Lowercases the given name and manges any non-alphanumeric characters
## so they are represented as `HEX____`. If the name starts with a number,
## `N` is prepended
result = ""
case name[0]
of Letters:
result.add(name[0].toLower)
of Digits:
result.add("N" & name[0])
else:
result = "HEX" & toHex(ord(name[0]), 2)
for i in 1..(name.len-1):
let c = name[i]
case c
of 'A'..'Z':
add(result, c.toLower)
of '_':
discard
of 'a'..'z', '0'..'9':
add(result, c)
else:
add(result, "HEX" & toHex(ord(c), 2))
proc makeLLVMString*(s: string): PRope =
const MaxLineLength = 64
result = nil

View File

@@ -14,7 +14,8 @@ import
options, intsets,
nversion, nimsets, msgs, crc, bitsets, idents, lists, types, ccgutils, os,
times, ropes, math, passes, rodread, wordrecg, treetab, cgmeth,
rodutils, renderer, idgen, cgendata, ccgmerge, semfold, aliases, lowerings
rodutils, renderer, idgen, cgendata, ccgmerge, semfold, aliases, lowerings,
semparallel
when options.hasTinyCBackend:
import tccgen
@@ -503,7 +504,8 @@ proc assignLocalVar(p: BProc, s: PSym) =
if sfRegister in s.flags: app(decl, " register")
#elif skipTypes(s.typ, abstractInst).kind in GcTypeKinds:
# app(decl, " GC_GUARD")
if sfVolatile in s.flags or p.nestedTryStmts.len > 0:
if sfVolatile in s.flags or (p.nestedTryStmts.len > 0 and
gCmd != cmdCompileToCpp):
app(decl, " volatile")
appf(decl, " $1;$n", [s.loc.r])
else:
@@ -1048,6 +1050,7 @@ proc getSomeInitName(m: PSym, suffix: string): PRope =
assert m.owner.kind == skPackage
if {sfSystemModule, sfMainModule} * m.flags == {}:
result = m.owner.name.s.mangle.toRope
result.app "_"
result.app m.name.s
result.app suffix
@@ -1382,6 +1385,7 @@ proc myClose(b: PPassContext, n: PNode): PNode =
registerModuleToMain(m.module)
if sfMainModule in m.module.flags:
m.objHasKidsValid = true
var disp = generateMethodDispatchers()
for i in 0..sonsLen(disp)-1: genProcAux(m, disp.sons[i].sym)
genMainProc(m)

View File

@@ -96,6 +96,7 @@ type
# a frame var twice in an init proc
isHeaderFile*: bool # C source file is the header file
includesStringh*: bool # C source file already includes ``<string.h>``
objHasKidsValid*: bool # whether we can rely on tfObjHasKids
cfilename*: string # filename of the module (including path,
# without extension)
typeCache*: TIdTable # cache the generated types

View File

@@ -9,10 +9,34 @@
# This module handles the parsing of command line arguments.
# We do this here before the 'import' statement so 'defined' does not get
# confused with 'TGCMode.gcGenerational' etc.
template bootSwitch(name, expr, userString: expr): expr =
# Helper to build boot constants, for debugging you can 'echo' the else part.
const name = if expr: " " & userString else: ""
bootSwitch(usedRelease, defined(release), "-d:release")
bootSwitch(usedGnuReadline, defined(useGnuReadline), "-d:useGnuReadline")
bootSwitch(usedNoCaas, defined(noCaas), "-d:noCaas")
bootSwitch(usedBoehm, defined(boehmgc), "--gc:boehm")
bootSwitch(usedMarkAndSweep, defined(gcmarkandsweep), "--gc:markAndSweep")
bootSwitch(usedGenerational, defined(gcgenerational), "--gc:generational")
bootSwitch(usedNoGC, defined(nogc), "--gc:none")
import
os, msgs, options, nversion, condsyms, strutils, extccomp, platform, lists,
wordrecg, parseutils, babelcmd, idents
# but some have deps to imported modules. Yay.
bootSwitch(usedTinyC, hasTinyCBackend, "-d:tinyc")
bootSwitch(usedAvoidTimeMachine, noTimeMachine, "-d:avoidTimeMachine")
bootSwitch(usedNativeStacktrace,
defined(nativeStackTrace) and nativeStackTraceSupported,
"-d:nativeStackTrace")
bootSwitch(usedFFI, hasFFI, "-d:useFFI")
proc writeCommandLineUsage*()
type
@@ -55,6 +79,14 @@ proc writeVersionInfo(pass: TCmdLinePass) =
msgWriteln(`%`(HelpMessage, [VersionAsString,
platform.OS[platform.hostOS].name,
CPU[platform.hostCPU].name]))
const gitHash = gorge("git log -n 1 --format=%H")
if gitHash.strip.len == 40:
msgWriteln("git hash: " & gitHash)
msgWriteln("active boot switches:" & usedRelease & usedAvoidTimeMachine &
usedTinyC & usedGnuReadline & usedNativeStacktrace & usedNoCaas &
usedFFI & usedBoehm & usedMarkAndSweep & usedGenerational & usedNoGC)
quit(0)
var

View File

@@ -541,8 +541,14 @@ proc genOutFile(d: PDoc): PRope =
if toc != nil:
toc = ropeFormatNamedVars(getConfigVar("doc.toc"), ["content"], [toc])
for i in countup(low(TSymKind), high(TSymKind)): app(code, d.section[i])
if d.meta[metaTitle].len != 0: title = d.meta[metaTitle]
else: title = "Module " & extractFilename(changeFileExt(d.filename, ""))
# Extract the title. Non API modules generate an entry in the index table.
if d.meta[metaTitle].len != 0:
title = d.meta[metaTitle]
setIndexTerm(d[], "", title)
else:
# Modules get an automatic title for the HTML, but no entry in the index.
title = "Module " & extractFilename(changeFileExt(d.filename, ""))
let bodyname = if d.hasToc: "doc.body_toc" else: "doc.body_no_toc"
content = ropeFormatNamedVars(getConfigVar(bodyname), ["title",

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
#
#
# The Nimrod Compiler
# (c) Copyright 2013 Andreas Rumpf
# (c) Copyright 2014 Andreas Rumpf
#
# See the file "copying.txt", included in this
# distribution, for details about the copyright.
@@ -9,7 +9,8 @@
## This module implements the 'implies' relation for guards.
import ast, astalgo, msgs, magicsys, nimsets, trees, types, renderer, idents
import ast, astalgo, msgs, magicsys, nimsets, trees, types, renderer, idents,
saturate
const
someEq = {mEqI, mEqI64, mEqF64, mEqEnum, mEqCh, mEqB, mEqRef, mEqProc,
@@ -25,6 +26,17 @@ const
someIn = {mInRange, mInSet}
someHigh = {mHigh}
# we don't list unsigned here because wrap around semantics suck for
# proving anything:
someAdd = {mAddI, mAddI64, mAddF64, mSucc}
someSub = {mSubI, mSubI64, mSubF64, mPred}
someMul = {mMulI, mMulI64, mMulF64}
someDiv = {mDivI, mDivI64, mDivF64}
someMod = {mModI, mModI64}
someMax = {mMaxI, mMaxI64, mMaxF64}
someMin = {mMinI, mMinI64, mMinF64}
proc isValue(n: PNode): bool = n.kind in {nkCharLit..nkNilLit}
proc isLocation(n: PNode): bool = not n.isValue
@@ -69,19 +81,24 @@ proc isLetLocation(m: PNode, isApprox: bool): bool =
proc interestingCaseExpr*(m: PNode): bool = isLetLocation(m, true)
proc getMagicOp(name: string, m: TMagic): PSym =
proc createMagic*(name: string, m: TMagic): PSym =
result = newSym(skProc, getIdent(name), nil, unknownLineInfo())
result.magic = m
let
opLe = getMagicOp("<=", mLeI)
opLt = getMagicOp("<", mLtI)
opAnd = getMagicOp("and", mAnd)
opOr = getMagicOp("or", mOr)
opNot = getMagicOp("not", mNot)
opIsNil = getMagicOp("isnil", mIsNil)
opContains = getMagicOp("contains", mInSet)
opEq = getMagicOp("==", mEqI)
opLe = createMagic("<=", mLeI)
opLt = createMagic("<", mLtI)
opAnd = createMagic("and", mAnd)
opOr = createMagic("or", mOr)
opNot = createMagic("not", mNot)
opIsNil = createMagic("isnil", mIsNil)
opContains = createMagic("contains", mInSet)
opEq = createMagic("==", mEqI)
opAdd = createMagic("+", mAddI)
opSub = createMagic("-", mSubI)
opMul = createMagic("*", mMulI)
opDiv = createMagic("div", mDivI)
opLen = createMagic("len", mLengthSeq)
proc swapArgs(fact: PNode, newOp: PSym): PNode =
result = newNodeI(nkCall, fact.info, 3)
@@ -137,17 +154,141 @@ proc neg(n: PNode): PNode =
result.sons[0] = newSymNode(opNot)
result.sons[1] = n
proc buildIsNil(arg: PNode): PNode =
result = newNodeI(nkCall, arg.info, 2)
result.sons[0] = newSymNode(opIsNil)
result.sons[1] = arg
proc buildCall(op: PSym; a: PNode): PNode =
result = newNodeI(nkCall, a.info, 2)
result.sons[0] = newSymNode(op)
result.sons[1] = a
proc buildCall(op: PSym; a, b: PNode): PNode =
result = newNodeI(nkInfix, a.info, 3)
result.sons[0] = newSymNode(op)
result.sons[1] = a
result.sons[2] = b
proc `|+|`(a, b: PNode): PNode =
result = copyNode(a)
if a.kind in {nkCharLit..nkUInt64Lit}: result.intVal = a.intVal |+| b.intVal
else: result.floatVal = a.floatVal + b.floatVal
proc `|*|`(a, b: PNode): PNode =
result = copyNode(a)
if a.kind in {nkCharLit..nkUInt64Lit}: result.intVal = a.intVal |*| b.intVal
else: result.floatVal = a.floatVal * b.floatVal
proc negate(a, b, res: PNode): PNode =
if b.kind in {nkCharLit..nkUInt64Lit} and b.intVal != low(BiggestInt):
var b = copyNode(b)
b.intVal = -b.intVal
if a.kind in {nkCharLit..nkUInt64Lit}:
b.intVal = b.intVal |+| a.intVal
result = b
else:
result = buildCall(opAdd, a, b)
elif b.kind in {nkFloatLit..nkFloat64Lit}:
var b = copyNode(b)
b.floatVal = -b.floatVal
result = buildCall(opAdd, a, b)
else:
result = res
proc zero(): PNode = nkIntLit.newIntNode(0)
proc one(): PNode = nkIntLit.newIntNode(1)
proc minusOne(): PNode = nkIntLit.newIntNode(-1)
proc lowBound*(x: PNode): PNode =
result = nkIntLit.newIntNode(firstOrd(x.typ))
result.info = x.info
proc highBound*(x: PNode): PNode =
result = if x.typ.skipTypes(abstractInst).kind == tyArray:
nkIntLit.newIntNode(lastOrd(x.typ))
else:
opAdd.buildCall(opLen.buildCall(x), minusOne())
result.info = x.info
proc reassociation(n: PNode): PNode =
result = n
# (foo+5)+5 --> foo+10; same for '*'
case result.getMagic
of someAdd:
if result[2].isValue and
result[1].getMagic in someAdd and result[1][2].isValue:
result = opAdd.buildCall(result[1][1], result[1][2] |+| result[2])
of someMul:
if result[2].isValue and
result[1].getMagic in someMul and result[1][2].isValue:
result = opAdd.buildCall(result[1][1], result[1][2] |*| result[2])
else: discard
proc canon*(n: PNode): PNode =
# XXX for now only the new code in 'semparallel' uses this
if n.safeLen >= 1:
result = shallowCopy(n)
for i in 0 .. < n.len:
result.sons[i] = canon(n.sons[i])
else:
result = n
case result.getMagic
of someEq, someAdd, someMul, someMin, someMax:
# these are symmetric; put value as last:
if result.sons[1].isValue and not result.sons[2].isValue:
result = swapArgs(result, result.sons[0].sym)
# (4 + foo) + 2 --> (foo + 4) + 2
of someHigh:
# high == len+(-1)
result = opAdd.buildCall(opLen.buildCall(result[1]), minusOne())
of mUnaryMinusI, mUnaryMinusI64:
result = buildCall(opAdd, result[1], newIntNode(nkIntLit, -1))
of someSub:
# x - 4 --> x + (-4)
result = negate(result[1], result[2], result)
of someLen:
result.sons[0] = opLen.newSymNode
else: discard
result = skipConv(result)
result = reassociation(result)
# most important rule: (x-4) < a.len --> x < a.len+4
case result.getMagic
of someLe, someLt:
let x = result[1]
let y = result[2]
if x.kind in nkCallKinds and x.len == 3 and x[2].isValue and
isLetLocation(x[1], true):
case x.getMagic
of someSub:
result = buildCall(result[0].sym, x[1],
reassociation(opAdd.buildCall(y, x[2])))
of someAdd:
# Rule A:
let plus = negate(y, x[2], nil).reassociation
if plus != nil: result = buildCall(result[0].sym, x[1], plus)
else: discard
elif y.kind in nkCallKinds and y.len == 3 and y[2].isValue and
isLetLocation(y[1], true):
# a.len < x-3
case y.getMagic
of someSub:
result = buildCall(result[0].sym, y[1],
reassociation(opAdd.buildCall(x, y[2])))
of someAdd:
let plus = negate(x, y[2], nil).reassociation
# ensure that Rule A will not trigger afterwards with the
# additional 'not isLetLocation' constraint:
if plus != nil and not isLetLocation(x, true):
result = buildCall(result[0].sym, plus, y[1])
else: discard
else: discard
proc `+@`*(a: PNode; b: BiggestInt): PNode =
canon(if b != 0: opAdd.buildCall(a, nkIntLit.newIntNode(b)) else: a)
proc usefulFact(n: PNode): PNode =
case n.getMagic
of someEq:
if skipConv(n.sons[2]).kind == nkNilLit and (
isLetLocation(n.sons[1], false) or isVar(n.sons[1])):
result = buildIsNil(n.sons[1])
result = opIsNil.buildCall(n.sons[1])
else:
if isLetLocation(n.sons[1], true) or isLetLocation(n.sons[2], true):
# XXX algebraic simplifications! 'i-1 < a.len' --> 'i < a.len+1'
@@ -217,7 +358,7 @@ proc addFactNeg*(m: var TModel, n: PNode) =
let n = n.neg
if n != nil: addFact(m, n)
proc sameTree(a, b: PNode): bool =
proc sameTree*(a, b: PNode): bool =
result = false
if a == b:
result = true
@@ -484,7 +625,7 @@ proc factImplies(fact, prop: PNode): TImplication =
# == not a or not b == not (a and b)
let arg = fact.sons[1]
case arg.getMagic
of mIsNil:
of mIsNil, mEqRef:
return ~factImplies(arg, prop)
of mAnd:
# not (a and b) means not a or not b:
@@ -519,7 +660,144 @@ proc doesImply*(facts: TModel, prop: PNode): TImplication =
if result != impUnknown: return
proc impliesNotNil*(facts: TModel, arg: PNode): TImplication =
result = doesImply(facts, buildIsNil(arg).neg)
result = doesImply(facts, opIsNil.buildCall(arg).neg)
proc simpleSlice*(a, b: PNode): BiggestInt =
# returns 'c' if a..b matches (i+c)..(i+c), -1 otherwise. (i)..(i) is matched
# as if it is (i+0)..(i+0).
if guards.sameTree(a, b):
if a.getMagic in someAdd and a[2].kind in {nkCharLit..nkUInt64Lit}:
result = a[2].intVal
else:
result = 0
else:
result = -1
proc pleViaModel(model: TModel; aa, bb: PNode): TImplication
proc ple(m: TModel; a, b: PNode): TImplication =
template `<=?`(a,b): expr = ple(m,a,b) == impYes
# 0 <= 3
if a.isValue and b.isValue:
return if leValue(a, b): impYes else: impNo
# use type information too: x <= 4 iff high(x) <= 4
if b.isValue and a.typ != nil and a.typ.isOrdinalType:
if lastOrd(a.typ) <= b.intVal: return impYes
# 3 <= x iff low(x) <= 3
if a.isValue and b.typ != nil and b.typ.isOrdinalType:
if firstOrd(b.typ) <= a.intVal: return impYes
# x <= x
if sameTree(a, b): return impYes
# 0 <= x.len
if b.getMagic in someLen and a.isValue:
if a.intVal <= 0: return impYes
# x <= y+c if 0 <= c and x <= y
if b.getMagic in someAdd and zero() <=? b[2] and a <=? b[1]: return impYes
# x+c <= y if c <= 0 and x <= y
if a.getMagic in someAdd and a[2] <=? zero() and a[1] <=? b: return impYes
# x <= y*c if 1 <= c and x <= y and 0 <= y
if b.getMagic in someMul:
if a <=? b[1] and one() <=? b[2] and zero() <=? b[1]: return impYes
# x div c <= y if 1 <= c and 0 <= y and x <= y:
if a.getMagic in someDiv:
if one() <=? a[2] and zero() <=? b and a[1] <=? b: return impYes
# slightly subtle:
# x <= max(y, z) iff x <= y or x <= z
# note that 'x <= max(x, z)' is a special case of the above rule
if b.getMagic in someMax:
if a <=? b[1] or a <=? b[2]: return impYes
# min(x, y) <= z iff x <= z or y <= z
if a.getMagic in someMin:
if a[1] <=? b or a[2] <=? b: return impYes
# use the knowledge base:
return pleViaModel(m, a, b)
#return doesImply(m, opLe.buildCall(a, b))
type TReplacements = seq[tuple[a,b: PNode]]
proc replaceSubTree(n, x, by: PNode): PNode =
if sameTree(n, x):
result = by
elif hasSubTree(n, x):
result = shallowCopy(n)
for i in 0 .. safeLen(n)-1:
result.sons[i] = replaceSubTree(n.sons[i], x, by)
else:
result = n
proc applyReplacements(n: PNode; rep: TReplacements): PNode =
result = n
for x in rep: result = result.replaceSubTree(x.a, x.b)
proc pleViaModelRec(m: var TModel; a, b: PNode): TImplication =
# now check for inferrable facts: a <= b and b <= c implies a <= c
for i in 0..m.high:
let fact = m[i]
if fact != nil and fact.getMagic in someLe:
# x <= y implies a <= b if a <= x and y <= b
let x = fact[1]
let y = fact[2]
# mark as used:
m[i] = nil
if ple(m, a, x) == impYes:
if ple(m, y, b) == impYes: return impYes
#if pleViaModelRec(m, y, b): return impYes
# fact: 16 <= i
# x y
# question: i <= 15? no!
result = impliesLe(fact, a, b)
if result != impUnknown: return result
if sameTree(y, a):
result = ple(m, x, b)
if result != impUnknown: return result
proc pleViaModel(model: TModel; aa, bb: PNode): TImplication =
# compute replacements:
var replacements: TReplacements = @[]
for fact in model:
if fact != nil and fact.getMagic in someEq:
let a = fact[1]
let b = fact[2]
if a.kind == nkSym: replacements.add((a,b))
else: replacements.add((b,a))
var m: TModel
var a = aa
var b = bb
if replacements.len > 0:
m = @[]
# make the other facts consistent:
for fact in model:
if fact != nil and fact.getMagic notin someEq:
# XXX 'canon' should not be necessary here, but it is
m.add applyReplacements(fact, replacements).canon
a = applyReplacements(aa, replacements)
b = applyReplacements(bb, replacements)
else:
# we have to make a copy here, because the model will be modified:
m = model
result = pleViaModelRec(m, a, b)
proc proveLe*(m: TModel; a, b: PNode): TImplication =
let x = canon(opLe.buildCall(a, b))
#echo "ROOT ", renderTree(x[1]), " <=? ", renderTree(x[2])
result = ple(m, x[1], x[2])
if result == impUnknown:
# try an alternative: a <= b iff not (b < a) iff not (b+1 <= a):
let y = canon(opLe.buildCall(opAdd.buildCall(b, one()), a))
result = ~ple(m, y[1], y[2])
proc addFactLe*(m: var TModel; a, b: PNode) =
m.add canon(opLe.buildCall(a, b))
proc settype(n: PNode): PType =
result = newType(tySet, n.typ.owner)

View File

@@ -93,7 +93,7 @@ proc rawImportSymbol(c: PContext, s: PSym) =
if hasPattern(s): addPattern(c, s)
proc importSymbol(c: PContext, n: PNode, fromMod: PSym) =
let ident = lookups.considerAcc(n)
let ident = lookups.considerQuotedIdent(n)
let s = strTableGet(fromMod.tab, ident)
if s == nil:
localError(n.info, errUndeclaredIdentifier, ident.s)
@@ -193,7 +193,7 @@ proc evalImportExcept*(c: PContext, n: PNode): PNode =
addDecl(c, m) # add symbol to symbol table of module
var exceptSet = initIntSet()
for i in countup(1, sonsLen(n) - 1):
let ident = lookups.considerAcc(n.sons[i])
let ident = lookups.considerQuotedIdent(n.sons[i])
exceptSet.incl(ident.id)
importAllSymbolsExcept(c, m, exceptSet)
importForwarded(c, m.ast, exceptSet)

View File

@@ -136,18 +136,6 @@ proc mapType(typ: PType): TJSTypeKind =
of tyProc: result = etyProc
of tyCString: result = etyString
proc mangle(name: string): string =
result = ""
for i in countup(0, len(name) - 1):
case name[i]
of 'A'..'Z':
add(result, chr(ord(name[i]) - ord('A') + ord('a')))
of '_':
discard
of 'a'..'z', '0'..'9':
add(result, name[i])
else: add(result, 'X' & toHex(ord(name[i]), 2))
proc mangleName(s: PSym): PRope =
result = s.loc.r
if result == nil:

View File

@@ -347,7 +347,7 @@ proc getNumber(L: var TLexer): TToken =
result.base = base2
while true:
case L.buf[pos]
of 'A'..'Z', 'a'..'z', '2'..'9', '.':
of '2'..'9', '.':
lexMessage(L, errInvalidNumber, result.literal)
inc(pos)
of '_':
@@ -363,7 +363,7 @@ proc getNumber(L: var TLexer): TToken =
result.base = base8
while true:
case L.buf[pos]
of 'A'..'Z', 'a'..'z', '8'..'9', '.':
of '8'..'9', '.':
lexMessage(L, errInvalidNumber, result.literal)
inc(pos)
of '_':
@@ -377,25 +377,22 @@ proc getNumber(L: var TLexer): TToken =
else: break
of 'O':
lexMessage(L, errInvalidNumber, result.literal)
of 'x', 'X':
of 'x', 'X':
result.base = base16
while true:
while true:
case L.buf[pos]
of 'G'..'Z', 'g'..'z':
lexMessage(L, errInvalidNumber, result.literal)
inc(pos)
of '_':
if L.buf[pos+1] notin {'0'..'9', 'a'..'f', 'A'..'F'}:
of '_':
if L.buf[pos+1] notin {'0'..'9', 'a'..'f', 'A'..'F'}:
lexMessage(L, errInvalidToken, "_")
break
inc(pos)
of '0'..'9':
of '0'..'9':
xi = `shl`(xi, 4) or (ord(L.buf[pos]) - ord('0'))
inc(pos)
of 'a'..'f':
of 'a'..'f':
xi = `shl`(xi, 4) or (ord(L.buf[pos]) - ord('a') + 10)
inc(pos)
of 'A'..'F':
of 'A'..'F':
xi = `shl`(xi, 4) or (ord(L.buf[pos]) - ord('A') + 10)
inc(pos)
else: break
@@ -424,8 +421,14 @@ proc getNumber(L: var TLexer): TToken =
if (result.iNumber < low(int32)) or (result.iNumber > high(int32)):
if result.tokType == tkIntLit:
result.tokType = tkInt64Lit
elif result.tokType in {tkInt8Lit, tkInt16Lit}:
lexMessage(L, errInvalidNumber, result.literal)
elif result.tokType in {tkInt8Lit, tkInt16Lit, tkInt32Lit}:
lexMessage(L, errNumberOutOfRange, result.literal)
elif result.tokType == tkInt8Lit and
(result.iNumber < int8.low or result.iNumber > int8.high):
lexMessage(L, errNumberOutOfRange, result.literal)
elif result.tokType == tkInt16Lit and
(result.iNumber < int16.low or result.iNumber > int16.high):
lexMessage(L, errNumberOutOfRange, result.literal)
except EInvalidValue:
lexMessage(L, errInvalidNumber, result.literal)
except EOverflow, EOutOfRange:
@@ -537,6 +540,10 @@ proc getString(L: var TLexer, tok: var TToken, rawMode: bool) =
tok.tokType = tkTripleStrLit # long string literal:
inc(pos, 2) # skip ""
# skip leading newline:
if buf[pos] in {' ', '\t'}:
var newpos = pos+1
while buf[newpos] in {' ', '\t'}: inc newpos
if buf[newpos] in {CR, LF}: pos = newpos
pos = handleCRLF(L, pos)
buf = L.buf
while true:

View File

@@ -82,6 +82,9 @@ when not defined(readLineFromStdin):
proc readLineFromStdin(prompt: string, line: var string): bool =
stdout.write(prompt)
result = readLine(stdin, line)
if not result:
stdout.write("\n")
quit(0)
proc endsWith*(x: string, s: set[char]): bool =
var i = x.len-1

View File

@@ -15,14 +15,15 @@ import
proc ensureNoMissingOrUnusedSymbols(scope: PScope)
proc considerAcc*(n: PNode): PIdent =
proc considerQuotedIdent*(n: PNode): PIdent =
## Retrieve a PIdent from a PNode, taking into account accent nodes.
case n.kind
of nkIdent: result = n.ident
of nkSym: result = n.sym.name
of nkAccQuoted:
case n.len
of 0: globalError(n.info, errIdentifierExpected, renderTree(n))
of 1: result = considerAcc(n.sons[0])
of 1: result = considerQuotedIdent(n.sons[0])
else:
var id = ""
for i in 0.. <n.len:
@@ -82,10 +83,10 @@ proc searchInScopes*(c: PContext, s: PIdent, filter: TSymKinds): PSym =
proc errorSym*(c: PContext, n: PNode): PSym =
## creates an error symbol to avoid cascading errors (for IDE support)
var m = n
# ensure that 'considerAcc' can't fail:
# ensure that 'considerQuotedIdent' can't fail:
if m.kind == nkDotExpr: m = m.sons[1]
let ident = if m.kind in {nkIdent, nkSym, nkAccQuoted}:
considerAcc(m)
considerQuotedIdent(m)
else:
getIdent("err:" & renderTree(m))
result = newSym(skError, ident, getCurrOwner(), n.info)
@@ -189,7 +190,7 @@ proc lookUp*(c: PContext, n: PNode): PSym =
of nkSym:
result = n.sym
of nkAccQuoted:
var ident = considerAcc(n)
var ident = considerQuotedIdent(n)
result = searchInScopes(c, ident)
if result == nil:
localError(n.info, errUndeclaredIdentifier, ident.s)
@@ -208,7 +209,7 @@ type
proc qualifiedLookUp*(c: PContext, n: PNode, flags = {checkUndeclared}): PSym =
case n.kind
of nkIdent, nkAccQuoted:
var ident = considerAcc(n)
var ident = considerQuotedIdent(n)
result = searchInScopes(c, ident)
if result == nil and checkUndeclared in flags:
localError(n.info, errUndeclaredIdentifier, ident.s)
@@ -228,7 +229,7 @@ proc qualifiedLookUp*(c: PContext, n: PNode, flags = {checkUndeclared}): PSym =
if n.sons[1].kind == nkIdent:
ident = n.sons[1].ident
elif n.sons[1].kind == nkAccQuoted:
ident = considerAcc(n.sons[1])
ident = considerQuotedIdent(n.sons[1])
if ident != nil:
if m == c.module:
result = strTableGet(c.topLevelScope.symbols, ident)
@@ -251,7 +252,7 @@ proc qualifiedLookUp*(c: PContext, n: PNode, flags = {checkUndeclared}): PSym =
proc initOverloadIter*(o: var TOverloadIter, c: PContext, n: PNode): PSym =
case n.kind
of nkIdent, nkAccQuoted:
var ident = considerAcc(n)
var ident = considerQuotedIdent(n)
o.scope = c.currentScope
o.mode = oimNoQualifier
while true:
@@ -272,7 +273,7 @@ proc initOverloadIter*(o: var TOverloadIter, c: PContext, n: PNode): PSym =
if n.sons[1].kind == nkIdent:
ident = n.sons[1].ident
elif n.sons[1].kind == nkAccQuoted:
ident = considerAcc(n.sons[1])
ident = considerQuotedIdent(n.sons[1])
if ident != nil:
if o.m == c.module:
# a module may access its private members:
@@ -354,5 +355,5 @@ when false:
if sfImmediate in a.flags: return a
a = nextOverloadIter(o, c, n)
if result == nil and checkUndeclared in flags:
localError(n.info, errUndeclaredIdentifier, n.considerAcc.s)
localError(n.info, errUndeclaredIdentifier, n.considerQuotedIdent.s)
result = errorSym(c, n)

View File

@@ -13,6 +13,8 @@ const
genPrefix* = ":tmp" # prefix for generated names
import ast, astalgo, types, idents, magicsys, msgs, options
from guards import createMagic
from trees import getMagic
proc newTupleAccess*(tup: PNode, i: int): PNode =
result = newNodeIT(nkBracketExpr, tup.info, tup.typ.skipTypes(
@@ -68,6 +70,7 @@ proc addField*(obj: PType; s: PSym) =
var field = newSym(skField, getIdent(s.name.s & $s.id), s.owner, s.info)
let t = skipIntLit(s.typ)
field.typ = t
assert t.kind != tyStmt
field.position = sonsLen(obj.n)
addSon(obj.n, newSymNode(field))
@@ -79,19 +82,30 @@ proc newDotExpr(obj, b: PSym): PNode =
addSon(result, newSymNode(field))
result.typ = field.typ
proc indirectAccess*(a: PNode, b: PSym, info: TLineInfo): PNode =
proc indirectAccess*(a: PNode, b: string, info: TLineInfo): PNode =
# returns a[].b as a node
var deref = newNodeI(nkHiddenDeref, info)
deref.typ = a.typ.sons[0]
assert deref.typ.kind == tyObject
let field = getSymFromList(deref.typ.n, getIdent(b.name.s & $b.id))
assert field != nil, b.name.s
deref.typ = a.typ.skipTypes(abstractInst).sons[0]
var t = deref.typ.skipTypes(abstractInst)
var field: PSym
while true:
assert t.kind == tyObject
field = getSymFromList(t.n, getIdent(b))
if field != nil: break
t = t.sons[0]
if t == nil: break
t = t.skipTypes(abstractInst)
assert field != nil, b
addSon(deref, a)
result = newNodeI(nkDotExpr, info)
addSon(result, deref)
addSon(result, newSymNode(field))
result.typ = field.typ
proc indirectAccess*(a: PNode, b: PSym, info: TLineInfo): PNode =
# returns a[].b as a node
result = indirectAccess(a, b.name.s & $b.id, info)
proc indirectAccess*(a, b: PSym, info: TLineInfo): PNode =
result = indirectAccess(newSymNode(a), b, info)
@@ -101,6 +115,11 @@ proc genAddrOf*(n: PNode): PNode =
result.typ = newType(tyPtr, n.typ.owner)
result.typ.rawAddSon(n.typ)
proc genDeref*(n: PNode): PNode =
result = newNodeIT(nkHiddenDeref, n.info,
n.typ.skipTypes(abstractInst).sons[0])
result.add n
proc callCodegenProc*(name: string, arg1: PNode;
arg2, arg3: PNode = nil): PNode =
result = newNodeI(nkCall, arg1.info)
@@ -112,13 +131,120 @@ proc callCodegenProc*(name: string, arg1: PNode;
result.add arg1
if arg2 != nil: result.add arg2
if arg3 != nil: result.add arg3
result.typ = sym.typ.sons[0]
proc callProc(a: PNode): PNode =
result = newNodeI(nkCall, a.info)
result.add a
result.typ = a.typ.sons[0]
# we have 4 cases to consider:
# - a void proc --> nothing to do
# - a proc returning GC'ed memory --> requires a flowVar
# - a proc returning non GC'ed memory --> pass as hidden 'var' parameter
# - not in a parallel environment --> requires a flowVar for memory safety
type
TSpawnResult = enum
srVoid, srFlowVar, srByVar
TFlowVarKind = enum
fvInvalid # invalid type T for 'FlowVar[T]'
fvGC # FlowVar of a GC'ed type
fvBlob # FlowVar of a blob type
proc spawnResult(t: PType; inParallel: bool): TSpawnResult =
if t.isEmptyType: srVoid
elif inParallel and not containsGarbageCollectedRef(t): srByVar
else: srFlowVar
proc flowVarKind(t: PType): TFlowVarKind =
if t.skipTypes(abstractInst).kind in {tyRef, tyString, tySequence}: fvGC
elif containsGarbageCollectedRef(t): fvInvalid
else: fvBlob
proc addLocalVar(varSection: PNode; owner: PSym; typ: PType; v: PNode): PSym =
result = newSym(skTemp, getIdent(genPrefix), owner, varSection.info)
result.typ = typ
incl(result.flags, sfFromGeneric)
var vpart = newNodeI(nkIdentDefs, varSection.info, 3)
vpart.sons[0] = newSymNode(result)
vpart.sons[1] = ast.emptyNode
vpart.sons[2] = v
varSection.add vpart
discard """
We generate roughly this:
proc f_wrapper(thread, args) =
barrierEnter(args.barrier) # for parallel statement
var a = args.a # thread transfer; deepCopy or shallowCopy or no copy
# depending on whether we're in a 'parallel' statement
var b = args.b
var fv = args.fv
fv.owner = thread # optional
nimArgsPassingDone() # signal parent that the work is done
#
args.fv.blob = f(a, b, ...)
nimFlowVarSignal(args.fv)
# - or -
f(a, b, ...)
barrierLeave(args.barrier) # for parallel statement
stmtList:
var scratchObj
scratchObj.a = a
scratchObj.b = b
nimSpawn(f_wrapper, addr scratchObj)
scratchObj.fv # optional
"""
proc createWrapperProc(f: PNode; threadParam, argsParam: PSym;
varSection, call: PNode): PSym =
varSection, call, barrier, fv: PNode;
spawnKind: TSpawnResult): PSym =
var body = newNodeI(nkStmtList, f.info)
var threadLocalBarrier: PSym
if barrier != nil:
var varSection = newNodeI(nkVarSection, barrier.info)
threadLocalBarrier = addLocalVar(varSection, argsParam.owner,
barrier.typ, barrier)
body.add varSection
body.add callCodeGenProc("barrierEnter", threadLocalBarrier.newSymNode)
var threadLocalProm: PSym
if spawnKind == srByVar:
threadLocalProm = addLocalVar(varSection, argsParam.owner, fv.typ, fv)
elif fv != nil:
internalAssert fv.typ.kind == tyGenericInst
threadLocalProm = addLocalVar(varSection, argsParam.owner, fv.typ, fv)
body.add varSection
body.add callCodeGenProc("nimArgsPassingDone", newSymNode(threadParam))
body.add call
if fv != nil and spawnKind != srByVar:
# generate:
# fv.owner = threadParam
body.add newAsgnStmt(indirectAccess(threadLocalProm.newSymNode,
"owner", fv.info), threadParam.newSymNode)
body.add callCodeGenProc("nimArgsPassingDone", threadParam.newSymNode)
if spawnKind == srByVar:
body.add newAsgnStmt(genDeref(threadLocalProm.newSymNode), call)
elif fv != nil:
let fk = fv.typ.sons[1].flowVarKind
if fk == fvInvalid:
localError(f.info, "cannot create a flowVar of type: " &
typeToString(fv.typ.sons[1]))
body.add newAsgnStmt(indirectAccess(threadLocalProm.newSymNode,
if fk == fvGC: "data" else: "blob", fv.info), call)
if barrier == nil:
# by now 'fv' is shared and thus might have beeen overwritten! we need
# to use the thread-local view instead:
body.add callCodeGenProc("nimFlowVarSignal", threadLocalProm.newSymNode)
else:
body.add call
if barrier != nil:
body.add callCodeGenProc("barrierLeave", threadLocalBarrier.newSymNode)
var params = newNodeI(nkFormalParams, f.info)
params.add emptyNode
@@ -146,10 +272,152 @@ proc createCastExpr(argsParam: PSym; objType: PType): PNode =
result.typ = newType(tyPtr, objType.owner)
result.typ.rawAddSon(objType)
proc wrapProcForSpawn*(owner: PSym; n: PNode): PNode =
result = newNodeI(nkStmtList, n.info)
if n.kind notin nkCallKinds or not n.typ.isEmptyType:
localError(n.info, "'spawn' takes a call expression of type void")
proc setupArgsForConcurrency(n: PNode; objType: PType; scratchObj: PSym,
castExpr, call, varSection, result: PNode) =
let formals = n[0].typ.n
let tmpName = getIdent(genPrefix)
for i in 1 .. <n.len:
# we pick n's type here, which hopefully is 'tyArray' and not
# 'tyOpenArray':
var argType = n[i].typ.skipTypes(abstractInst)
if i < formals.len and formals[i].typ.kind == tyVar:
localError(n[i].info, "'spawn'ed function cannot have a 'var' parameter")
elif containsTyRef(argType):
localError(n[i].info, "'spawn'ed function cannot refer to 'ref'/closure")
let fieldname = if i < formals.len: formals[i].sym.name else: tmpName
var field = newSym(skField, fieldname, objType.owner, n.info)
field.typ = argType
objType.addField(field)
result.add newFastAsgnStmt(newDotExpr(scratchObj, field), n[i])
let temp = addLocalVar(varSection, objType.owner, argType,
indirectAccess(castExpr, field, n.info))
call.add(newSymNode(temp))
proc getRoot*(n: PNode): PSym =
## ``getRoot`` takes a *path* ``n``. A path is an lvalue expression
## like ``obj.x[i].y``. The *root* of a path is the symbol that can be
## determined as the owner; ``obj`` in the example.
case n.kind
of nkSym:
if n.sym.kind in {skVar, skResult, skTemp, skLet, skForVar}:
result = n.sym
of nkDotExpr, nkBracketExpr, nkHiddenDeref, nkDerefExpr,
nkObjUpConv, nkObjDownConv, nkCheckedFieldExpr:
result = getRoot(n.sons[0])
of nkHiddenStdConv, nkHiddenSubConv, nkConv:
result = getRoot(n.sons[1])
of nkCallKinds:
if getMagic(n) == mSlice: result = getRoot(n.sons[1])
else: discard
proc newIntLit(value: BiggestInt): PNode =
result = nkIntLit.newIntNode(value)
result.typ = getSysType(tyInt)
proc genHigh(n: PNode): PNode =
if skipTypes(n.typ, abstractVar).kind in {tyArrayConstr, tyArray}:
result = newIntLit(lastOrd(skipTypes(n.typ, abstractVar)))
else:
result = newNodeI(nkCall, n.info, 2)
result.typ = getSysType(tyInt)
result.sons[0] = newSymNode(createMagic("high", mHigh))
result.sons[1] = n
proc setupArgsForParallelism(n: PNode; objType: PType; scratchObj: PSym;
castExpr, call, varSection, result: PNode) =
let formals = n[0].typ.n
let tmpName = getIdent(genPrefix)
# we need to copy the foreign scratch object fields into local variables
# for correctness: These are called 'threadLocal' here.
for i in 1 .. <n.len:
let n = n[i]
let argType = skipTypes(if i < formals.len: formals[i].typ else: n.typ,
abstractInst)
if containsTyRef(argType):
localError(n.info, "'spawn'ed function cannot refer to 'ref'/closure")
let fieldname = if i < formals.len: formals[i].sym.name else: tmpName
var field = newSym(skField, fieldname, objType.owner, n.info)
if argType.kind in {tyVarargs, tyOpenArray}:
# important special case: we always create a zero-copy slice:
let slice = newNodeI(nkCall, n.info, 4)
slice.typ = n.typ
slice.sons[0] = newSymNode(createMagic("slice", mSlice))
var fieldB = newSym(skField, tmpName, objType.owner, n.info)
fieldB.typ = getSysType(tyInt)
objType.addField(fieldB)
if getMagic(n) == mSlice:
let a = genAddrOf(n[1])
field.typ = a.typ
objType.addField(field)
result.add newFastAsgnStmt(newDotExpr(scratchObj, field), a)
var fieldA = newSym(skField, tmpName, objType.owner, n.info)
fieldA.typ = getSysType(tyInt)
objType.addField(fieldA)
result.add newFastAsgnStmt(newDotExpr(scratchObj, fieldA), n[2])
result.add newFastAsgnStmt(newDotExpr(scratchObj, fieldB), n[3])
let threadLocal = addLocalVar(varSection, objType.owner, fieldA.typ,
indirectAccess(castExpr, fieldA, n.info))
slice.sons[2] = threadLocal.newSymNode
else:
let a = genAddrOf(n)
field.typ = a.typ
objType.addField(field)
result.add newFastAsgnStmt(newDotExpr(scratchObj, field), a)
result.add newFastAsgnStmt(newDotExpr(scratchObj, fieldB), genHigh(n))
slice.sons[2] = newIntLit(0)
# the array itself does not need to go through a thread local variable:
slice.sons[1] = genDeref(indirectAccess(castExpr, field, n.info))
let threadLocal = addLocalVar(varSection, objType.owner, fieldB.typ,
indirectAccess(castExpr, fieldB, n.info))
slice.sons[3] = threadLocal.newSymNode
call.add slice
elif (let size = computeSize(argType); size < 0 or size > 16) and
n.getRoot != nil:
# it is more efficient to pass a pointer instead:
let a = genAddrOf(n)
field.typ = a.typ
objType.addField(field)
result.add newFastAsgnStmt(newDotExpr(scratchObj, field), a)
let threadLocal = addLocalVar(varSection, objType.owner, field.typ,
indirectAccess(castExpr, field, n.info))
call.add(genDeref(threadLocal.newSymNode))
else:
# boring case
field.typ = argType
objType.addField(field)
result.add newFastAsgnStmt(newDotExpr(scratchObj, field), n)
let threadLocal = addLocalVar(varSection, objType.owner, field.typ,
indirectAccess(castExpr, field, n.info))
call.add(threadLocal.newSymNode)
proc wrapProcForSpawn*(owner: PSym; spawnExpr: PNode; retType: PType;
barrier, dest: PNode = nil): PNode =
# if 'barrier' != nil, then it is in a 'parallel' section and we
# generate quite different code
let n = spawnExpr[1]
let spawnKind = spawnResult(retType, barrier!=nil)
case spawnKind
of srVoid:
internalAssert dest == nil
result = newNodeI(nkStmtList, n.info)
of srFlowVar:
internalAssert dest == nil
result = newNodeIT(nkStmtListExpr, n.info, retType)
of srByVar:
if dest == nil: localError(n.info, "'spawn' must not be discarded")
result = newNodeI(nkStmtList, n.info)
if n.kind notin nkCallKinds:
localError(n.info, "'spawn' takes a call expression")
return
if optThreadAnalysis in gGlobalOptions:
if {tfThread, tfNoSideEffect} * n[0].typ.flags == {}:
@@ -162,6 +430,7 @@ proc wrapProcForSpawn*(owner: PSym; n: PNode): PNode =
threadParam.typ = ptrType
argsParam.typ = ptrType
argsParam.position = 1
var objType = createObj(owner, n.info)
incl(objType.flags, tfFinal)
let castExpr = createCastExpr(argsParam, objType)
@@ -174,7 +443,7 @@ proc wrapProcForSpawn*(owner: PSym; n: PNode): PNode =
varSectionB.addVar(scratchObj.newSymNode)
result.add varSectionB
var call = newNodeI(nkCall, n.info)
var call = newNodeIT(nkCall, n.info, n.typ)
var fn = n.sons[0]
# templates and macros are in fact valid here due to the nature of
# the transformation:
@@ -194,35 +463,44 @@ proc wrapProcForSpawn*(owner: PSym; n: PNode): PNode =
call.add(fn)
var varSection = newNodeI(nkVarSection, n.info)
let formals = n[0].typ.n
let tmpName = getIdent(genPrefix)
for i in 1 .. <n.len:
# we pick n's type here, which hopefully is 'tyArray' and not
# 'tyOpenArray':
var argType = n[i].typ.skipTypes(abstractInst)
if i < formals.len and formals[i].typ.kind == tyVar:
localError(n[i].info, "'spawn'ed function cannot have a 'var' parameter")
elif containsTyRef(argType):
localError(n[i].info, "'spawn'ed function cannot refer to 'ref'/closure")
if barrier.isNil:
setupArgsForConcurrency(n, objType, scratchObj, castExpr, call, varSection, result)
else:
setupArgsForParallelism(n, objType, scratchObj, castExpr, call, varSection, result)
let fieldname = if i < formals.len: formals[i].sym.name else: tmpName
var field = newSym(skField, fieldname, owner, n.info)
field.typ = argType
var barrierAsExpr: PNode = nil
if barrier != nil:
let typ = newType(tyPtr, owner)
typ.rawAddSon(magicsys.getCompilerProc("Barrier").typ)
var field = newSym(skField, getIdent"barrier", owner, n.info)
field.typ = typ
objType.addField(field)
result.add newFastAsgnStmt(newDotExpr(scratchObj, field), n[i])
result.add newFastAsgnStmt(newDotExpr(scratchObj, field), barrier)
barrierAsExpr = indirectAccess(castExpr, field, n.info)
var temp = newSym(skTemp, tmpName, owner, n.info)
temp.typ = argType
incl(temp.flags, sfFromGeneric)
var fvField, fvAsExpr: PNode = nil
if spawnKind == srFlowVar:
var field = newSym(skField, getIdent"fv", owner, n.info)
field.typ = retType
objType.addField(field)
fvField = newDotExpr(scratchObj, field)
fvAsExpr = indirectAccess(castExpr, field, n.info)
# create flowVar:
result.add newFastAsgnStmt(fvField, callProc(spawnExpr[2]))
if barrier == nil:
result.add callCodeGenProc("nimFlowVarCreateCondVar", fvField)
var vpart = newNodeI(nkIdentDefs, n.info, 3)
vpart.sons[0] = newSymNode(temp)
vpart.sons[1] = ast.emptyNode
vpart.sons[2] = indirectAccess(castExpr, field, n.info)
varSection.add vpart
elif spawnKind == srByVar:
var field = newSym(skField, getIdent"fv", owner, n.info)
field.typ = newType(tyPtr, objType.owner)
field.typ.rawAddSon(retType)
objType.addField(field)
fvAsExpr = indirectAccess(castExpr, field, n.info)
result.add newFastAsgnStmt(newDotExpr(scratchObj, field), genAddrOf(dest))
call.add(newSymNode(temp))
let wrapper = createWrapperProc(fn, threadParam, argsParam, varSection, call)
let wrapper = createWrapperProc(fn, threadParam, argsParam, varSection, call,
barrierAsExpr, fvAsExpr, spawnKind)
result.add callCodeGenProc("nimSpawn", wrapper.newSymNode,
genAddrOf(scratchObj.newSymNode))
if spawnKind == srFlowVar: result.add fvField

View File

@@ -69,6 +69,7 @@ proc commandCompileToC =
# echo "BEFORE CHECK DEP"
# discard checkDepMem(gProjectMainIdx)
# echo "CHECK DEP COMPLETE"
discard
compileProject()
cgenWriteModules()
@@ -283,11 +284,11 @@ proc resetMemory =
echo GC_getStatistics()
const
SimiluateCaasMemReset = false
SimulateCaasMemReset = false
PrintRopeCacheStats = false
proc mainCommand* =
when SimiluateCaasMemReset:
when SimulateCaasMemReset:
gGlobalOptions.incl(optCaasEnabled)
# In "nimrod serve" scenario, each command must reset the registered passes
@@ -309,7 +310,7 @@ proc mainCommand* =
of "cpp", "compiletocpp":
extccomp.cExt = ".cpp"
gCmd = cmdCompileToCpp
if cCompiler == ccGcc: setCC("gpp")
if cCompiler == ccGcc: setCC("gcc")
wantMainModule()
defineSymbol("cpp")
commandCompileToC()
@@ -453,6 +454,6 @@ proc mainCommand* =
echo " efficiency: ", formatFloat(1-(gCacheMisses.float/gCacheTries.float),
ffDecimal, 3)
when SimiluateCaasMemReset:
when SimulateCaasMemReset:
resetMemory()

View File

@@ -115,7 +115,7 @@ proc newModule(fileIdx: int32): PSym =
new(result)
result.id = - 1 # for better error checking
result.kind = skModule
let filename = fileIdx.toFilename
let filename = fileIdx.toFullPath
result.name = getIdent(splitFile(filename).name)
if not isNimrodIdentifier(result.name.s):
rawMessage(errInvalidModuleName, result.name.s)

View File

@@ -116,7 +116,7 @@ type
warnSmallLshouldNotBeUsed, warnUnknownMagic, warnRedefinitionOfLabel,
warnUnknownSubstitutionX, warnLanguageXNotSupported, warnCommentXIgnored,
warnNilStatement, warnAnalysisLoophole,
warnDifferentHeaps, warnWriteToForeignHeap, warnImplicitClosure,
warnDifferentHeaps, warnWriteToForeignHeap, warnUnsafeCode,
warnEachIdentIsTuple, warnShadowIdent,
warnProveInit, warnProveField, warnProveIndex, warnGcUnsafe, warnGcUnsafe2,
warnUninit, warnGcMem, warnUser,
@@ -380,7 +380,7 @@ const
warnAnalysisLoophole: "thread analysis incomplete due to unknown call '$1' [AnalysisLoophole]",
warnDifferentHeaps: "possible inconsistency of thread local heaps [DifferentHeaps]",
warnWriteToForeignHeap: "write to foreign heap [WriteToForeignHeap]",
warnImplicitClosure: "implicit closure convention: '$1' [ImplicitClosure]",
warnUnsafeCode: "unsafe code: '$1' [UnsafeCode]",
warnEachIdentIsTuple: "each identifier is a tuple [EachIdentIsTuple]",
warnShadowIdent: "shadowed identifier: '$1' [ShadowIdent]",
warnProveInit: "Cannot prove that '$1' is initialized. This will become a compile time error in the future. [ProveInit]",
@@ -416,7 +416,7 @@ const
"RedefinitionOfLabel", "UnknownSubstitutionX", "LanguageXNotSupported",
"CommentXIgnored", "NilStmt",
"AnalysisLoophole", "DifferentHeaps", "WriteToForeignHeap",
"ImplicitClosure", "EachIdentIsTuple", "ShadowIdent",
"UnsafeCode", "EachIdentIsTuple", "ShadowIdent",
"ProveInit", "ProveField", "ProveIndex", "GcUnsafe", "GcUnsafe2", "Uninit",
"GcMem", "User"]

View File

@@ -3,7 +3,7 @@ Name: "Nimrod"
Version: "$version"
Platforms: """
windows: i386;amd64
linux: i386;amd64;powerpc64;arm;sparc;mips
linux: i386;amd64;powerpc64;arm;sparc;mips;powerpc
macosx: i386;amd64;powerpc64
solaris: i386;amd64;sparc
freebsd: i386;amd64
@@ -79,6 +79,7 @@ Files: "lib/system/*.nim"
Files: "lib/core/*.nim"
Files: "lib/pure/*.nim"
Files: "lib/pure/collections/*.nim"
Files: "lib/pure/concurrency/*.nim"
Files: "lib/impure/*.nim"
Files: "lib/wrappers/*.nim"

View File

@@ -12,10 +12,9 @@
const
MaxSetElements* = 1 shl 16 # (2^16) to support unicode character sets?
defaultAsmMarkerSymbol* = '!'
VersionMajor* = 0
VersionMinor* = 9
VersionPatch* = 4
VersionPatch* = 5
VersionAsString* = $VersionMajor & "." & $VersionMinor & "." & $VersionPatch
RodFileVersion* = "1215" # modify this if the rod-format changes!

View File

@@ -16,7 +16,7 @@ const
hasFFI* = defined(useFFI)
newScopeForIf* = true
useCaas* = not defined(noCaas)
noTimeMachine = defined(avoidTimeMachine) and defined(macosx)
noTimeMachine* = defined(avoidTimeMachine) and defined(macosx)
type # please make sure we have under 32 options
# (improves code efficiency a lot!)

View File

@@ -10,7 +10,7 @@
## This module implements the pattern matching features for term rewriting
## macro support.
import strutils, ast, astalgo, types, msgs, idents, renderer, wordrecg
import strutils, ast, astalgo, types, msgs, idents, renderer, wordrecg, trees
# we precompile the pattern here for efficiency into some internal
# stack based VM :-) Why? Because it's fun; I did no benchmarks to see if that
@@ -215,6 +215,9 @@ proc isAssignable*(owner: PSym, n: PNode): TAssignableResult =
result = arLValue
of nkObjUpConv, nkObjDownConv, nkCheckedFieldExpr:
result = isAssignable(owner, n.sons[0])
of nkCallKinds:
# builtin slice keeps lvalue-ness:
if getMagic(n) == mSlice: result = isAssignable(owner, n.sons[1])
else:
discard

View File

@@ -28,25 +28,20 @@ import
llstream, lexer, idents, strutils, ast, astalgo, msgs
type
TParser*{.final.} = object # a TParser object represents a module that
TParser*{.final.} = object # A TParser object represents a module that
# is being parsed
currInd: int # current indentation
firstTok, strongSpaces: bool
lex*: TLexer # the lexer that is used for parsing
tok*: TToken # the current token
inPragma: int
currInd: int # current indentation level
firstTok, strongSpaces: bool # Has the first token been read?
# Is strongSpaces on?
lex*: TLexer # The lexer that is used for parsing
tok*: TToken # The current token
inPragma: int # Pragma level
inSemiStmtList: int
proc parseAll*(p: var TParser): PNode
proc closeParser*(p: var TParser)
proc parseTopLevelStmt*(p: var TParser): PNode
# implements an iterator. Returns the next top-level statement or
# emtyNode if end of stream.
proc parseString*(s: string, filename: string = "", line: int = 0): PNode
# filename and line could be set optionally, when the string originates
# from a certain source file. This way, the compiler could generate
# correct error messages referring to the original source.
# helpers for the other parsers
proc isOperator*(tok: TToken): bool
@@ -68,15 +63,19 @@ proc optInd*(p: var TParser, n: PNode)
proc indAndComment*(p: var TParser, n: PNode)
proc setBaseFlags*(n: PNode, base: TNumericalBase)
proc parseSymbol*(p: var TParser, allowNil = false): PNode
proc parseTry(p: var TParser): PNode
proc parseTry(p: var TParser; isExpr: bool): PNode
proc parseCase(p: var TParser): PNode
# implementation
proc getTok(p: var TParser) =
proc getTok(p: var TParser) =
## Get the next token from the parser's lexer, and store it in the parser's
## `tok` member.
rawGetTok(p.lex, p.tok)
proc openParser*(p: var TParser, fileIdx: int32, inputStream: PLLStream,
strongSpaces=false) =
## Open a parser, using the given arguments to set up its internal state.
##
initToken(p.tok)
openLexer(p.lex, fileIdx, inputStream)
getTok(p) # read the first token
@@ -87,13 +86,16 @@ proc openParser*(p: var TParser, filename: string, inputStream: PLLStream,
strongSpaces=false) =
openParser(p, filename.fileInfoIdx, inputstream, strongSpaces)
proc closeParser(p: var TParser) =
proc closeParser(p: var TParser) =
## Close a parser, freeing up its resources.
closeLexer(p.lex)
proc parMessage(p: TParser, msg: TMsgKind, arg: string = "") =
proc parMessage(p: TParser, msg: TMsgKind, arg = "") =
## Produce and emit the parser message `arg` to output.
lexMessage(p.lex, msg, arg)
proc parMessage(p: TParser, msg: TMsgKind, tok: TToken) =
proc parMessage(p: TParser, msg: TMsgKind, tok: TToken) =
## Produce and emit a parser message to output about the token `tok`
lexMessage(p.lex, msg, prettyTok(tok))
template withInd(p: expr, body: stmt) {.immediate.} =
@@ -143,10 +145,15 @@ proc expectIdent(p: TParser) =
lexMessage(p.lex, errIdentifierExpected, prettyTok(p.tok))
proc eat(p: var TParser, tokType: TTokType) =
if p.tok.tokType == tokType: getTok(p)
else: lexMessage(p.lex, errTokenExpected, TokTypeToStr[tokType])
## Move the parser to the next token if the current token is of type
## `tokType`, otherwise error.
if p.tok.tokType == tokType:
getTok(p)
else:
lexMessage(p.lex, errTokenExpected, TokTypeToStr[tokType])
proc parLineInfo(p: TParser): TLineInfo =
## Retrieve the line information associated with the parser's current state.
result = getLineInfo(p.lex, p.tok)
proc indAndComment(p: var TParser, n: PNode) =
@@ -192,9 +199,11 @@ proc isSigilLike(tok: TToken): bool {.inline.} =
result = tok.tokType == tkOpr and relevantOprChar(tok.ident) == '@'
proc isLeftAssociative(tok: TToken): bool {.inline.} =
## Determines whether the token is left assocative.
result = tok.tokType != tkOpr or relevantOprChar(tok.ident) != '^'
proc getPrecedence(tok: TToken, strongSpaces: bool): int =
## Calculates the precedence of the given token.
template considerStrongSpaces(x): expr =
x + (if strongSpaces: 100 - tok.strongSpaceA.int*10 else: 0)
@@ -224,22 +233,26 @@ proc getPrecedence(tok: TToken, strongSpaces: bool): int =
else: result = -10
proc isOperator(tok: TToken): bool =
## Determines if the given token is an operator type token.
tok.tokType in {tkOpr, tkDiv, tkMod, tkShl, tkShr, tkIn, tkNotin, tkIs,
tkIsnot, tkNot, tkOf, tkAs, tkDotDot, tkAnd, tkOr, tkXor}
proc isUnary(p: TParser): bool =
## Check if the current parser token is a unary operator
p.strongSpaces and p.tok.tokType in {tkOpr, tkDotDot} and
p.tok.strongSpaceB == 0 and
p.tok.strongSpaceA > 0
proc checkBinary(p: TParser) {.inline.} =
## Check if the current parser token is a binary operator.
# we don't check '..' here as that's too annoying
if p.strongSpaces and p.tok.tokType == tkOpr:
if p.tok.strongSpaceB > 0 and p.tok.strongSpaceA != p.tok.strongSpaceB:
parMessage(p, errGenerated, "number of spaces around '$#' not consistent"%
prettyTok(p.tok))
parMessage(p, errGenerated,
"Number of spaces around '$#' not consistent" %
prettyTok(p.tok))
elif p.tok.strongSpaceA notin {0,1,2,4,8}:
parMessage(p, errGenerated, "number of spaces must be 0,1,2,4 or 8")
parMessage(p, errGenerated, "Number of spaces must be 0,1,2,4 or 8")
#| module = stmt ^* (';' / IND{=})
#|
@@ -274,7 +287,7 @@ proc colcom(p: var TParser, n: PNode) =
skipComment(p, n)
proc parseSymbol(p: var TParser, allowNil = false): PNode =
#| symbol = '`' (KEYW|IDENT|operator|'(' ')'|'[' ']'|'{' '}'|'='|literal)+ '`'
#| symbol = '`' (KEYW|IDENT|literal|(operator|'('|')'|'['|']'|'{'|'}'|'=')+)+ '`'
#| | IDENT
case p.tok.tokType
of tkSymbol:
@@ -285,31 +298,22 @@ proc parseSymbol(p: var TParser, allowNil = false): PNode =
getTok(p)
while true:
case p.tok.tokType
of tkBracketLe:
add(result, newIdentNodeP(getIdent"[]", p))
getTok(p)
eat(p, tkBracketRi)
of tkEquals:
add(result, newIdentNodeP(getIdent"=", p))
getTok(p)
of tkParLe:
add(result, newIdentNodeP(getIdent"()", p))
getTok(p)
eat(p, tkParRi)
of tkCurlyLe:
add(result, newIdentNodeP(getIdent"{}", p))
getTok(p)
eat(p, tkCurlyRi)
of tokKeywordLow..tokKeywordHigh, tkSymbol, tkOpr, tkDot, tkDotDot:
add(result, newIdentNodeP(p.tok.ident, p))
getTok(p)
of tkIntLit..tkCharLit:
add(result, newIdentNodeP(getIdent(tokToStr(p.tok)), p))
getTok(p)
else:
of tkAccent:
if result.len == 0:
parMessage(p, errIdentifierExpected, p.tok)
break
of tkOpr, tkDot, tkDotDot, tkEquals, tkParLe..tkParDotRi:
var accm = ""
while p.tok.tokType in {tkOpr, tkDot, tkDotDot, tkEquals,
tkParLe..tkParDotRi}:
accm.add(tokToStr(p.tok))
getTok(p)
result.add(newIdentNodeP(getIdent(accm), p))
of tokKeywordLow..tokKeywordHigh, tkSymbol, tkIntLit..tkCharLit:
result.add(newIdentNodeP(getIdent(tokToStr(p.tok)), p))
getTok(p)
else:
parMessage(p, errIdentifierExpected, p.tok)
eat(p, tkAccent)
else:
if allowNil and p.tok.tokType == tkNil:
@@ -841,7 +845,7 @@ proc parseIdentColonEquals(p: var TParser, flags: TDeclaredIdentFlags): PNode =
addSon(result, parseTypeDesc(p))
else:
addSon(result, ast.emptyNode)
if (p.tok.tokType != tkEquals) and not (withBothOptional in flags):
if p.tok.tokType != tkEquals and withBothOptional notin flags:
parMessage(p, errColonOrEqualsExpected, p.tok)
if p.tok.tokType == tkEquals:
getTok(p)
@@ -982,6 +986,7 @@ proc parseSymbolList(p: var TParser, result: PNode, allowNil = false) =
proc parseTypeDescKAux(p: var TParser, kind: TNodeKind,
mode: TPrimaryMode): PNode =
#| distinct = 'distinct' optInd typeDesc
result = newNodeP(kind, p)
getTok(p)
optInd(p, result)
@@ -999,13 +1004,13 @@ proc parseExpr(p: var TParser): PNode =
#| expr = (ifExpr
#| | whenExpr
#| | caseExpr
#| | tryStmt)
#| | tryExpr)
#| / simpleExpr
case p.tok.tokType:
of tkIf: result = parseIfExpr(p, nkIfExpr)
of tkWhen: result = parseIfExpr(p, nkWhenExpr)
of tkCase: result = parseCase(p)
of tkTry: result = parseTry(p)
of tkTry: result = parseTry(p, isExpr=true)
else: result = simpleExpr(p)
proc parseEnum(p: var TParser): PNode
@@ -1108,6 +1113,7 @@ proc parseTypeDefAux(p: var TParser): PNode =
result = simpleExpr(p, pmTypeDef)
proc makeCall(n: PNode): PNode =
## Creates a call if the given node isn't already a call.
if n.kind in nkCallKinds:
result = n
else:
@@ -1357,22 +1363,25 @@ proc parseCase(p: var TParser): PNode =
if wasIndented:
p.currInd = oldInd
proc parseTry(p: var TParser): PNode =
proc parseTry(p: var TParser; isExpr: bool): PNode =
#| tryStmt = 'try' colcom stmt &(IND{=}? 'except'|'finally')
#| (IND{=}? 'except' exprList colcom stmt)*
#| (IND{=}? 'finally' colcom stmt)?
#| tryExpr = 'try' colcom stmt &(optInd 'except'|'finally')
#| (optInd 'except' exprList colcom stmt)*
#| (optInd 'finally' colcom stmt)?
result = newNodeP(nkTryStmt, p)
getTok(p)
eat(p, tkColon)
skipComment(p, result)
addSon(result, parseStmt(p))
var b: PNode = nil
while sameOrNoInd(p):
while sameOrNoInd(p) or isExpr:
case p.tok.tokType
of tkExcept:
of tkExcept:
b = newNodeP(nkExceptBranch, p)
exprList(p, tkColon, b)
of tkFinally:
of tkFinally:
b = newNodeP(nkFinally, p)
getTokNoInd(p)
eat(p, tkColon)
@@ -1871,7 +1880,7 @@ proc complexOrSimpleStmt(p: var TParser): PNode =
of tkIf: result = parseIfOrWhen(p, nkIfStmt)
of tkWhile: result = parseWhile(p)
of tkCase: result = parseCase(p)
of tkTry: result = parseTry(p)
of tkTry: result = parseTry(p, isExpr=false)
of tkFinally: result = parseExceptBlock(p, nkFinally)
of tkExcept: result = parseExceptBlock(p, nkExceptBranch)
of tkFor: result = parseFor(p)
@@ -1952,7 +1961,8 @@ proc parseStmt(p: var TParser): PNode =
if p.tok.tokType != tkSemiColon: break
getTok(p)
proc parseAll(p: var TParser): PNode =
proc parseAll(p: var TParser): PNode =
## Parses the rest of the input stream held by the parser into a PNode.
result = newNodeP(nkStmtList, p)
while p.tok.tokType != tkEof:
var a = complexOrSimpleStmt(p)
@@ -1966,6 +1976,8 @@ proc parseAll(p: var TParser): PNode =
parMessage(p, errInvalidIndentation)
proc parseTopLevelStmt(p: var TParser): PNode =
## Implements an iterator which, when called repeatedly, returns the next
## top-level statement or emptyNode if end of stream.
result = ast.emptyNode
while true:
if p.tok.indent != 0:
@@ -1984,6 +1996,10 @@ proc parseTopLevelStmt(p: var TParser): PNode =
break
proc parseString(s: string, filename: string = "", line: int = 0): PNode =
## Parses a string into an AST, returning the top node.
## `filename` and `line`, although optional, provide info so that the
## compiler can generate correct error messages referring to the original
## source.
var stream = llStreamOpen(s)
stream.lineOffset = line

View File

@@ -287,7 +287,7 @@ proc applyRule*(c: PContext, s: PSym, n: PNode): PNode =
# constraint not fullfilled:
if not ok: return nil
markUsed(n, s)
markUsed(n.info, s)
if ctx.subMatch:
assert m.len == 3
m.sons[1] = result

View File

@@ -517,7 +517,7 @@ proc pragmaUses(c: PContext, n: PNode) =
proc processExc(c: PContext, x: PNode): PNode =
if x.kind in {nkAccQuoted, nkIdent, nkSym,
nkOpenSymChoice, nkClosedSymChoice}:
if considerAcc(x).s == "*":
if considerQuotedIdent(x).s == "*":
return newSymNode(ast.anyGlobal)
result = c.semExpr(c, x)
if result.kind != nkSym or sfGlobal notin result.sym.flags:
@@ -644,12 +644,13 @@ proc singlePragma(c: PContext, sym: PSym, n: PNode, i: int,
incl(sym.flags, sfNoReturn)
of wDynlib:
processDynLib(c, it, sym)
of wCompilerproc:
of wCompilerproc:
noVal(it) # compilerproc may not get a string!
makeExternExport(sym, "$1", it.info)
incl(sym.flags, sfCompilerProc)
incl(sym.flags, sfUsed) # suppress all those stupid warnings
registerCompilerProc(sym)
if sfFromGeneric notin sym.flags:
makeExternExport(sym, "$1", it.info)
incl(sym.flags, sfCompilerProc)
incl(sym.flags, sfUsed) # suppress all those stupid warnings
registerCompilerProc(sym)
of wProcVar:
noVal(it)
incl(sym.flags, sfProcvar)

View File

@@ -1,7 +1,7 @@
#
#
# The Nimrod Compiler
# (c) Copyright 2013 Andreas Rumpf
# (c) Copyright 2014 Andreas Rumpf
#
# See the file "copying.txt", included in this
# distribution, for details about the copyright.
@@ -149,8 +149,8 @@ proc checkDef(c: PGen; n: PNode) =
if n.kind != nkSym: return
checkDef(n, n.sym)
proc checkUse*(n: PNode, s: PSym) =
if n.info.fileIndex < 0: return
proc checkUse*(info: TLineInfo; s: PSym) =
if info.fileIndex < 0: return
# we simply convert it to what it looks like in the definition
# for consistency
@@ -159,10 +159,10 @@ proc checkUse*(n: PNode, s: PSym) =
if s.kind in {skType, skGenericParam} and sfAnon in s.flags: return
let newName = s.name.s
loadFile(n.info)
loadFile(info)
let line = gSourceFiles[n.info.fileIndex].lines[n.info.line-1]
var first = min(n.info.col.int, line.len)
let line = gSourceFiles[info.fileIndex].lines[info.line-1]
var first = min(info.col.int, line.len)
if first < 0: return
#inc first, skipIgnoreCase(line, "proc ", first)
while first > 0 and line[first-1] in Letters: dec first
@@ -179,8 +179,8 @@ proc checkUse*(n: PNode, s: PSym) =
if x.match(peg"\s* {\ident} \s* '=' \s* y$1 ('#' .*)?"):
x = ""
system.shallowCopy(gSourceFiles[n.info.fileIndex].lines[n.info.line-1], x)
gSourceFiles[n.info.fileIndex].dirty = true
system.shallowCopy(gSourceFiles[info.fileIndex].lines[info.line-1], x)
gSourceFiles[info.fileIndex].dirty = true
when false:
var cannotRename = initIntSet()
@@ -220,53 +220,9 @@ when false:
result.add s[i]
inc i
proc checkUse(c: PGen; n: PNode) =
if n.info.fileIndex < 0: return
let s = n.sym
# operators stay as they are:
if s.kind in {skResult, skTemp} or s.name.s[0] notin Letters: return
if s.kind in {skType, skGenericParam} and sfAnon in s.flags: return
if s.id in cannotRename: return
let newName = if rules.hasKey(s.name.s): rules[s.name.s]
else: beautifyName(s.name.s, n.sym.kind)
loadFile(n.info)
let line = gSourceFiles[n.info.fileIndex].lines[n.info.line-1]
var first = min(n.info.col.int, line.len)
if first < 0: return
#inc first, skipIgnoreCase(line, "proc ", first)
while first > 0 and line[first-1] in Letters: dec first
if first < 0: return
if line[first] == '`': inc first
if {sfImportc, sfExportc} * s.flags != {}:
# careful, we must ensure the resulting name still matches the external
# name:
if newName != s.name.s and newName != s.loc.r.ropeToStr and
lfFullExternalName notin s.loc.flags:
#Message(n.info, errGenerated,
# "cannot rename $# to $# due to external name" % [s.name.s, newName])
cannotRename.incl(s.id)
return
let last = first+identLen(line, first)-1
if differ(line, first, last, newName):
# last-first+1 != newName.len or
var x = line.subStr(0, first-1) & newName & line.substr(last+1)
when removeTP:
# the WinAPI module is full of 'TX = X' which after the substitution
# becomes 'X = X'. We remove those lines:
if x.match(peg"\s* {\ident} \s* '=' \s* y$1 ('#' .*)?"):
x = ""
system.shallowCopy(gSourceFiles[n.info.fileIndex].lines[n.info.line-1], x)
gSourceFiles[n.info.fileIndex].dirty = true
proc check(c: PGen, n: PNode) =
case n.kind
of nkSym: checkUse(n, n.sym)
of nkSym: checkUse(n.info, n.sym)
of nkBlockStmt, nkBlockExpr, nkBlockType:
checkDef(c, n[0])
check(c, n.sons[1])

View File

@@ -146,28 +146,29 @@ proc makeNimString(s: string): string =
for i in countup(0, len(s)-1): add(result, toNimChar(s[i]))
add(result, '\"')
proc putComment(g: var TSrcGen, s: string) =
proc putComment(g: var TSrcGen, s: string) =
if s.isNil: return
var i = 0
var comIndent = 1
var isCode = (len(s) >= 2) and (s[1] != ' ')
var ind = g.lineLen
var com = ""
while true:
while true:
case s[i]
of '\0':
break
of '\x0D':
of '\0':
break
of '\x0D':
put(g, tkComment, com)
com = ""
inc(i)
if s[i] == '\x0A': inc(i)
optNL(g, ind)
of '\x0A':
of '\x0A':
put(g, tkComment, com)
com = ""
inc(i)
optNL(g, ind)
of '#':
of '#':
add(com, s[i])
inc(i)
comIndent = 0
@@ -175,10 +176,10 @@ proc putComment(g: var TSrcGen, s: string) =
add(com, s[i])
inc(i)
inc(comIndent)
of ' ', '\x09':
of ' ', '\x09':
add(com, s[i])
inc(i)
else:
else:
# we may break the comment into a multi-line comment if the line
# gets too long:
# compute length of the following word:
@@ -195,10 +196,10 @@ proc putComment(g: var TSrcGen, s: string) =
optNL(g)
proc maxLineLength(s: string): int =
result = 0
if s.isNil: return 0
var i = 0
var lineLen = 0
while true:
while true:
case s[i]
of '\0':
break
@@ -459,7 +460,7 @@ proc lsub(n: PNode): int =
of nkBreakStmt: result = lsub(n.sons[0]) + len("break_")
of nkContinueStmt: result = lsub(n.sons[0]) + len("continue_")
of nkPragma: result = lcomma(n) + 4
of nkCommentStmt: result = len(n.comment)
of nkCommentStmt: result = if n.comment.isNil: 0 else: len(n.comment)
of nkOfBranch: result = lcomma(n, 0, - 2) + lsub(lastSon(n)) + len("of_:_")
of nkImportAs: result = lsub(n.sons[0]) + len("_as_") + lsub(n.sons[1])
of nkElifBranch: result = lsons(n) + len("elif_:_")

View File

@@ -10,7 +10,7 @@
## Serialization utilities for the compiler.
import strutils
proc c_sprintf(buf, frmt: cstring) {.importc: "sprintf", nodecl, varargs.}
proc c_sprintf(buf, frmt: cstring) {.importc: "sprintf", header: "<stdio.h>", nodecl, varargs.}
proc toStrMaxPrecision*(f: BiggestFloat): string =
if f != f:

View File

@@ -15,7 +15,8 @@ import
magicsys, parser, nversion, nimsets, semfold, importer,
procfind, lookups, rodread, pragmas, passes, semdata, semtypinst, sigmatch,
intsets, transf, vmdef, vm, idgen, aliases, cgmeth, lambdalifting,
evaltempl, patterns, parampatterns, sempass2, pretty, semmacrosanity
evaltempl, patterns, parampatterns, sempass2, pretty, semmacrosanity,
semparallel
# implementation
@@ -134,7 +135,7 @@ proc isTopLevel(c: PContext): bool {.inline.} =
result = c.currentScope.depthLevel <= 2
proc newSymS(kind: TSymKind, n: PNode, c: PContext): PSym =
result = newSym(kind, considerAcc(n), getCurrOwner(), n.info)
result = newSym(kind, considerQuotedIdent(n), getCurrOwner(), n.info)
proc newSymG*(kind: TSymKind, n: PNode, c: PContext): PSym =
# like newSymS, but considers gensym'ed symbols
@@ -147,7 +148,7 @@ proc newSymG*(kind: TSymKind, n: PNode, c: PContext): PSym =
# template; we must fix it here: see #909
result.owner = getCurrOwner()
else:
result = newSym(kind, considerAcc(n), getCurrOwner(), n.info)
result = newSym(kind, considerQuotedIdent(n), getCurrOwner(), n.info)
proc semIdentVis(c: PContext, kind: TSymKind, n: PNode,
allowed: TSymFlags): PSym
@@ -268,11 +269,15 @@ include hlo, seminst, semcall
proc semAfterMacroCall(c: PContext, n: PNode, s: PSym,
flags: TExprFlags): PNode =
## Semantically check the output of a macro.
## This involves processes such as re-checking the macro output for type
## coherence, making sure that variables declared with 'let' aren't
## reassigned, and binding the unbound identifiers that the macro output
## contains.
inc(evalTemplateCounter)
if evalTemplateCounter > 100:
globalError(s.info, errTemplateInstantiationTooNested)
let oldFriend = c.friendModule
c.friendModule = s.owner.getModule
c.friendModules.add(s.owner.getModule)
result = n
if s.typ.sons[0] == nil:
@@ -296,11 +301,13 @@ proc semAfterMacroCall(c: PContext, n: PNode, s: PSym,
result = fitNode(c, s.typ.sons[0], result)
#GlobalError(s.info, errInvalidParamKindX, typeToString(s.typ.sons[0]))
dec(evalTemplateCounter)
c.friendModule = oldFriend
discard c.friendModules.pop()
proc semMacroExpr(c: PContext, n, nOrig: PNode, sym: PSym,
flags: TExprFlags = {}): PNode =
markUsed(n, sym)
pushInfoContext(nOrig.info)
markUsed(n.info, sym)
if sym == c.p.owner:
globalError(n.info, errRecursiveDependencyX, sym.name.s)
@@ -310,6 +317,7 @@ proc semMacroExpr(c: PContext, n, nOrig: PNode, sym: PSym,
result = evalMacroCall(c.module, n, nOrig, sym)
if efNoSemCheck notin flags:
result = semAfterMacroCall(c, result, sym, flags)
popInfoContext()
proc forceBool(c: PContext, n: PNode): PNode =
result = fitNode(c, getSysType(tyBool), n)

View File

@@ -168,7 +168,7 @@ proc resolveOverloads(c: PContext, n, orig: PNode,
pickBest(callOp)
if overloadsState == csEmpty and result.state == csEmpty:
localError(n.info, errUndeclaredIdentifier, considerAcc(f).s)
localError(n.info, errUndeclaredIdentifier, considerQuotedIdent(f).s)
return
elif result.state != csMatch:
if nfExprCall in n.flags:
@@ -251,7 +251,7 @@ proc inferWithMetatype(c: PContext, formal: PType,
proc semResolvedCall(c: PContext, n: PNode, x: TCandidate): PNode =
assert x.state == csMatch
var finalCallee = x.calleeSym
markUsed(n.sons[0], finalCallee)
markUsed(n.sons[0].info, finalCallee)
if finalCallee.ast == nil:
internalError(n.info, "calleeSym.ast is nil") # XXX: remove this check!
if finalCallee.ast.sons[genericParamsPos].kind != nkEmpty:
@@ -283,7 +283,7 @@ proc explicitGenericSym(c: PContext, n: PNode, s: PSym): PNode =
var m: TCandidate
initCandidate(c, m, s, n)
var newInst = generateInstance(c, s, m.bindings, n.info)
markUsed(n, s)
markUsed(n.info, s)
result = newSymNode(newInst, n.info)
proc explicitGenericInstantiation(c: PContext, n: PNode, s: PSym): PNode =

View File

@@ -52,7 +52,7 @@ type
importTable*: PScope # scope for all imported symbols
topLevelScope*: PScope # scope for all top-level symbols
p*: PProcCon # procedure context
friendModule*: PSym # current friend module; may access private data;
friendModules*: seq[PSym] # friend modules; may access private data;
# this is used so that generic instantiations
# can access private object fields
instCounter*: int # to prevent endless instantiations
@@ -91,6 +91,7 @@ type
generics*: seq[TInstantiationPair] # pending list of instantiated generics to compile
lastGenericIdx*: int # used for the generics stack
hloLoopDetector*: int # used to prevent endless loops in the HLO
inParallelStmt*: int
proc makeInstPair*(s: PSym, inst: PInstantiation): TInstantiationPair =
result.genericSym = s
@@ -168,7 +169,7 @@ proc newContext(module: PSym): PContext =
initLinkedList(result.libs)
append(result.optionStack, newOptionEntry())
result.module = module
result.friendModule = module
result.friendModules = @[module]
result.converters = @[]
result.patterns = @[]
result.includedFiles = initIntSet()

View File

@@ -12,7 +12,7 @@
proc semTemplateExpr(c: PContext, n: PNode, s: PSym,
flags: TExprFlags = {}): PNode =
markUsed(n, s)
markUsed(n.info, s)
pushInfoContext(n.info)
result = evalTemplate(n, s, getCurrOwner())
if efNoSemCheck notin flags: result = semAfterMacroCall(c, result, s, flags)
@@ -78,7 +78,7 @@ proc inlineConst(n: PNode, s: PSym): PNode {.inline.} =
proc semSym(c: PContext, n: PNode, s: PSym, flags: TExprFlags): PNode =
case s.kind
of skConst:
markUsed(n, s)
markUsed(n.info, s)
case skipTypes(s.typ, abstractInst-{tyTypeDesc}).kind
of tyNil, tyChar, tyInt..tyInt64, tyFloat..tyFloat128,
tyTuple, tySet, tyUInt..tyUInt64:
@@ -101,7 +101,7 @@ proc semSym(c: PContext, n: PNode, s: PSym, flags: TExprFlags): PNode =
of skMacro: result = semMacroExpr(c, n, n, s, flags)
of skTemplate: result = semTemplateExpr(c, n, s, flags)
of skVar, skLet, skResult, skParam, skForVar:
markUsed(n, s)
markUsed(n.info, s)
# if a proc accesses a global variable, it is not side effect free:
if sfGlobal in s.flags:
incl(c.p.owner.flags, sfSideEffect)
@@ -123,13 +123,13 @@ proc semSym(c: PContext, n: PNode, s: PSym, flags: TExprFlags): PNode =
n.typ = s.typ
return n
of skType:
markUsed(n, s)
markUsed(n.info, s)
if s.typ.kind == tyStatic and s.typ.n != nil:
return s.typ.n
result = newSymNode(s, n.info)
result.typ = makeTypeDesc(c, s.typ)
else:
markUsed(n, s)
markUsed(n.info, s)
result = newSymNode(s, n.info)
type
@@ -166,6 +166,7 @@ proc checkConvertible(c: PContext, castDest, src: PType): TConvStatus =
elif (skipTypes(castDest, abstractVarRange).kind in IntegralTypes) and
(skipTypes(src, abstractVarRange-{tyTypeDesc}).kind in IntegralTypes):
# accept conversion between integral types
discard
else:
# we use d, s here to speed up that operation a bit:
case cmpTypes(c, d, s)
@@ -175,21 +176,26 @@ proc checkConvertible(c: PContext, castDest, src: PType): TConvStatus =
else:
discard
proc isCastable(dst, src: PType): bool =
proc isCastable(dst, src: PType): bool =
## Checks whether the source type can be casted to the destination type.
## Casting is very unrestrictive; casts are allowed as long as
## castDest.size >= src.size, and typeAllowed(dst, skParam)
#const
# castableTypeKinds = {tyInt, tyPtr, tyRef, tyCstring, tyString,
# tySequence, tyPointer, tyNil, tyOpenArray,
# tyProc, tySet, tyEnum, tyBool, tyChar}
var ds, ss: BiggestInt
# this is very unrestrictive; cast is allowed if castDest.size >= src.size
ds = computeSize(dst)
ss = computeSize(src)
if ds < 0:
var dstSize, srcSize: BiggestInt
dstSize = computeSize(dst)
srcSize = computeSize(src)
if dstSize < 0:
result = false
elif ss < 0:
elif srcSize < 0:
result = false
elif not typeAllowed(dst, skParam):
result = false
else:
result = (ds >= ss) or
result = (dstSize >= srcSize) or
(skipTypes(dst, abstractInst).kind in IntegralTypes) or
(skipTypes(src, abstractInst-{tyTypeDesc}).kind in IntegralTypes)
@@ -247,12 +253,13 @@ proc semConv(c: PContext, n: PNode): PNode =
let it = op.sons[i]
let status = checkConvertible(c, result.typ, it.typ)
if status in {convOK, convNotNeedeed}:
markUsed(n, it.sym)
markUsed(n.info, it.sym)
markIndirect(c, it.sym)
return it
localError(n.info, errUseQualifier, op.sons[0].sym.name.s)
proc semCast(c: PContext, n: PNode): PNode =
## Semantically analyze a casting ("cast[type](param)")
if optSafeCode in gGlobalOptions: localError(n.info, errCastNotInSafeMode)
#incl(c.p.owner.flags, sfSideEffect)
checkSonsLen(n, 2)
@@ -386,7 +393,7 @@ proc semOpAux(c: PContext, n: PNode) =
var a = n.sons[i]
if a.kind == nkExprEqExpr and sonsLen(a) == 2:
var info = a.sons[0].info
a.sons[0] = newIdentNode(considerAcc(a.sons[0]), info)
a.sons[0] = newIdentNode(considerQuotedIdent(a.sons[0]), info)
a.sons[1] = semExprWithType(c, a.sons[1], flags)
a.typ = a.sons[1].typ
else:
@@ -785,6 +792,10 @@ proc semIndirectOp(c: PContext, n: PNode, flags: TExprFlags): PNode =
n.flags.incl nfExprCall
result = semOverloadedCallAnalyseEffects(c, n, nOrig, flags)
if result == nil: return errorNode(c, n)
elif result.kind notin nkCallKinds:
# the semExpr() in overloadedCallOpr can even break this condition!
# See bug #904 of how to trigger it:
return result
#result = afterCallActions(c, result, nOrig, flags)
fixAbstractType(c, result)
analyseIfAddressTakenInCall(c, result)
@@ -964,12 +975,12 @@ proc builtinFieldAccess(c: PContext, n: PNode, flags: TExprFlags): PNode =
var s = qualifiedLookUp(c, n, {checkAmbiguity, checkUndeclared})
if s != nil:
markUsed(n.sons[1], s)
markUsed(n.sons[1].info, s)
return semSym(c, n, s, flags)
n.sons[0] = semExprWithType(c, n.sons[0], flags+{efDetermineType})
#restoreOldStyleType(n.sons[0])
var i = considerAcc(n.sons[1])
var i = considerQuotedIdent(n.sons[1])
var ty = n.sons[0].typ
var f: PSym = nil
result = nil
@@ -987,7 +998,7 @@ proc builtinFieldAccess(c: PContext, n: PNode, flags: TExprFlags): PNode =
result = newSymNode(f)
result.info = n.info
result.typ = ty
markUsed(n, f)
markUsed(n.info, f)
return
of tyTypeParamsHolders:
return readTypeParameter(c, ty, i, n.info)
@@ -1019,7 +1030,7 @@ proc builtinFieldAccess(c: PContext, n: PNode, flags: TExprFlags): PNode =
if f != nil:
if fieldVisible(c, f):
# is the access to a public field or in the same module or in a friend?
markUsed(n.sons[1], f)
markUsed(n.sons[1].info, f)
n.sons[0] = makeDeref(n.sons[0])
n.sons[1] = newSymNode(f) # we now have the correct field
n.typ = f.typ
@@ -1032,7 +1043,7 @@ proc builtinFieldAccess(c: PContext, n: PNode, flags: TExprFlags): PNode =
elif ty.kind == tyTuple and ty.n != nil:
f = getSymFromList(ty.n, i)
if f != nil:
markUsed(n.sons[1], f)
markUsed(n.sons[1].info, f)
n.sons[0] = makeDeref(n.sons[0])
n.sons[1] = newSymNode(f)
n.typ = f.typ
@@ -1050,7 +1061,7 @@ proc dotTransformation(c: PContext, n: PNode): PNode =
addSon(result, n.sons[1])
addSon(result, copyTree(n[0]))
else:
var i = considerAcc(n.sons[1])
var i = considerQuotedIdent(n.sons[1])
result = newNodeI(nkDotCall, n.info)
result.flags.incl nfDotField
addSon(result, newIdentNode(i, n[1].info))
@@ -1134,7 +1145,7 @@ proc semArrayAccess(c: PContext, n: PNode, flags: TExprFlags): PNode =
result = semExpr(c, buildOverloadedSubscripts(n, getIdent"[]"))
proc propertyWriteAccess(c: PContext, n, nOrig, a: PNode): PNode =
var id = considerAcc(a[1])
var id = considerQuotedIdent(a[1])
var setterId = newIdentNode(getIdent(id.s & '='), n.info)
# a[0] is already checked for semantics, that does ``builtinFieldAccess``
# this is ugly. XXX Semantic checking should use the ``nfSem`` flag for
@@ -1368,7 +1379,7 @@ proc lookUpForDefined(c: PContext, n: PNode, onlyCurrentScope: bool): PSym =
else:
localError(n.sons[1].info, errIdentifierExpected, "")
of nkAccQuoted:
result = lookUpForDefined(c, considerAcc(n), onlyCurrentScope)
result = lookUpForDefined(c, considerQuotedIdent(n), onlyCurrentScope)
of nkSym:
result = n.sym
else:
@@ -1387,11 +1398,6 @@ proc semDefined(c: PContext, n: PNode, onlyCurrentScope: bool): PNode =
result.info = n.info
result.typ = getSysType(tyBool)
proc setMs(n: PNode, s: PSym): PNode =
result = n
n.sons[0] = newSymNode(s)
n.sons[0].info = n.info
proc expectMacroOrTemplateCall(c: PContext, n: PNode): PSym =
## The argument to the proc should be nkCall(...) or similar
## Returns the macro/template symbol
@@ -1448,7 +1454,7 @@ proc semExpandToAst(c: PContext, n: PNode): PNode =
if expandedSym.kind == skError: return n
macroCall.sons[0] = newSymNode(expandedSym, macroCall.info)
markUsed(n, expandedSym)
markUsed(n.info, expandedSym)
for i in countup(1, macroCall.len-1):
macroCall.sons[i] = semExprWithType(c, macroCall[i], {})
@@ -1583,6 +1589,27 @@ proc semShallowCopy(c: PContext, n: PNode, flags: TExprFlags): PNode =
else:
result = semDirectOp(c, n, flags)
proc createFlowVar(c: PContext; t: PType; info: TLineInfo): PType =
result = newType(tyGenericInvokation, c.module)
addSonSkipIntLit(result, magicsys.getCompilerProc("FlowVar").typ)
addSonSkipIntLit(result, t)
result = instGenericContainer(c, info, result, allowMetaTypes = false)
proc instantiateCreateFlowVarCall(c: PContext; t: PType;
info: TLineInfo): PSym =
let sym = magicsys.getCompilerProc("nimCreateFlowVar")
if sym == nil:
localError(info, errSystemNeeds, "nimCreateFlowVar")
var bindings: TIdTable
initIdTable(bindings)
bindings.idTablePut(sym.ast[genericParamsPos].sons[0].typ, t)
result = c.semGenerateInstance(c, sym, bindings, info)
proc setMs(n: PNode, s: PSym): PNode =
result = n
n.sons[0] = newSymNode(s)
n.sons[0].info = n.info
proc semMagic(c: PContext, n: PNode, s: PSym, flags: TExprFlags): PNode =
# this is a hotspot in the compiler!
# DON'T forget to update ast.SpecialSemMagics if you add a magic here!
@@ -1604,6 +1631,22 @@ proc semMagic(c: PContext, n: PNode, s: PSym, flags: TExprFlags): PNode =
checkSonsLen(n, 2)
result = newStrNodeT(renderTree(n[1], {renderNoComments}), n)
result.typ = getSysType(tyString)
of mParallel:
result = setMs(n, s)
var x = n.lastSon
if x.kind == nkDo: x = x.sons[bodyPos]
inc c.inParallelStmt
result.sons[1] = semStmt(c, x)
dec c.inParallelStmt
of mSpawn:
result = setMs(n, s)
result.sons[1] = semExpr(c, n.sons[1])
if not result[1].typ.isEmptyType:
if c.inParallelStmt > 0:
result.typ = result[1].typ
else:
result.typ = createFlowVar(c, result[1].typ, n.info)
result.add instantiateCreateFlowVarCall(c, result[1].typ, n.info).newSymNode
else: result = semDirectOp(c, n, flags)
proc semWhen(c: PContext, n: PNode, semCheck = true): PNode =
@@ -1842,7 +1885,7 @@ proc semBlock(c: PContext, n: PNode): PNode =
if sfGenSym notin labl.flags:
addDecl(c, labl)
n.sons[0] = newSymNode(labl, n.sons[0].info)
suggestSym(n.sons[0], labl)
suggestSym(n.sons[0].info, labl)
n.sons[1] = semExpr(c, n.sons[1])
n.typ = n.sons[1].typ
if isEmptyType(n.typ): n.kind = nkBlockStmt
@@ -1962,7 +2005,7 @@ proc semExpr(c: PContext, n: PNode, flags: TExprFlags = {}): PNode =
var s = qualifiedLookUp(c, n.sons[0], mode)
if s != nil:
if gCmd == cmdPretty and n.sons[0].kind == nkDotExpr:
pretty.checkUse(n.sons[0].sons[1], s)
pretty.checkUse(n.sons[0].sons[1].info, s)
case s.kind
of skMacro:
if sfImmediate notin s.flags:
@@ -2060,6 +2103,7 @@ proc semExpr(c: PContext, n: PNode, flags: TExprFlags = {}): PNode =
of nkClosedSymChoice, nkOpenSymChoice:
# handling of sym choices is context dependent
# the node is left intact for now
discard
of nkStaticExpr:
result = semStaticExpr(c, n)
of nkAsgn: result = semAsgn(c, n)

View File

@@ -1,7 +1,7 @@
#
#
# The Nimrod Compiler
# (c) Copyright 2012 Andreas Rumpf
# (c) Copyright 2014 Andreas Rumpf
#
# See the file "copying.txt", included in this
# distribution, for details about the copyright.
@@ -380,6 +380,7 @@ proc evalOp(m: TMagic, n, a, b, c: PNode): PNode =
of mInSet: result = newIntNodeT(ord(inSet(a, b)), n)
of mRepr:
# BUGFIX: we cannot eval mRepr here for reasons that I forgot.
discard
of mIntToStr, mInt64ToStr: result = newStrNodeT($(getOrdValue(a)), n)
of mBoolToStr:
if getOrdValue(a) == 0: result = newStrNodeT("false", n)
@@ -404,10 +405,8 @@ proc evalOp(m: TMagic, n, a, b, c: PNode): PNode =
mExit, mInc, ast.mDec, mEcho, mSwap, mAppendStrCh,
mAppendStrStr, mAppendSeqElem, mSetLengthStr, mSetLengthSeq,
mParseExprToAst, mParseStmtToAst, mExpandToAst, mTypeTrait,
mNLen..mNError, mEqRef, mSlurp, mStaticExec, mNGenSym, mSpawn:
mNLen..mNError, mEqRef, mSlurp, mStaticExec, mNGenSym, mSpawn, mParallel:
discard
of mRand:
result = newIntNodeT(math.random(a.getInt.int), n)
else: internalError(a.info, "evalOp(" & $m & ')')
proc getConstIfExpr(c: PSym, n: PNode): PNode =
@@ -538,17 +537,18 @@ proc foldArrayAccess(m: PSym, n: PNode): PNode =
var idx = getOrdValue(y)
case x.kind
of nkPar:
if (idx >= 0) and (idx < sonsLen(x)):
if idx >= 0 and idx < sonsLen(x):
result = x.sons[int(idx)]
if result.kind == nkExprColonExpr: result = result.sons[1]
else:
localError(n.info, errIndexOutOfBounds)
of nkBracket:
if (idx >= 0) and (idx < sonsLen(x)): result = x.sons[int(idx)]
of nkBracket:
idx = idx - x.typ.firstOrd
if idx >= 0 and idx < x.len: result = x.sons[int(idx)]
else: localError(n.info, errIndexOutOfBounds)
of nkStrLit..nkTripleStrLit:
of nkStrLit..nkTripleStrLit:
result = newNodeIT(nkCharLit, x.info, n.typ)
if (idx >= 0) and (idx < len(x.strVal)):
if idx >= 0 and idx < len(x.strVal):
result.intVal = ord(x.strVal[int(idx)])
elif idx == len(x.strVal):
discard

View File

@@ -69,7 +69,7 @@ proc semGenericStmtSymbol(c: PContext, n: PNode, s: PSym): PNode =
proc lookup(c: PContext, n: PNode, flags: TSemGenericFlags,
ctx: var TIntSet): PNode =
result = n
let ident = considerAcc(n)
let ident = considerQuotedIdent(n)
var s = searchInScopes(c, ident)
if s == nil:
if ident.id notin ctx and withinMixin notin flags:
@@ -82,7 +82,37 @@ proc lookup(c: PContext, n: PNode, flags: TSemGenericFlags,
else:
result = semGenericStmtSymbol(c, n, s)
# else: leave as nkIdent
proc newDot(n, b: PNode): PNode =
result = newNodeI(nkDotExpr, n.info)
result.add(n.sons[0])
result.add(b)
proc fuzzyLookup(c: PContext, n: PNode, flags: TSemGenericFlags,
ctx: var TIntSet): PNode =
assert n.kind == nkDotExpr
let luf = if withinMixin notin flags: {checkUndeclared} else: {}
var s = qualifiedLookUp(c, n, luf)
if s != nil:
result = semGenericStmtSymbol(c, n, s)
else:
result = n
let n = n[1]
let ident = considerQuotedIdent(n)
var s = searchInScopes(c, ident)
if s != nil and s.kind in routineKinds:
if withinBind in flags:
result = newDot(result, symChoice(c, n, s, scClosed))
elif s.name.id in ctx:
result = newDot(result, symChoice(c, n, s, scForceOpen))
else:
let sym = semGenericStmtSymbol(c, n, s)
if sym.kind == nkSym:
result = newDot(result, symChoice(c, n, s, scForceOpen))
else:
result = newDot(result, sym)
proc semGenericStmt(c: PContext, n: PNode,
flags: TSemGenericFlags, ctx: var TIntSet): PNode =
result = n
@@ -91,10 +121,11 @@ proc semGenericStmt(c: PContext, n: PNode,
of nkIdent, nkAccQuoted:
result = lookup(c, n, flags, ctx)
of nkDotExpr:
let luf = if withinMixin notin flags: {checkUndeclared} else: {}
var s = qualifiedLookUp(c, n, luf)
if s != nil: result = semGenericStmtSymbol(c, n, s)
#let luf = if withinMixin notin flags: {checkUndeclared} else: {}
#var s = qualifiedLookUp(c, n, luf)
#if s != nil: result = semGenericStmtSymbol(c, n, s)
# XXX for example: ``result.add`` -- ``add`` needs to be looked up here...
result = fuzzyLookup(c, n, flags, ctx)
of nkEmpty, nkSym..nkNilLit:
# see tests/compile/tgensymgeneric.nim:
# We need to open the gensym'ed symbol again so that the instantiation
@@ -114,7 +145,7 @@ proc semGenericStmt(c: PContext, n: PNode,
let fn = n.sons[0]
var s = qualifiedLookUp(c, fn, {})
if s == nil and withinMixin notin flags and
fn.kind in {nkIdent, nkAccQuoted} and considerAcc(fn).id notin ctx:
fn.kind in {nkIdent, nkAccQuoted} and considerQuotedIdent(fn).id notin ctx:
localError(n.info, errUndeclaredIdentifier, fn.renderTree)
var first = 0
@@ -141,6 +172,7 @@ proc semGenericStmt(c: PContext, n: PNode,
# symbol lookup ...
of skUnknown, skParam:
# Leave it as an identifier.
discard
of skProc, skMethod, skIterators, skConverter:
result.sons[0] = symChoice(c, n.sons[0], s, scOption)
first = 1

View File

@@ -190,6 +190,9 @@ proc instantiateProcType(c: PContext, pt: TIdTable,
proc generateInstance(c: PContext, fn: PSym, pt: TIdTable,
info: TLineInfo): PSym =
## Generates a new instance of a generic procedure.
## The `pt` parameter is a type-unsafe mapping table used to link generic
## parameters to their concrete types within the generic instance.
# no need to instantiate generic templates/macros:
if fn.kind in {skTemplate, skMacro}: return fn
# generates an instantiated proc
@@ -199,8 +202,7 @@ proc generateInstance(c: PContext, fn: PSym, pt: TIdTable,
var n = copyTree(fn.ast)
# NOTE: for access of private fields within generics from a different module
# we set the friend module:
var oldFriend = c.friendModule
c.friendModule = getModule(fn)
c.friendModules.add(getModule(fn))
#let oldScope = c.currentScope
#c.currentScope = fn.scope
result = copySym(fn, false)
@@ -236,6 +238,6 @@ proc generateInstance(c: PContext, fn: PSym, pt: TIdTable,
closeScope(c) # close scope for parameters
popOwner()
#c.currentScope = oldScope
c.friendModule = oldFriend
discard c.friendModules.pop()
dec(c.instCounter)
if result.kind == skMethod: finishMethod(c, result)

View File

@@ -1,7 +1,7 @@
#
#
# The Nimrod Compiler
# (c) Copyright 2013 Andreas Rumpf
# (c) Copyright 2014 Andreas Rumpf
#
# See the file "copying.txt", included in this
# distribution, for details about the copyright.
@@ -131,4 +131,3 @@ proc magicsAfterOverloadResolution(c: PContext, n: PNode,
of mNBindSym: result = semBindSym(c, n)
of mLocals: result = semLocals(c, n)
else: result = n

465
compiler/semparallel.nim Normal file
View File

@@ -0,0 +1,465 @@
#
#
# The Nimrod Compiler
# (c) Copyright 2014 Andreas Rumpf
#
# See the file "copying.txt", included in this
# distribution, for details about the copyright.
#
## Semantic checking for 'parallel'.
# - codegen needs to support mSlice (+)
# - lowerings must not perform unnecessary copies (+)
# - slices should become "nocopy" to openArray (+)
# - need to perform bound checks (+)
#
# - parallel needs to insert a barrier (+)
# - passed arguments need to be ensured to be "const"
# - what about 'f(a)'? --> f shouldn't have side effects anyway
# - passed arrays need to be ensured not to alias
# - passed slices need to be ensured to be disjoint (+)
# - output slices need special logic (+)
import
ast, astalgo, idents, lowerings, magicsys, guards, sempass2, msgs,
renderer
from trees import getMagic
from strutils import `%`
discard """
one major problem:
spawn f(a[i])
inc i
spawn f(a[i])
is valid, but
spawn f(a[i])
spawn f(a[i])
inc i
is not! However,
spawn f(a[i])
if guard: inc i
spawn f(a[i])
is not valid either! --> We need a flow dependent analysis here.
However:
while foo:
spawn f(a[i])
inc i
spawn f(a[i])
Is not valid either! --> We should really restrict 'inc' to loop endings?
The heuristic that we implement here (that has no false positives) is: Usage
of 'i' in a slice *after* we determined the stride is invalid!
"""
type
TDirection = enum
ascending, descending
MonotonicVar = object
v, alias: PSym # to support the ordinary 'countup' iterator
# we need to detect aliases
lower, upper, stride: PNode
dir: TDirection
blacklisted: bool # blacklisted variables that are not monotonic
AnalysisCtx = object
locals: seq[MonotonicVar]
slices: seq[tuple[x,a,b: PNode, spawnId: int, inLoop: bool]]
guards: TModel # nested guards
args: seq[PSym] # args must be deeply immutable
spawns: int # we can check that at last 1 spawn is used in
# the 'parallel' section
currentSpawnId: int
inLoop: int
let opSlice = createMagic("slice", mSlice)
proc initAnalysisCtx(): AnalysisCtx =
result.locals = @[]
result.slices = @[]
result.args = @[]
result.guards = @[]
proc lookupSlot(c: AnalysisCtx; s: PSym): int =
for i in 0.. <c.locals.len:
if c.locals[i].v == s or c.locals[i].alias == s: return i
return -1
proc getSlot(c: var AnalysisCtx; v: PSym): ptr MonotonicVar =
let s = lookupSlot(c, v)
if s >= 0: return addr(c.locals[s])
let L = c.locals.len
c.locals.setLen(L+1)
c.locals[L].v = v
return addr(c.locals[L])
proc gatherArgs(c: var AnalysisCtx; n: PNode) =
for i in 0.. <n.safeLen:
let root = getRoot n[i]
if root != nil:
block addRoot:
for r in items(c.args):
if r == root: break addRoot
c.args.add root
gatherArgs(c, n[i])
proc isSingleAssignable(n: PNode): bool =
n.kind == nkSym and (let s = n.sym;
s.kind in {skTemp, skForVar, skLet} and
{sfAddrTaken, sfGlobal} * s.flags == {})
proc isLocal(n: PNode): bool =
n.kind == nkSym and (let s = n.sym;
s.kind in {skResult, skTemp, skForVar, skVar, skLet} and
{sfAddrTaken, sfGlobal} * s.flags == {})
proc checkLocal(c: AnalysisCtx; n: PNode) =
if isLocal(n):
let s = c.lookupSlot(n.sym)
if s >= 0 and c.locals[s].stride != nil:
localError(n.info, "invalid usage of counter after increment")
else:
for i in 0 .. <n.safeLen: checkLocal(c, n.sons[i])
template `?`(x): expr = x.renderTree
proc checkLe(c: AnalysisCtx; a, b: PNode) =
case proveLe(c.guards, a, b)
of impUnknown:
localError(a.info, "cannot prove: " & ?a & " <= " & ?b)
of impYes: discard
of impNo:
localError(a.info, "can prove: " & ?a & " > " & ?b)
proc checkBounds(c: AnalysisCtx; arr, idx: PNode) =
checkLe(c, arr.lowBound, idx)
checkLe(c, idx, arr.highBound)
proc addLowerBoundAsFacts(c: var AnalysisCtx) =
for v in c.locals:
if not v.blacklisted:
c.guards.addFactLe(v.lower, newSymNode(v.v))
proc addSlice(c: var AnalysisCtx; n: PNode; x, le, ri: PNode) =
checkLocal(c, n)
let le = le.canon
let ri = ri.canon
# perform static bounds checking here; and not later!
let oldState = c.guards.len
addLowerBoundAsFacts(c)
c.checkBounds(x, le)
c.checkBounds(x, ri)
c.guards.setLen(oldState)
c.slices.add((x, le, ri, c.currentSpawnId, c.inLoop > 0))
proc overlap(m: TModel; x,y,c,d: PNode) =
# X..Y and C..D overlap iff (X <= D and C <= Y)
case proveLe(m, x, d)
of impUnknown:
localError(x.info,
"cannot prove: $# > $#; required for ($#)..($#) disjoint from ($#)..($#)" %
[?x, ?d, ?x, ?y, ?c, ?d])
of impYes:
case proveLe(m, c, y)
of impUnknown:
localError(x.info,
"cannot prove: $# > $#; required for ($#)..($#) disjoint from ($#)..($#)" %
[?c, ?y, ?x, ?y, ?c, ?d])
of impYes:
localError(x.info, "($#)..($#) not disjoint from ($#)..($#)" % [?x, ?y, ?c, ?d])
of impNo: discard
of impNo: discard
proc stride(c: AnalysisCtx; n: PNode): BiggestInt =
if isLocal(n):
let s = c.lookupSlot(n.sym)
if s >= 0 and c.locals[s].stride != nil:
result = c.locals[s].stride.intVal
else:
for i in 0 .. <n.safeLen: result += stride(c, n.sons[i])
proc subStride(c: AnalysisCtx; n: PNode): PNode =
# substitute with stride:
if isLocal(n):
let s = c.lookupSlot(n.sym)
if s >= 0 and c.locals[s].stride != nil:
result = n +@ c.locals[s].stride.intVal
else:
result = n
elif n.safeLen > 0:
result = shallowCopy(n)
for i in 0 .. <n.len: result.sons[i] = subStride(c, n.sons[i])
else:
result = n
proc checkSlicesAreDisjoint(c: var AnalysisCtx) =
# this is the only thing that we need to perform after we have traversed
# the whole tree so that the strides are available.
# First we need to add all the computed lower bounds:
addLowerBoundAsFacts(c)
# Every slice used in a loop needs to be disjoint with itself:
for x,a,b,id,inLoop in items(c.slices):
if inLoop: overlap(c.guards, a,b, c.subStride(a), c.subStride(b))
# Another tricky example is:
# while true:
# spawn f(a[i])
# spawn f(a[i+1])
# inc i # inc i, 2 would be correct here
#
# Or even worse:
# while true:
# spawn f(a[i+1 .. i+3])
# spawn f(a[i+4 .. i+5])
# inc i, 4
# Prove that i*k*stride + 3 != i*k'*stride + 5
# For the correct example this amounts to
# i*k*2 != i*k'*2 + 1
# which is true.
# For now, we don't try to prove things like that at all, even though it'd
# be feasible for many useful examples. Instead we attach the slice to
# a spawn and if the attached spawns differ, we bail out:
for i in 0 .. high(c.slices):
for j in i+1 .. high(c.slices):
let x = c.slices[i]
let y = c.slices[j]
if x.spawnId != y.spawnId and guards.sameTree(x.x, y.x):
if not x.inLoop or not y.inLoop:
# XXX strictly speaking, 'or' is not correct here and it needs to
# be 'and'. However this prevents too many obviously correct programs
# like f(a[0..x]); for i in x+1 .. a.high: f(a[i])
overlap(c.guards, x.a, x.b, y.a, y.b)
elif (let k = simpleSlice(x.a, x.b); let m = simpleSlice(y.a, y.b);
k >= 0 and m >= 0):
# ah I cannot resist the temptation and add another sweet heuristic:
# if both slices have the form (i+k)..(i+k) and (i+m)..(i+m) we
# check they are disjoint and k < stride and m < stride:
overlap(c.guards, x.a, x.b, y.a, y.b)
let stride = min(c.stride(x.a), c.stride(y.a))
if k < stride and m < stride:
discard
else:
localError(x.x.info, "cannot prove ($#)..($#) disjoint from ($#)..($#)" %
[?x.a, ?x.b, ?y.a, ?y.b])
else:
localError(x.x.info, "cannot prove ($#)..($#) disjoint from ($#)..($#)" %
[?x.a, ?x.b, ?y.a, ?y.b])
proc analyse(c: var AnalysisCtx; n: PNode)
proc analyseSons(c: var AnalysisCtx; n: PNode) =
for i in 0 .. <safeLen(n): analyse(c, n[i])
proc min(a, b: PNode): PNode =
if a.isNil: result = b
elif a.intVal < b.intVal: result = a
else: result = b
proc fromSystem(op: PSym): bool = sfSystemModule in getModule(op).flags
proc analyseCall(c: var AnalysisCtx; n: PNode; op: PSym) =
if op.magic == mSpawn:
inc c.spawns
let oldSpawnId = c.currentSpawnId
c.currentSpawnId = c.spawns
gatherArgs(c, n[1])
analyseSons(c, n)
c.currentSpawnId = oldSpawnId
elif op.magic == mInc or (op.name.s == "+=" and op.fromSystem):
if n[1].isLocal:
let incr = n[2].skipConv
if incr.kind in {nkCharLit..nkUInt32Lit} and incr.intVal > 0:
let slot = c.getSlot(n[1].sym)
slot.stride = min(slot.stride, incr)
analyseSons(c, n)
elif op.name.s == "[]" and op.fromSystem:
c.addSlice(n, n[1], n[2][1], n[2][2])
analyseSons(c, n)
elif op.name.s == "[]=" and op.fromSystem:
c.addSlice(n, n[1], n[2][1], n[2][2])
analyseSons(c, n)
else:
analyseSons(c, n)
proc analyseCase(c: var AnalysisCtx; n: PNode) =
analyse(c, n.sons[0])
let oldFacts = c.guards.len
for i in 1.. <n.len:
let branch = n.sons[i]
setLen(c.guards, oldFacts)
addCaseBranchFacts(c.guards, n, i)
for i in 0 .. <branch.len:
analyse(c, branch.sons[i])
setLen(c.guards, oldFacts)
proc analyseIf(c: var AnalysisCtx; n: PNode) =
analyse(c, n.sons[0].sons[0])
let oldFacts = c.guards.len
addFact(c.guards, canon(n.sons[0].sons[0]))
analyse(c, n.sons[0].sons[1])
for i in 1.. <n.len:
let branch = n.sons[i]
setLen(c.guards, oldFacts)
for j in 0..i-1:
addFactNeg(c.guards, canon(n.sons[j].sons[0]))
if branch.len > 1:
addFact(c.guards, canon(branch.sons[0]))
for i in 0 .. <branch.len:
analyse(c, branch.sons[i])
setLen(c.guards, oldFacts)
proc analyse(c: var AnalysisCtx; n: PNode) =
  ## Main analysis pass over the body of a 'parallel' section. Gathers
  ## guard facts, slice usages and per-variable slot information in ``c``
  ## and rejects constructs that are invalid inside 'parallel'.
  case n.kind
  of nkAsgn, nkFastAsgn:
    if n[0].isSingleAssignable and n[1].isLocal:
      # record 'x = someLocal' as an alias relationship:
      let slot = c.getSlot(n[1].sym)
      slot.alias = n[0].sym
    elif n[0].isLocal:
      # since we already ensure sfAddrTaken is not in s.flags, we only need to
      # prevent direct assignments to the monotonic variable:
      let slot = c.getSlot(n[0].sym)
      slot.blackListed = true
    # an assignment invalidates previously gathered facts about the LHS:
    invalidateFacts(c.guards, n[0])
    analyseSons(c, n)
    addAsgnFact(c.guards, n[0], n[1])
  of nkCallKinds:
    # direct call:
    if n[0].kind == nkSym: analyseCall(c, n, n[0].sym)
    else: analyseSons(c, n)
  of nkBracketExpr:
    # a plain indexing 'a[i]' is the degenerate slice 'a[i..i]':
    c.addSlice(n, n[0], n[1], n[1])
    analyseSons(c, n)
  of nkReturnStmt, nkRaiseStmt, nkTryStmt:
    localError(n.info, "invalid control flow for 'parallel'")
    # 'break' that leaves the 'parallel' section is not valid either
    # or maybe we should generate a 'try' XXX
  of nkVarSection:
    for it in n:
      let value = it.lastSon
      if value.kind != nkEmpty:
        # 'it.len-3' skips the type slot and the initializer slot;
        # remember the initializer as each local's lower bound:
        for j in 0 .. it.len-3:
          if it[j].isLocal:
            let slot = c.getSlot(it[j].sym)
            if slot.lower.isNil: slot.lower = value
            else: internalError(it.info, "slot already has a lower bound")
        analyse(c, value)
  of nkCaseStmt: analyseCase(c, n)
  of nkIfStmt, nkIfExpr: analyseIf(c, n)
  of nkWhileStmt:
    analyse(c, n.sons[0])
    # 'while true' loop?
    inc c.inLoop
    if isTrue(n.sons[0]):
      analyseSons(c, n.sons[1])
    else:
      # loop may never execute:
      let oldState = c.locals.len
      let oldFacts = c.guards.len
      addFact(c.guards, canon(n.sons[0]))
      analyse(c, n.sons[1])
      # discard locals and facts that only hold inside the loop body:
      setLen(c.locals, oldState)
      setLen(c.guards, oldFacts)
      # we know after the loop the negation holds:
      if not hasSubnodeWith(n.sons[1], nkBreakStmt):
        addFactNeg(c.guards, canon(n.sons[0]))
    dec c.inLoop
  of nkTypeSection, nkProcDef, nkConverterDef, nkMethodDef, nkIteratorDef,
     nkMacroDef, nkTemplateDef, nkConstSection, nkPragma:
    # declarative nodes contain no code that runs in the parallel section:
    discard
  else:
    analyseSons(c, n)
proc transformSlices(n: PNode): PNode =
  ## Recursively rewrites system's ``a[i..j]`` accesses (calls to the
  ## system ``[]`` with a slice constructor argument) into explicit calls
  ## to ``opSlice`` taking ``(a, i, j)``. Nodes without such calls are
  ## copied structurally; leaves are returned unchanged.
  if n.kind in nkCallKinds and n[0].kind == nkSym:
    let op = n[0].sym
    if op.name.s == "[]" and op.fromSystem:
      # n[2] is the slice constructor; n[2][1]/n[2][2] are its bounds:
      result = copyNode(n)
      result.add opSlice.newSymNode
      result.add n[1]
      result.add n[2][1]
      result.add n[2][2]
      return result
  if n.safeLen > 0:
    result = shallowCopy(n)
    for i in 0 .. < n.len:
      result.sons[i] = transformSlices(n.sons[i])
  else:
    result = n
# forward declaration; 'transformSpawn' and 'transformSpawnSons' recurse
# into each other:
proc transformSpawn(owner: PSym; n, barrier: PNode): PNode
proc transformSpawnSons(owner: PSym; n, barrier: PNode): PNode =
  ## Applies ``transformSpawn`` to every child of ``n``, returning a
  ## shallow copy of ``n`` with the transformed children.
  result = shallowCopy(n)
  for i in 0 .. < n.len:
    result.sons[i] = transformSpawn(owner, n.sons[i], barrier)
proc transformSpawn(owner: PSym; n, barrier: PNode): PNode =
  ## Lowers every 'spawn' expression found under ``n`` into a call to a
  ## wrapper proc (via ``wrapProcForSpawn``) that is synchronized on
  ## ``barrier``. Slice accesses in the spawned expression are rewritten
  ## first so the wrapper receives explicit bounds.
  case n.kind
  of nkVarSection:
    result = nil
    for it in n:
      let b = it.lastSon
      if getMagic(b) == mSpawn:
        # 'var x = spawn f(...)': exactly one name + type + initializer
        if it.len != 3: localError(it.info, "invalid context for 'spawn'")
        let m = transformSlices(b)
        if result.isNil:
          # keep the var section itself, then append the spawn wrappers:
          result = newNodeI(nkStmtList, n.info)
          result.add n
        result.add wrapProcForSpawn(owner, m, b.typ, barrier, it[0])
        # the initializer moved into the wrapper; clear it here:
        it.sons[it.len-1] = emptyNode
    if result.isNil: result = n
  of nkAsgn, nkFastAsgn:
    let b = n[1]
    if getMagic(b) == mSpawn:
      # 'x = spawn f(...)': the assignment target becomes the wrapper's
      # destination:
      let m = transformSlices(b)
      return wrapProcForSpawn(owner, m, b.typ, barrier, n[0])
    result = transformSpawnSons(owner, n, barrier)
  of nkCallKinds:
    if getMagic(n) == mSpawn:
      # bare 'spawn f(...)' statement: no destination
      result = transformSlices(n)
      return wrapProcForSpawn(owner, result, n.typ, barrier, nil)
    result = transformSpawnSons(owner, n, barrier)
  elif n.safeLen > 0:
    result = transformSpawnSons(owner, n, barrier)
  else:
    result = n
proc checkArgs(a: var AnalysisCtx; n: PNode) =
  ## Placeholder: should check that the arguments used by 'spawn' calls
  ## inside the 'parallel' body ``n`` are safe to pass to other threads.
  ## Not yet implemented.
  # fix typo in the placeholder note ("too" -> "to"):
  discard "to implement"
proc generateAliasChecks(a: AnalysisCtx; result: PNode) =
  ## Placeholder: should emit runtime alias checks for the slices
  ## collected in ``a`` into the statement list ``result``.
  ## Not yet implemented.
  # fix typo in the placeholder note ("too" -> "to"):
  discard "to implement"
proc liftParallel*(owner: PSym; n: PNode): PNode =
  ## Entry point: lowers a 'parallel' section ``n`` belonging to
  ## ``owner`` into a statement list that opens a barrier, runs the
  ## transformed body with every 'spawn' wrapped, and closes the barrier.
  # this needs to be called after the 'for' loop elimination

  # first pass:
  # - detect monotonic local integer variables
  # - detect used slices
  # - detect used arguments
  #echo "PAR ", renderTree(n)
  var a = initAnalysisCtx()
  let body = n.lastSon
  analyse(a, body)
  if a.spawns == 0:
    localError(n.info, "'parallel' section without 'spawn'")
  checkSlicesAreDisjoint(a)
  checkArgs(a, body)

  # create the hidden barrier variable shared by all spawned tasks:
  var varSection = newNodeI(nkVarSection, n.info)
  var temp = newSym(skTemp, getIdent"barrier", owner, n.info)
  temp.typ = magicsys.getCompilerProc("Barrier").typ
  incl(temp.flags, sfFromGeneric)
  let tempNode = newSymNode(temp)
  varSection.addVar tempNode
  # spawned wrappers receive the barrier by address:
  let barrier = genAddrOf(tempNode)
  result = newNodeI(nkStmtList, n.info)
  generateAliasChecks(a, result)
  result.add varSection
  result.add callCodeGenProc("openBarrier", barrier)
  result.add transformSpawn(owner, body, barrier)
  result.add callCodeGenProc("closeBarrier", barrier)

View File

@@ -1,7 +1,7 @@
#
#
# The Nimrod Compiler
# (c) Copyright 2013 Andreas Rumpf
# (c) Copyright 2014 Andreas Rumpf
#
# See the file "copying.txt", included in this
# distribution, for details about the copyright.
@@ -89,7 +89,7 @@ proc initVarViaNew(a: PEffects, n: PNode) =
if n.kind != nkSym: return
let s = n.sym
if {tfNeedsInit, tfNotNil} * s.typ.flags <= {tfNotNil}:
# 'x' is not nil, but that doesn't mean it's not nil children
# 'x' is not nil, but that doesn't mean its "not nil" children
# are initialized:
initVar(a, n)
@@ -478,13 +478,18 @@ proc trackBlock(tracked: PEffects, n: PNode) =
else:
track(tracked, n)
proc isTrue(n: PNode): bool =
proc isTrue*(n: PNode): bool =
n.kind == nkSym and n.sym.kind == skEnumField and n.sym.position != 0 or
n.kind == nkIntLit and n.intVal != 0
proc paramType(op: PType, i: int): PType =
if op != nil and i < op.len: result = op.sons[i]
proc cstringCheck(tracked: PEffects; n: PNode) =
if n.sons[0].typ.kind == tyCString and (let a = skipConv(n[1]);
a.typ.kind == tyString and a.kind notin {nkStrLit..nkTripleStrLit}):
message(n.info, warnUnsafeCode, renderTree(n))
proc track(tracked: PEffects, n: PNode) =
case n.kind
of nkSym:
@@ -541,6 +546,7 @@ proc track(tracked: PEffects, n: PNode) =
track(tracked, n.sons[0])
addAsgnFact(tracked.guards, n.sons[0], n.sons[1])
notNilCheck(tracked, n.sons[1], n.sons[0].typ)
when false: cstringCheck(tracked, n)
of nkVarSection:
for child in n:
let last = lastSon(child)

View File

@@ -33,7 +33,7 @@ proc semBreakOrContinue(c: PContext, n: PNode): PNode =
x.info = n.info
incl(s.flags, sfUsed)
n.sons[0] = x
suggestSym(x, s)
suggestSym(x.info, s)
else:
localError(n.info, errInvalidControlFlowX, s.name.s)
elif (c.p.nestedLoopCounter <= 0) and (c.p.nestedBlockCounter <= 0):
@@ -66,10 +66,16 @@ proc toCover(t: PType): BiggestInt =
result = lengthOrd(skipTypes(t, abstractVar-{tyTypeDesc}))
proc performProcvarCheck(c: PContext, n: PNode, s: PSym) =
## Checks that the given symbol is a proper procedure variable, meaning
## that it
var smoduleId = getModule(s).id
if sfProcvar notin s.flags and s.typ.callConv == ccDefault and
smoduleId != c.module.id and smoduleId != c.friendModule.id:
localError(n.info, errXCannotBePassedToProcVar, s.name.s)
smoduleId != c.module.id:
block outer:
for module in c.friendModules:
if smoduleId == module.id:
break outer
localError(n.info, errXCannotBePassedToProcVar, s.name.s)
proc semProcvarCheck(c: PContext, n: PNode) =
let n = n.skipConv
@@ -190,7 +196,7 @@ proc semCase(c: PContext, n: PNode): PNode =
var typ = commonTypeBegin
var hasElse = false
case skipTypes(n.sons[0].typ, abstractVarRange-{tyTypeDesc}).kind
of tyInt..tyInt64, tyChar, tyEnum, tyUInt..tyUInt32:
of tyInt..tyInt64, tyChar, tyEnum, tyUInt..tyUInt32, tyBool:
chckCovered = true
of tyFloat..tyFloat128, tyString, tyError:
discard
@@ -313,7 +319,7 @@ proc semIdentDef(c: PContext, n: PNode, kind: TSymKind): PSym =
incl(result.flags, sfGlobal)
else:
result = semIdentWithPragma(c, kind, n, {})
suggestSym(n, result)
suggestSym(n.info, result)
proc checkNilable(v: PSym) =
if sfGlobal in v.flags and {tfNotNil, tfNeedsInit} * v.typ.flags != {}:
@@ -655,7 +661,7 @@ proc semFor(c: PContext, n: PNode): PNode =
n.sons[length-2] = semExprNoDeref(c, n.sons[length-2], {efWantIterator})
var call = n.sons[length-2]
let isCallExpr = call.kind in nkCallKinds
if isCallExpr and call.sons[0].sym.magic != mNone:
if isCallExpr and call[0].kind == nkSym and call[0].sym.magic != mNone:
if call.sons[0].sym.magic == mOmpParFor:
result = semForVars(c, n)
result.kind = nkParForStmt
@@ -822,6 +828,9 @@ proc typeSectionFinalPass(c: PContext, n: PNode) =
getCurrOwner(), s.info)
proc semTypeSection(c: PContext, n: PNode): PNode =
## Processes a type section. This must be done in separate passes, in order
## to allow the type definitions in the section to reference each other
## without regard for the order of their definitions.
typeSectionLeftSidePass(c, n)
typeSectionRightSidePass(c, n)
typeSectionFinalPass(c, n)
@@ -868,7 +877,7 @@ proc lookupMacro(c: PContext, n: PNode): PSym =
result = n.sym
if result.kind notin {skMacro, skTemplate}: result = nil
else:
result = searchInScopes(c, considerAcc(n), {skMacro, skTemplate})
result = searchInScopes(c, considerQuotedIdent(n), {skMacro, skTemplate})
proc semProcAnnotation(c: PContext, prc: PNode): PNode =
var n = prc.sons[pragmasPos]
@@ -879,7 +888,7 @@ proc semProcAnnotation(c: PContext, prc: PNode): PNode =
let m = lookupMacro(c, key)
if m == nil:
if key.kind == nkIdent and key.ident.id == ord(wDelegator):
if considerAcc(prc.sons[namePos]).s == "()":
if considerQuotedIdent(prc.sons[namePos]).s == "()":
prc.sons[namePos] = newIdentNode(idDelegator, prc.info)
prc.sons[pragmasPos] = copyExcept(n, i)
else:

View File

@@ -59,7 +59,7 @@ proc symChoice(c: PContext, n: PNode, s: PSym, r: TSymChoiceRule): PNode =
# (s.kind notin routineKinds or s.magic != mNone):
# for instance 'nextTry' is both in tables.nim and astalgo.nim ...
result = newSymNode(s, n.info)
markUsed(n, s)
markUsed(n.info, s)
else:
# semantic checking requires a type; ``fitNode`` deals with it
# appropriately
@@ -93,7 +93,7 @@ proc semBindStmt(c: PContext, n: PNode, toBind: var TIntSet): PNode =
proc semMixinStmt(c: PContext, n: PNode, toMixin: var TIntSet): PNode =
for i in 0 .. < n.len:
toMixin.incl(considerAcc(n.sons[i]).id)
toMixin.incl(considerQuotedIdent(n.sons[i]).id)
result = newNodeI(nkEmpty, n.info)
proc replaceIdentBySym(n: var PNode, s: PNode) =
@@ -151,7 +151,7 @@ proc onlyReplaceParams(c: var TemplCtx, n: PNode): PNode =
result.sons[i] = onlyReplaceParams(c, n.sons[i])
proc newGenSym(kind: TSymKind, n: PNode, c: var TemplCtx): PSym =
result = newSym(kind, considerAcc(n), c.owner, n.info)
result = newSym(kind, considerQuotedIdent(n), c.owner, n.info)
incl(result.flags, sfGenSym)
incl(result.flags, sfShadowed)

View File

@@ -70,9 +70,10 @@ proc semEnum(c: PContext, n: PNode, prev: PType): PType =
counter = x
of nkSym:
e = n.sons[i].sym
of nkIdent:
of nkIdent, nkAccQuoted:
e = newSymS(skEnumField, n.sons[i], c)
else: illFormedAst(n)
else:
illFormedAst(n[i])
e.typ = result
e.position = int(counter)
if e.position == 0: hasNull = true
@@ -116,7 +117,7 @@ proc semVarargs(c: PContext, n: PNode, prev: PType): PType =
var base = semTypeNode(c, n.sons[1], nil)
addSonSkipIntLit(result, base)
if sonsLen(n) == 3:
result.n = newIdentNode(considerAcc(n.sons[2]), n.sons[2].info)
result.n = newIdentNode(considerQuotedIdent(n.sons[2]), n.sons[2].info)
else:
localError(n.info, errXExpectsOneTypeParam, "varargs")
addSonSkipIntLit(result, errorType(c))
@@ -280,7 +281,7 @@ proc semTypeIdent(c: PContext, n: PNode): PSym =
else:
result = qualifiedLookUp(c, n, {checkAmbiguity, checkUndeclared})
if result != nil:
markUsed(n, result)
markUsed(n.info, result)
if result.kind == skParam and result.typ.kind == tyTypeDesc:
# This is a typedesc param. is it already bound?
# it's not bound when it's used multiple times in the
@@ -385,6 +386,7 @@ proc semIdentWithPragma(c: PContext, kind: TSymKind, n: PNode,
case kind
of skType:
# process pragmas later, because result.typ has not been set yet
discard
of skField: pragma(c, result, n.sons[1], fieldPragmas)
of skVar: pragma(c, result, n.sons[1], varPragmas)
of skLet: pragma(c, result, n.sons[1], letPragmas)
@@ -441,14 +443,14 @@ proc semCaseBranch(c: PContext, t, branch: PNode, branchIndex: int,
elif isRange(b):
branch.sons[i] = semCaseBranchRange(c, t, b, covered)
else:
# constant sets and arrays are allowed:
var r = semConstExpr(c, b)
# for ``{}`` we want to trigger the type mismatch in ``fitNode``:
if r.kind != nkCurly or len(r) == 0:
if r.kind notin {nkCurly, nkBracket} or len(r) == 0:
checkMinSonsLen(t, 1)
branch.sons[i] = skipConv(fitNode(c, t.sons[0].typ, r))
inc(covered)
else:
# constant sets have special rules
# first element is special and will overwrite: branch.sons[i]:
branch.sons[i] = semCaseBranchSetElem(c, t, r[0], covered)
# other elements have to be added to ``branch``
@@ -560,7 +562,7 @@ proc semRecordNodeAux(c: PContext, n: PNode, check: var TIntSet, pos: var int,
let rec = rectype.sym
for i in countup(0, sonsLen(n)-3):
var f = semIdentWithPragma(c, skField, n.sons[i], {sfExported})
suggestSym(n.sons[i], f)
suggestSym(n.sons[i].info, f)
f.typ = typ
f.position = pos
if (rec != nil) and ({sfImportc, sfExportc} * rec.flags != {}) and
@@ -825,7 +827,7 @@ proc liftParamType(c: PContext, procKind: TSymKind, genericParams: PNode,
result = addImplicitGeneric(newTypeS(tyAnything, c))
of tyGenericParam:
markUsed(genericParams, paramType.sym)
markUsed(info, paramType.sym)
if tfWildcard in paramType.flags:
paramType.flags.excl tfWildcard
paramType.sym.kind = skType
@@ -862,7 +864,13 @@ proc semProcTypeNode(c: PContext, n, genericParams: PNode,
var counter = 0
for i in countup(1, n.len - 1):
var a = n.sons[i]
if a.kind != nkIdentDefs: illFormedAst(a)
if a.kind != nkIdentDefs:
# for some generic instantiations the passed ':env' parameter
# for closures has already been produced (see bug #898). We simply
# skip this parameter here. It'll then be re-generated in another LL
# pass over this instantiation:
if a.kind == nkSym and sfFromGeneric in a.sym.flags: continue
illFormedAst(a)
checkMinSonsLen(a, 3)
var
typ: PType = nil
@@ -1083,8 +1091,10 @@ proc semTypeNode(c: PContext, n: PNode, prev: PType): PType =
of nkCallKinds:
if isRange(n):
result = semRangeAux(c, n, prev)
elif n[0].kind == nkIdent:
let op = n.sons[0].ident
elif n[0].kind notin nkIdentKinds:
result = semTypeExpr(c, n)
else:
let op = considerQuotedIdent(n.sons[0])
if op.id in {ord(wAnd), ord(wOr)} or op.s == "|":
checkSonsLen(n, 3)
var
@@ -1119,8 +1129,6 @@ proc semTypeNode(c: PContext, n: PNode, prev: PType): PType =
result = semAnyRef(c, n, tyRef, prev)
else:
result = semTypeExpr(c, n)
else:
result = semTypeExpr(c, n)
of nkWhenStmt:
var whenResult = semWhen(c, n, false)
if whenResult.kind == nkStmtList: whenResult.kind = nkStmtListType
@@ -1179,7 +1187,7 @@ proc semTypeNode(c: PContext, n: PNode, prev: PType): PType =
else:
assignType(prev, t)
result = prev
markUsed(n, n.sym)
markUsed(n.info, n.sym)
else:
if n.sym.kind != skError: localError(n.info, errTypeExpected)
result = newOrPrevType(tyError, prev, c)

View File

@@ -62,7 +62,7 @@ type
const
isNilConversion = isConvertible # maybe 'isIntConv' fits better?
proc markUsed*(n: PNode, s: PSym)
proc markUsed*(info: TLineInfo, s: PSym)
proc initCandidateAux(ctx: PContext,
c: var TCandidate, callee: PType) {.inline.} =
@@ -497,11 +497,11 @@ proc matchUserTypeClass*(c: PContext, m: var TCandidate,
proc shouldSkipDistinct(rules: PNode, callIdent: PIdent): bool =
if rules.kind == nkWith:
for r in rules:
if r.considerAcc == callIdent: return true
if r.considerQuotedIdent == callIdent: return true
return false
else:
for r in rules:
if r.considerAcc == callIdent: return false
if r.considerQuotedIdent == callIdent: return false
return true
proc maybeSkipDistinct(t: PType, callee: PSym): PType =
@@ -1058,7 +1058,7 @@ proc userConvMatch(c: PContext, m: var TCandidate, f, a: PType,
dest = generateTypeInstance(c, m.bindings, arg, dest)
let fdest = typeRel(m, f, dest)
if fdest in {isEqual, isGeneric}:
markUsed(arg, c.converters[i])
markUsed(arg.info, c.converters[i])
var s = newSymNode(c.converters[i])
s.typ = c.converters[i].typ
s.info = arg.info
@@ -1271,7 +1271,7 @@ proc paramTypesMatch*(m: var TCandidate, f, a: PType,
result = nil
else:
# only one valid interpretation found:
markUsed(arg, arg.sons[best].sym)
markUsed(arg.info, arg.sons[best].sym)
result = paramTypesMatchAux(m, f, arg.sons[best].typ, arg.sons[best],
argOrig)
@@ -1302,7 +1302,7 @@ proc prepareOperand(c: PContext; a: PNode): PNode =
proc prepareNamedParam(a: PNode) =
if a.sons[0].kind != nkIdent:
var info = a.sons[0].info
a.sons[0] = newIdentNode(considerAcc(a.sons[0]), info)
a.sons[0] = newIdentNode(considerQuotedIdent(a.sons[0]), info)
proc arrayConstr(c: PContext, n: PNode): PType =
result = newTypeS(tyArrayConstr, c)

View File

@@ -63,8 +63,11 @@ proc filterSym(s: PSym): bool {.inline.} =
proc fieldVisible*(c: PContext, f: PSym): bool {.inline.} =
let fmoduleId = getModule(f).id
result = sfExported in f.flags or fmoduleId == c.module.id or
fmoduleId == c.friendModule.id
result = sfExported in f.flags or fmoduleId == c.module.id
for module in c.friendModules:
if fmoduleId == module.id:
result = true
break
proc suggestField(c: PContext, s: PSym, outputs: var int) =
if filterSym(s) and fieldVisible(c, s):
@@ -243,18 +246,18 @@ var
usageSym*: PSym
lastLineInfo: TLineInfo
proc findUsages(node: PNode, s: PSym) =
if usageSym == nil and isTracked(node.info, s.name.s.len):
proc findUsages(info: TLineInfo; s: PSym) =
if usageSym == nil and isTracked(info, s.name.s.len):
usageSym = s
suggestWriteln(symToStr(s, isLocal=false, sectionUsage))
elif s == usageSym:
if lastLineInfo != node.info:
suggestWriteln(symToStr(s, isLocal=false, sectionUsage, node.info))
lastLineInfo = node.info
if lastLineInfo != info:
suggestWriteln(symToStr(s, isLocal=false, sectionUsage, info))
lastLineInfo = info
proc findDefinition(node: PNode, s: PSym) =
if node.isNil or s.isNil: return
if isTracked(node.info, s.name.s.len):
proc findDefinition(info: TLineInfo; s: PSym) =
if s.isNil: return
if isTracked(info, s.name.s.len):
suggestWriteln(symToStr(s, isLocal=false, sectionDef))
suggestQuit()
@@ -313,26 +316,26 @@ proc defFromSourceMap*(i: TLineInfo) =
defFromLine(gSourceMaps[i.fileIndex].lines[i.line].entries, i.col)
proc suggestSym*(n: PNode, s: PSym) {.inline.} =
proc suggestSym*(info: TLineInfo; s: PSym) {.inline.} =
## misnamed: should be 'symDeclared'
if optUsages in gGlobalOptions:
findUsages(n, s)
findUsages(info, s)
if optDef in gGlobalOptions:
findDefinition(n, s)
findDefinition(info, s)
if isServing:
addToSourceMap(s, n.info)
addToSourceMap(s, info)
proc markUsed(n: PNode, s: PSym) =
proc markUsed(info: TLineInfo; s: PSym) =
incl(s.flags, sfUsed)
if {sfDeprecated, sfError} * s.flags != {}:
if sfDeprecated in s.flags: message(n.info, warnDeprecated, s.name.s)
if sfError in s.flags: localError(n.info, errWrongSymbolX, s.name.s)
suggestSym(n, s)
if gCmd == cmdPretty: checkUse(n, s)
if sfDeprecated in s.flags: message(info, warnDeprecated, s.name.s)
if sfError in s.flags: localError(info, errWrongSymbolX, s.name.s)
suggestSym(info, s)
if gCmd == cmdPretty: checkUse(info, s)
proc useSym*(sym: PSym): PNode =
result = newSymNode(sym)
markUsed(result, sym)
markUsed(result.info, sym)
proc suggestExpr*(c: PContext, node: PNode) =
var cp = msgs.inCheckpoint(node.info)

View File

@@ -546,7 +546,7 @@ proc flattenTree(root: PNode): PNode =
flattenTreeAux(result, root, op)
else:
result = root
proc transformCall(c: PTransf, n: PNode): PTransNode =
var n = flattenTree(n)
var op = getMergeOp(n)
@@ -565,6 +565,9 @@ proc transformCall(c: PTransf, n: PNode): PTransNode =
inc(j)
add(result, a.PTransNode)
if len(result) == 2: result = result[1]
elif getMagic(n) == mNBindSym:
# for bindSym(myconst) we MUST NOT perform constant folding:
result = n.PTransNode
else:
let s = transformSons(c, n).PNode
# bugfix: check after 'transformSons' if it's still a method call:

View File

@@ -1118,6 +1118,11 @@ proc typeAllowed(t: PType, kind: TSymKind): bool =
proc align(address, alignment: BiggestInt): BiggestInt =
result = (address + (alignment - 1)) and not (alignment - 1)
const
szNonConcreteType* = -3
szIllegalRecursion* = -2
szUnknownSize* = -1
proc computeSizeAux(typ: PType, a: var BiggestInt): BiggestInt
proc computeRecSizeAux(n: PNode, a, currOffset: var BiggestInt): BiggestInt =
var maxAlign, maxSize, b, res: BiggestInt
@@ -1151,14 +1156,9 @@ proc computeRecSizeAux(n: PNode, a, currOffset: var BiggestInt): BiggestInt =
of nkSym:
result = computeSizeAux(n.sym.typ, a)
n.sym.offset = int(currOffset)
else:
internalError("computeRecSizeAux()")
else:
a = 1
result = - 1
const
szIllegalRecursion* = -2
szUnknownSize* = -1
result = szNonConcreteType
proc computeSizeAux(typ: PType, a: var BiggestInt): BiggestInt =
var res, maxAlign, length, currOffset: BiggestInt

View File

@@ -1,4 +1,4 @@
import renderer, strutils, ast, msgs, types
import renderer, strutils, ast, msgs, types, astalgo
const defaultParamSeparator* = ","
@@ -92,7 +92,7 @@ proc renderParamTypes(found: var seq[string], n: PNode) =
if not typ.isNil: typeStr = typeToString(typ, preferExported)
if typeStr.len < 1: return
for i in 0 .. <typePos:
assert n[i].kind == nkIdent
assert ((n[i].kind == nkIdent) or (n[i].kind == nkAccQuoted))
found.add(typeStr)
else:
internalError(n.info, "renderParamTypes(found,n) with " & $n.kind)

View File

@@ -127,15 +127,20 @@ proc createStrKeepNode(x: var TFullReg) =
elif x.node.kind == nkNilLit:
system.reset(x.node[])
x.node.kind = nkStrLit
elif x.node.kind notin {nkStrLit..nkTripleStrLit}:
elif x.node.kind notin {nkStrLit..nkTripleStrLit} or
nfAllConst in x.node.flags:
# XXX this is hacky; tests/txmlgen triggers it:
x.node = newNode(nkStrLit)
# debug x.node
#assert x.node.kind in {nkStrLit..nkTripleStrLit}
# It is not only hacky, it is also wrong for tgentemplate. The primary
# cause of bugs like these is that the VM does not properly distinguish
# between variable definitions (var foo = e) and variable updates (foo = e).
template createStr(x) =
x.node = newNode(nkStrLit)
template createSet(x) =
x.node = newNode(nkCurly)
proc moveConst(x: var TFullReg, y: TFullReg) =
if x.kind != y.kind:
myreset(x)
@@ -433,7 +438,6 @@ proc rawExecute(c: PCtx, start: int, tos: PStackFrame): TFullReg =
if regs[rc].intVal > high(int):
stackTrace(c, tos, pc, errIndexOutOfBounds)
let idx = regs[rc].intVal.int
# XXX what if the array is not 0-based? -> codegen should insert a sub
let src = regs[rb].node
if src.kind notin {nkEmpty..nkNilLit} and idx <% src.len:
regs[ra].node = src.sons[idx]
@@ -499,13 +503,13 @@ proc rawExecute(c: PCtx, start: int, tos: PStackFrame): TFullReg =
else:
stackTrace(c, tos, pc, errNilAccess)
of opcWrDeref:
# a[] = b
# a[] = c; b unused
let ra = instr.regA
let rb = instr.regB
let rc = instr.regC
case regs[ra].kind
of rkNodeAddr: putIntoNode(regs[ra].nodeAddr[], regs[rb])
of rkRegisterAddr: regs[ra].regAddr[] = regs[rb]
of rkNode: putIntoNode(regs[ra].node, regs[rb])
of rkNodeAddr: putIntoNode(regs[ra].nodeAddr[], regs[rc])
of rkRegisterAddr: regs[ra].regAddr[] = regs[rc]
of rkNode: putIntoNode(regs[ra].node, regs[rc])
else: stackTrace(c, tos, pc, errNilAccess)
of opcAddInt:
decodeBC(rkInt)
@@ -667,14 +671,11 @@ proc rawExecute(c: PCtx, start: int, tos: PStackFrame): TFullReg =
of opcLtu:
decodeBC(rkInt)
regs[ra].intVal = ord(regs[rb].intVal <% regs[rc].intVal)
of opcEqRef:
of opcEqRef, opcEqNimrodNode:
decodeBC(rkInt)
regs[ra].intVal = ord((regs[rb].node.kind == nkNilLit and
regs[rc].node.kind == nkNilLit) or
regs[rb].node == regs[rc].node)
of opcEqNimrodNode:
decodeBC(rkInt)
regs[ra].intVal = ord(regs[rb].node == regs[rc].node)
of opcXor:
decodeBC(rkInt)
regs[ra].intVal = ord(regs[rb].intVal != regs[rc].intVal)
@@ -720,18 +721,22 @@ proc rawExecute(c: PCtx, start: int, tos: PStackFrame): TFullReg =
regs[ra].intVal = ord(containsSets(a, b) and not equalSets(a, b))
of opcMulSet:
decodeBC(rkNode)
createSet(regs[ra])
move(regs[ra].node.sons,
nimsets.intersectSets(regs[rb].node, regs[rc].node).sons)
of opcPlusSet:
decodeBC(rkNode)
createSet(regs[ra])
move(regs[ra].node.sons,
nimsets.unionSets(regs[rb].node, regs[rc].node).sons)
of opcMinusSet:
decodeBC(rkNode)
createSet(regs[ra])
move(regs[ra].node.sons,
nimsets.diffSets(regs[rb].node, regs[rc].node).sons)
of opcSymdiffSet:
decodeBC(rkNode)
createSet(regs[ra])
move(regs[ra].node.sons,
nimsets.symdiffSets(regs[rb].node, regs[rc].node).sons)
of opcConcatStr:
@@ -742,11 +747,11 @@ proc rawExecute(c: PCtx, start: int, tos: PStackFrame): TFullReg =
regs[ra].node.strVal.add getstr(regs[i])
of opcAddStrCh:
decodeB(rkNode)
createStrKeepNode regs[ra]
#createStrKeepNode regs[ra]
regs[ra].node.strVal.add(regs[rb].intVal.chr)
of opcAddStrStr:
decodeB(rkNode)
createStrKeepNode regs[ra]
#createStrKeepNode regs[ra]
regs[ra].node.strVal.add(regs[rb].node.strVal)
of opcAddSeqElem:
decodeB(rkNode)
@@ -897,10 +902,10 @@ proc rawExecute(c: PCtx, start: int, tos: PStackFrame): TFullReg =
c.exceptionInstr = pc
let (newPc, newTos) = cleanUpOnException(c, tos)
# -1 because of the following 'inc'
if pc-1 < 0:
if newPc-1 < 0:
bailOut(c, tos)
return
pc = newPc -1
pc = newPc-1
if tos != newTos:
tos = newTos
move(regs, tos.slots)
@@ -983,7 +988,7 @@ proc rawExecute(c: PCtx, start: int, tos: PStackFrame): TFullReg =
return TFullReg(kind: rkNone)
of opcSetLenStr:
decodeB(rkNode)
createStrKeepNode regs[ra]
#createStrKeepNode regs[ra]
regs[ra].node.strVal.setLen(regs[rb].intVal.int)
of opcOf:
decodeBC(rkInt)

View File

@@ -207,7 +207,7 @@ const
largeInstrs* = { # instructions which use 2 int32s instead of 1:
opcSubStr, opcConv, opcCast, opcNewSeq, opcOf}
slotSomeTemp* = slotTempUnknown
relativeJumps* = {opcTJmp, opcFJmp, opcJmp}
relativeJumps* = {opcTJmp, opcFJmp, opcJmp, opcJmpBack}
template opcode*(x: TInstr): TOpcode {.immediate.} = TOpcode(x.uint32 and 0xff'u32)
template regA*(x: TInstr): TRegister {.immediate.} = TRegister(x.uint32 shr 8'u32 and 0xff'u32)

View File

@@ -15,6 +15,8 @@ proc readOutput(p: PProcess): string =
discard p.waitForExit
while not output.atEnd:
result.add(output.readLine)
result.add("\n")
result.setLen(result.len - "\n".len)
proc opGorge*(cmd, input: string): string =
var p = startCmd(cmd)

View File

@@ -9,6 +9,24 @@
## This module implements the code generator for the VM.
# Important things to remember:
# - The VM does not distinguish between definitions ('var x = y') and
# assignments ('x = y'). For simple data types that fit into a register
# this doesn't matter. However it matters for strings and other complex
# types that use the 'node' field; the reason is that slots are
# re-used in a register based VM. Example:
#
# .. code-block:: nimrod
# let s = a & b # no matter what, create fresh node
# s = a & b # no matter what, keep the node
#
# Also *stores* into non-temporary memory need to perform deep copies:
# a.b = x.y
# We used to generate opcAsgn for the *load* of 'x.y' but this is clearly
# wrong! We need to produce opcAsgn (the copy) for the *store*. This also
# solves the opcLdConst vs opcAsgnConst issue. Of course whether we need
# this copy depends on the involved types.
import
unsigned, strutils, ast, astalgo, types, msgs, renderer, vmdef,
trees, intsets, rodread, magicsys, options, lowerings
@@ -84,21 +102,27 @@ proc gABI(c: PCtx; n: PNode; opc: TOpcode; a, b: TRegister; imm: BiggestInt) =
# Takes the `b` register and the immediate `imm`, appies the operation `opc`,
# and stores the output value into `a`.
# `imm` is signed and must be within [-127, 128]
assert(imm >= -127 and imm <= 128)
let ins = (opc.uint32 or (a.uint32 shl 8'u32) or
(b.uint32 shl 16'u32) or
(imm+byteExcess).uint32 shl 24'u32).TInstr
c.code.add(ins)
c.debug.add(n.info)
if imm >= -127 and imm <= 128:
let ins = (opc.uint32 or (a.uint32 shl 8'u32) or
(b.uint32 shl 16'u32) or
(imm+byteExcess).uint32 shl 24'u32).TInstr
c.code.add(ins)
c.debug.add(n.info)
else:
localError(n.info, errGenerated,
"VM: immediate value does not fit into an int8")
proc gABx(c: PCtx; n: PNode; opc: TOpcode; a: TRegister = 0; bx: int) =
# Applies `opc` to `bx` and stores it into register `a`
# `bx` must be signed and in the range [-32767, 32768]
assert(bx >= -32767 and bx <= 32768)
let ins = (opc.uint32 or a.uint32 shl 8'u32 or
(bx+wordExcess).uint32 shl 16'u32).TInstr
c.code.add(ins)
c.debug.add(n.info)
if bx >= -32767 and bx <= 32768:
let ins = (opc.uint32 or a.uint32 shl 8'u32 or
(bx+wordExcess).uint32 shl 16'u32).TInstr
c.code.add(ins)
c.debug.add(n.info)
else:
localError(n.info, errGenerated,
"VM: immediate value does not fit into an int16")
proc xjmp(c: PCtx; n: PNode; opc: TOpcode; a: TRegister = 0): TPosition =
#assert opc in {opcJmp, opcFJmp, opcTJmp}
@@ -331,6 +355,7 @@ proc canonValue*(n: PNode): PNode =
proc rawGenLiteral(c: PCtx; n: PNode): int =
result = c.constants.len
assert(n.kind != nkCall)
n.flags.incl nfAllConst
c.constants.add n.canonValue
internalAssert result < 0x7fff
@@ -484,11 +509,22 @@ proc genField(n: PNode): TRegister =
"too large offset! cannot generate code for: " & s.name.s)
result = s.position
proc genIndex(c: PCtx; n: PNode; arr: PType): TRegister =
if arr.skipTypes(abstractInst).kind == tyArray and (let x = firstOrd(arr);
x != 0):
let tmp = c.genx(n)
# freeing the temporary here means we can produce: regA = regA - Imm
c.freeTemp(tmp)
result = c.getTemp(n.typ)
c.gABI(n, opcSubImmInt, result, tmp, x.int)
else:
result = c.genx(n)
proc genAsgnPatch(c: PCtx; le: PNode, value: TRegister) =
case le.kind
of nkBracketExpr:
let dest = c.genx(le.sons[0], {gfAddrOf})
let idx = c.genx(le.sons[1])
let idx = c.genIndex(le.sons[1], le.sons[0].typ)
c.gABC(le, opcWrArr, dest, idx, value)
c.freeTemp(dest)
c.freeTemp(idx)
@@ -501,12 +537,12 @@ proc genAsgnPatch(c: PCtx; le: PNode, value: TRegister) =
c.freeTemp(dest)
of nkDerefExpr, nkHiddenDeref:
let dest = c.genx(le.sons[0], {gfAddrOf})
c.gABC(le, opcWrDeref, dest, value)
c.gABC(le, opcWrDeref, dest, 0, value)
c.freeTemp(dest)
of nkSym:
if le.sym.isGlobal:
let dest = c.genx(le, {gfAddrOf})
c.gABC(le, opcWrDeref, dest, value)
c.gABC(le, opcWrDeref, dest, 0, value)
c.freeTemp(dest)
else:
discard
@@ -786,10 +822,12 @@ proc genMagic(c: PCtx; n: PNode; dest: var TDest) =
c.freeTemp(tmp)
of mSwap:
unused(n, dest)
var d = c.genx(n.sons[1])
var tmp = c.genx(n.sons[2])
c.gABC(n, opcSwap, d, tmp)
c.freeTemp(tmp)
var
d1 = c.genx(n.sons[1])
d2 = c.genx(n.sons[2])
c.gABC(n, opcSwap, d1, d2)
c.genAsgnPatch(n.sons[1], d1)
c.genAsgnPatch(n.sons[2], d2)
of mIsNil: genUnaryABC(c, n, dest, opcIsNil)
of mCopyStr:
if dest < 0: dest = c.getTemp(n.typ)
@@ -968,7 +1006,7 @@ const
proc fitsRegister*(t: PType): bool =
t.skipTypes(abstractInst-{tyTypeDesc}).kind in {
tyRange, tyEnum, tyBool, tyInt..tyUInt64}
tyRange, tyEnum, tyBool, tyInt..tyUInt64, tyChar}
proc requiresCopy(n: PNode): bool =
if n.typ.skipTypes(abstractInst-{tyTypeDesc}).kind in atomicTypes:
@@ -1065,17 +1103,36 @@ proc checkCanEval(c: PCtx; n: PNode) =
not s.isOwnedBy(c.prc.sym) and s.owner != c.module:
cannotEval(n)
proc isTemp(c: PCtx; dest: TDest): bool =
result = dest >= 0 and c.prc.slots[dest].kind >= slotTempUnknown
template needsAdditionalCopy(n): expr =
not c.isTemp(dest) and not fitsRegister(n.typ)
proc preventFalseAlias(c: PCtx; n: PNode; opc: TOpcode;
dest, idx, value: TRegister) =
# opcLdObj et al really means "load address". We sometimes have to create a
# copy in order to not introduce false aliasing:
# mylocal = a.b # needs a copy of the data!
if needsAdditionalCopy(n):
var cc = c.getTemp(n.typ)
c.gABC(n, whichAsgnOpc(n), cc, value)
c.gABC(n, opc, dest, idx, cc)
c.freeTemp(cc)
else:
c.gABC(n, opc, dest, idx, value)
proc genAsgn(c: PCtx; le, ri: PNode; requiresCopy: bool) =
case le.kind
of nkBracketExpr:
let dest = c.genx(le.sons[0], {gfAddrOf})
let idx = c.genx(le.sons[1])
let idx = c.genIndex(le.sons[1], le.sons[0].typ)
let tmp = c.genx(ri)
if le.sons[0].typ.skipTypes(abstractVarRange-{tyTypeDesc}).kind in {
tyString, tyCString}:
c.gABC(le, opcWrStrIdx, dest, idx, tmp)
c.preventFalseAlias(le, opcWrStrIdx, dest, idx, tmp)
else:
c.gABC(le, opcWrArr, dest, idx, tmp)
c.preventFalseAlias(le, opcWrArr, dest, idx, tmp)
c.freeTemp(tmp)
of nkDotExpr, nkCheckedFieldExpr:
# XXX field checks here
@@ -1083,12 +1140,12 @@ proc genAsgn(c: PCtx; le, ri: PNode; requiresCopy: bool) =
let dest = c.genx(left.sons[0], {gfAddrOf})
let idx = genField(left.sons[1])
let tmp = c.genx(ri)
c.gABC(left, opcWrObj, dest, idx, tmp)
c.preventFalseAlias(left, opcWrObj, dest, idx, tmp)
c.freeTemp(tmp)
of nkDerefExpr, nkHiddenDeref:
let dest = c.genx(le.sons[0], {gfAddrOf})
let tmp = c.genx(ri)
c.gABC(le, opcWrDeref, dest, tmp)
c.preventFalseAlias(le, opcWrDeref, dest, 0, tmp)
c.freeTemp(tmp)
of nkSym:
let s = le.sym
@@ -1097,24 +1154,32 @@ proc genAsgn(c: PCtx; le, ri: PNode; requiresCopy: bool) =
withTemp(tmp, le.typ):
c.gen(le, tmp, {gfAddrOf})
let val = c.genx(ri)
c.gABC(le, opcWrDeref, tmp, val)
c.preventFalseAlias(le, opcWrDeref, tmp, 0, val)
c.freeTemp(val)
else:
if s.kind == skForVar: c.setSlot s
internalAssert s.position > 0 or (s.position == 0 and
s.kind in {skParam,skResult})
var dest: TRegister = s.position + ord(s.kind == skParam)
gen(c, ri, dest)
if needsAdditionalCopy(le) and s.kind in {skResult, skVar, skParam}:
var cc = c.getTemp(le.typ)
gen(c, ri, cc)
c.gABC(le, whichAsgnOpc(le), dest, cc)
c.freeTemp(cc)
else:
gen(c, ri, dest)
else:
let dest = c.genx(le, {gfAddrOf})
genAsgn(c, dest, ri, requiresCopy)
proc genLit(c: PCtx; n: PNode; dest: var TDest) =
var opc = opcLdConst
# opcLdConst is now always valid. We produce the necessary copy in the
# assignments now:
#var opc = opcLdConst
if dest < 0: dest = c.getTemp(n.typ)
elif c.prc.slots[dest].kind == slotFixedVar: opc = opcAsgnConst
#elif c.prc.slots[dest].kind == slotFixedVar: opc = opcAsgnConst
let lit = genLiteral(c, n)
c.gABx(n, opc, dest, lit)
c.gABx(n, opcLdConst, dest, lit)
proc genTypeLit(c: PCtx; t: PType; dest: var TDest) =
var n = newNode(nkType)
@@ -1143,7 +1208,7 @@ proc genGlobalInit(c: PCtx; n: PNode; s: PSym) =
let dest = c.getTemp(s.typ)
c.gABx(n, opcLdGlobal, dest, s.position)
let tmp = c.genx(s.ast)
c.gABC(n, opcWrDeref, dest, tmp)
c.preventFalseAlias(n, opcWrDeref, dest, 0, tmp)
c.freeTemp(dest)
c.freeTemp(tmp)
@@ -1179,17 +1244,22 @@ proc genRdVar(c: PCtx; n: PNode; dest: var TDest; flags: TGenFlags) =
# see tests/t99bott for an example that triggers it:
cannotEval(n)
template needsRegLoad(): expr =
gfAddrOf notin flags and fitsRegister(n.typ.skipTypes({tyVar}))
proc genArrAccess2(c: PCtx; n: PNode; dest: var TDest; opc: TOpcode;
flags: TGenFlags) =
let a = c.genx(n.sons[0], flags)
let b = c.genx(n.sons[1], {})
let b = c.genIndex(n.sons[1], n.sons[0].typ)
if dest < 0: dest = c.getTemp(n.typ)
if gfAddrOf notin flags and fitsRegister(n.typ):
if needsRegLoad():
var cc = c.getTemp(n.typ)
c.gABC(n, opc, cc, a, b)
c.gABC(n, opcNodeToReg, dest, cc)
c.freeTemp(cc)
else:
#message(n.info, warnUser, "argh")
#echo "FLAGS ", flags, " ", fitsRegister(n.typ), " ", typeToString(n.typ)
c.gABC(n, opc, dest, a, b)
c.freeTemp(a)
c.freeTemp(b)
@@ -1198,7 +1268,7 @@ proc genObjAccess(c: PCtx; n: PNode; dest: var TDest; flags: TGenFlags) =
let a = c.genx(n.sons[0], flags)
let b = genField(n.sons[1])
if dest < 0: dest = c.getTemp(n.typ)
if gfAddrOf notin flags and fitsRegister(n.typ.skipTypes({tyVar})):
if needsRegLoad():
var cc = c.getTemp(n.typ)
c.gABC(n, opcLdObj, cc, a, b)
c.gABC(n, opcNodeToReg, dest, cc)
@@ -1298,7 +1368,7 @@ proc genVarSection(c: PCtx; n: PNode) =
if a.sons[2].kind != nkEmpty:
let tmp = c.genx(a.sons[0], {gfAddrOf})
let val = c.genx(a.sons[2])
c.gABC(a, opcWrDeref, tmp, val)
c.preventFalseAlias(a, opcWrDeref, tmp, 0, val)
c.freeTemp(val)
c.freeTemp(tmp)
else:
@@ -1306,7 +1376,16 @@ proc genVarSection(c: PCtx; n: PNode) =
if a.sons[2].kind == nkEmpty:
c.gABx(a, ldNullOpcode(s.typ), s.position, c.genType(s.typ))
else:
gen(c, a.sons[2], s.position.TRegister)
if not fitsRegister(s.typ):
c.gABx(a, ldNullOpcode(s.typ), s.position, c.genType(s.typ))
let le = a.sons[0]
if not fitsRegister(le.typ) and s.kind in {skResult, skVar, skParam}:
var cc = c.getTemp(le.typ)
gen(c, a.sons[2], cc)
c.gABC(le, whichAsgnOpc(le), s.position.TRegister, cc)
c.freeTemp(cc)
else:
gen(c, a.sons[2], s.position.TRegister)
else:
# assign to a.sons[0]; happens for closures
if a.sons[2].kind == nkEmpty:
@@ -1334,7 +1413,7 @@ proc genArrayConstr(c: PCtx, n: PNode, dest: var TDest) =
c.gABx(n, opcLdNullReg, tmp, c.genType(intType))
for x in n:
let a = c.genx(x)
c.gABC(n, whichAsgnOpc(x, opcWrArr), dest, tmp, a)
c.preventFalseAlias(n, whichAsgnOpc(x, opcWrArr), dest, tmp, a)
c.gABI(n, opcAddImmInt, tmp, tmp, 1)
c.freeTemp(a)
c.freeTemp(tmp)
@@ -1366,7 +1445,8 @@ proc genObjConstr(c: PCtx, n: PNode, dest: var TDest) =
if it.kind == nkExprColonExpr and it.sons[0].kind == nkSym:
let idx = genField(it.sons[0])
let tmp = c.genx(it.sons[1])
c.gABC(it, whichAsgnOpc(it.sons[1], opcWrObj), dest, idx, tmp)
c.preventFalseAlias(it.sons[1], whichAsgnOpc(it.sons[1], opcWrObj),
dest, idx, tmp)
c.freeTemp(tmp)
else:
internalError(n.info, "invalid object constructor")
@@ -1380,11 +1460,12 @@ proc genTupleConstr(c: PCtx, n: PNode, dest: var TDest) =
if it.kind == nkExprColonExpr:
let idx = genField(it.sons[0])
let tmp = c.genx(it.sons[1])
c.gABC(it, whichAsgnOpc(it.sons[1], opcWrObj), dest, idx, tmp)
c.preventFalseAlias(it.sons[1], whichAsgnOpc(it.sons[1], opcWrObj),
dest, idx, tmp)
c.freeTemp(tmp)
else:
let tmp = c.genx(it)
c.gABC(it, whichAsgnOpc(it, opcWrObj), dest, i.TRegister, tmp)
c.preventFalseAlias(it, whichAsgnOpc(it, opcWrObj), dest, i.TRegister, tmp)
c.freeTemp(tmp)
proc genProc*(c: PCtx; s: PSym): int
@@ -1622,7 +1703,7 @@ proc genProc(c: PCtx; s: PSym): int =
c.gABC(body, opcEof, eofInstr.regA)
c.optimizeJumps(result)
s.offset = c.prc.maxSlots
#if s.name.s == "addStuff":
#if s.name.s == "calc":
# echo renderTree(body)
# c.echoCode(result)
c.prc = oldPrc

View File

@@ -16,6 +16,7 @@ arm.linux.gcc.linkerexe = "arm-linux-gcc"
path="$lib/core"
path="$lib/pure"
path="$lib/pure/collections"
path="$lib/pure/concurrency"
path="$lib/impure"
path="$lib/wrappers"
# path="$lib/wrappers/cairo"

View File

@@ -1,152 +0,0 @@
==============
Abstract types
==============
.. contents::
Abstract types in Nimrod provide a means to model different `units`:idx: of
a `base type`:idx:.
Use case 1: SQL strings
-----------------------
An SQL statement that is passed from Nimrod to an SQL database might be
modelled as a string. However, using string templates and filling in the
values is vulnerable to the famous `SQL injection attack`:idx:\:
.. code-block:: nimrod
proc query(db: TDbHandle, statement: TSQL) = ...
var
username: string
db.query("SELECT FROM users WHERE name = '$1'" % username)
# Horrible security hole, but the compiler does not mind!
This can be avoided by distinguishing strings that contain SQL from strings
that don't. Abstract types provide a means to introduce a new string type
``TSQL`` that is incompatible with ``string``:
.. code-block:: nimrod
type
TSQL = abstract string
proc query(db: TDbHandle, statement: TSQL) = ...
var
username: string
db.query("SELECT FROM users WHERE name = '$1'" % username)
# Error at compile time: `query` expects an SQL string!
It is an essential property of abstract types that they **do not** imply a
subtype relation between the abstract type and its base type. Explicit type
conversions from ``string`` to ``TSQL`` are allowed:
.. code-block:: nimrod
proc properQuote(s: string): TSQL =
# quotes a string properly for an SQL statement
...
proc `%` (frmt: TSQL, values: openarray[string]): TSQL =
# quote each argument:
var v = values.each(properQuote)
# we need a temporary type for the type conversion :-(
type TStrSeq = seq[string]
# call strutils.`%`:
result = TSQL(string(frmt) % TStrSeq(v))
db.query("SELECT FROM users WHERE name = $1".TSQL % username)
Now we have compile-time checking against SQL injection attacks.
Since ``"".TSQL`` is transformed to ``TSQL("")`` no new syntax is needed
for nice looking ``TSQL`` string literals.
Use case 2: Money
-----------------
Different currencies should not be mixed in monetary calculations. Abstract
types are a perfect tool to model different currencies:
.. code-block:: nimrod
type
TDollar = abstract int
TEuro = abstract int
var
d: TDollar
e: TEuro
echo d + 12
# Error: cannot add a number with no unit with a ``TDollar``
Unfortunately, ``d + 12.TDollar`` is not allowed either,
because ``+`` is defined for ``int`` (among others), not for ``TDollar``. So
we define our own ``+`` for dollars:
.. code-block::
proc `+` (x, y: TDollar): TDollar =
result = TDollar(int(x) + int(y))
It does not make sense to multiply a dollar with a dollar, but with a
number without unit; and the same holds for division:
.. code-block::
proc `*` (x: TDollar, y: int): TDollar =
result = TDollar(int(x) * y)
proc `*` (x: int, y: TDollar): TDollar =
result = TDollar(x * int(y))
proc `div` ...
This quickly gets tedious. The implementations are trivial and the compiler
should not generate all this code only to optimize it away later - after all
``+`` for dollars should produce the same binary code as ``+`` for ints.
The pragma ``borrow`` has been designed to solve this problem; in principle
it generates the trivial implementation for us:
.. code-block:: nimrod
proc `*` (x: TDollar, y: int): TDollar {.borrow.}
proc `*` (x: int, y: TDollar): TDollar {.borrow.}
proc `div` (x: TDollar, y: int): TDollar {.borrow.}
The ``borrow`` pragma makes the compiler use the same implementation as
the proc that deals with the abstract type's base type, so no code is
generated.
But it seems we still have to repeat all this boilerplate code for
the ``TEuro`` currency. Fortunately, Nimrod has a template mechanism:
.. code-block:: nimrod
template Additive(typ: typeDesc): stmt =
proc `+` *(x, y: typ): typ {.borrow.}
proc `-` *(x, y: typ): typ {.borrow.}
# unary operators:
proc `+` *(x: typ): typ {.borrow.}
proc `-` *(x: typ): typ {.borrow.}
template Multiplicative(typ, base: typeDesc): stmt =
proc `*` *(x: typ, y: base): typ {.borrow.}
proc `*` *(x: base, y: typ): typ {.borrow.}
proc `div` *(x: typ, y: base): typ {.borrow.}
proc `mod` *(x: typ, y: base): typ {.borrow.}
template Comparable(typ: typeDesc): stmt =
proc `<` * (x, y: typ): bool {.borrow.}
proc `<=` * (x, y: typ): bool {.borrow.}
proc `==` * (x, y: typ): bool {.borrow.}
template DefineCurrency(typ, base: expr): stmt =
type
typ* = abstract base
Additive(typ)
Multiplicative(typ, base)
Comparable(typ)
DefineCurrency(TDollar, int)
DefineCurrency(TEuro, int)

View File

@@ -2,6 +2,7 @@ Advanced commands:
//compileToC, cc compile project with C code generator
//compileToCpp, cpp compile project to C++ code
//compileToOC, objc compile project to Objective C code
//js compile project to Javascript
//rst2html convert a reStructuredText file to HTML
//rst2tex convert a reStructuredText file to TeX
//jsondoc extract the documentation to a json file

View File

@@ -1,6 +1,6 @@
==========
API design
==========
=================
API naming design
=================
The API is designed to be **easy to use** and consistent. Ease of use is
measured by the number of calls to achieve a concrete high level action.
@@ -79,3 +79,10 @@ string str
identifier ident
indentation indent
------------------- ------------ --------------------------------------
Coding Guidelines
=================
For coding guidelines see the `Internals of the Nimrod Compiler
<intern.html#coding-guidelines>`_ documentation.

409
doc/backends.txt Normal file
View File

@@ -0,0 +1,409 @@
================================
Nimrod Backend Integration
================================
:Author: Puppet Master
:Version: |nimrodversion|
.. contents::
"Heresy grows from idleness." -- Unknown.
Introduction
============
The `Nimrod Compiler User Guide <nimrodc.html>`_ documents the typical
compiler invocation, using the ``compile`` or ``c`` command to transform a
``.nim`` file into one or more ``.c`` files which are then compiled with the
platform's C compiler into a static binary. However there are other commands
to compile to C++, Objective-C or JavaScript. This document tries to
concentrate in a single place all the backend and interfacing options.
The Nimrod compiler supports mainly two backend families: the C, C++ and
Objective-C targets and the JavaScript target. `The C like targets`_ create
source files which can be compiled into a library or a final executable. `The
JavaScript target`_ can generate a ``.js`` file which you reference from an
HTML file or create a `standalone nodejs program <http://nodejs.org>`_.
On top of generating libraries or standalone applications, Nimrod offers
bidirectional interfacing with the backend targets through generic and
specific pragmas.
Backends
========
The C like targets
------------------
The commands to compile to either C, C++ or Objective-C are:
//compileToC, cc compile project with C code generator
//compileToCpp, cpp compile project to C++ code
//compileToOC, objc compile project to Objective C code
The most significant difference between these commands is that if you look
into the ``nimcache`` directory you will find ``.c``, ``.cpp`` or ``.m``
files, other than that all of them will produce a native binary for your
project. This allows you to take the generated code and place it directly
into a project using any of these languages. Here are some typical command
line invocations::
$ nimrod c hallo.nim
$ nimrod cpp hallo.nim
$ nimrod objc hallo.nim
The compiler commands select the target backend, but if needed you can
`specify additional switches for cross compilation
<nimrodc.html#cross-compilation>`_ to select the target CPU, operating system
or compiler/linker commands.
The JavaScript target
---------------------
Nimrod can also generate `JavaScript`:idx: code through the ``js`` command.
However, the JavaScript code generator is experimental!
Nimrod targets JavaScript 1.5 which is supported by any widely used browser.
Since JavaScript does not have a portable means to include another module,
Nimrod just generates a long ``.js`` file.
Features or modules that the JavaScript platform does not support are not
available. This includes:
* manual memory management (``alloc``, etc.)
* casting and other unsafe operations (``cast`` operator, ``zeroMem``, etc.)
* file management
* most modules of the Standard library
* proper 64 bit integer arithmetic
* unsigned integer arithmetic
However, the modules `strutils <strutils.html>`_, `math <math.html>`_, and
`times <times.html>`_ are available! To access the DOM, use the `dom
<dom.html>`_ module that is only available for the JavaScript platform.
To compile a Nimrod module into a ``.js`` file use the ``js`` command; the
default is a ``.js`` file that is supposed to be referenced in an ``.html``
file. However, you can also run the code with `nodejs`:idx:, a `software
platform for easily building fast, scalable network applications
<http://nodejs.org>`_::
nimrod js -d:nodejs -r examples/hallo.nim
Interfacing
===========
Nimrod offers bidirectional interfacing with the target backend. This means
that you can call backend code from Nimrod and Nimrod code can be called by
the backend code. Usually the direction of which calls which depends on your
software architecture (is Nimrod your main program or is Nimrod providing a
component?).
Nimrod code calling the backend
--------------------------------
Nimrod code can interface with the backend through the `Foreign function
interface <manual.html#foreign-function-interface>`_ mainly through the
`importc pragma <manual.html#importc-pragma>`_. The ``importc`` pragma is the
*generic* way of making backend symbols available in Nimrod and is available
in all the target backends (JavaScript too). The C++ or Objective-C backends
have their respective `ImportCpp <nimrodc.html#importcpp-pragma>`_ and
`ImportObjC <nimrodc.html#importobjc-pragma>`_ pragmas to call methods from
classes.
Whenever you use any of these pragmas you need to integrate native code into
your final binary. In the case of JavaScript this is no problem at all, the
same html file which hosts the generated JavaScript will likely provide other
JavaScript functions which you are importing with ``importc``.
However, for the C like targets you need to link external code either
statically or dynamically. The preferred way of integrating native code is to
use dynamic linking because it allows you to compile Nimrod programs without
the need for having the related development libraries installed. This is done
through the `dynlib pragma for import
<manual.html#dynlib-pragma-for-import>`_, though more specific control can be
gained using the `dynlib module <dynlib.html>`_.
The `dynlibOverride <nimrodc.html#dynliboverride>`_ command line switch allows
to avoid dynamic linking if you need to statically link something instead.
Nimrod wrappers designed to statically link source files can use the `compile
pragma <nimrodc.html#compile-pragma>`_ if there are few sources or providing
them along the Nimrod code is easier than using a system library. Libraries
installed on the host system can be linked in with the `PassL pragma
<nimrodc.html#passl-pragma>`_.
To wrap native code, take a look at the `c2nim tool <c2nim.html>`_ which helps
with the process of scanning and transforming header files into a Nimrod
interface.
C invocation example
~~~~~~~~~~~~~~~~~~~~
Create a ``logic.c`` file with the following content:
.. code-block:: c
int addTwoIntegers(int a, int b)
{
return a + b;
}
Create a ``calculator.nim`` file with the following content:
.. code-block:: nimrod
{.compile: "logic.c".}
proc addTwoIntegers(a, b: cint): cint {.importc.}
when isMainModule:
echo addTwoIntegers(3, 7)
With these two files in place, you can run ``nimrod c -r calculator.nim`` and
the Nimrod compiler will compile the ``logic.c`` file in addition to
``calculator.nim`` and link both into an executable, which outputs ``10`` when
run. Another way to link the C file statically and get the same effect would
be to remove the line with the ``compile`` pragma and run the following typical
Unix commands::
$ gcc -c logic.c
$ ar rvs mylib.a logic.o
$ nimrod c --passL:mylib.a -r calculator.nim
Just like in this example we pass the path to the ``mylib.a`` library (and we
could as well pass ``logic.o``) we could be passing switches to link any other
static C library.
JavaScript invocation example
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create a ``host.html`` file with the following content:
.. code-block::
<html><body>
<script type="text/javascript">
function addTwoIntegers(a, b)
{
return a + b;
}
</script>
<script type="text/javascript" src="calculator.js"></script>
</body></html>
Create a ``calculator.nim`` file with the following content (or reuse the one
from the previous section):
.. code-block:: nimrod
proc addTwoIntegers(a, b: int): int {.importc.}
when isMainModule:
echo addTwoIntegers(3, 7)
Compile the Nimrod code to JavaScript with ``nimrod js -o:calculator.js
calculator.nim`` and open ``host.html`` in a browser. If the browser supports
javascript, you should see the value ``10``. In JavaScript the `echo proc
<system.html#echo>`_ will modify the HTML DOM and append the string. Use the
`dom module <dom.html>`_ for specific DOM querying and modification procs.
Backend code calling Nimrod
---------------------------
Backend code can interface with Nimrod code exposed through the `exportc
pragma <manual.html#exportc-pragma>`_. The ``exportc`` pragma is the *generic*
way of making Nimrod symbols available to the backends. By default the Nimrod
compiler will mangle all the Nimrod symbols to avoid any name collision, so
the most significant thing the ``exportc`` pragma does is maintain the Nimrod
symbol name, or if specified, use an alternative symbol for the backend in
case the symbol rules don't match.
The JavaScript target doesn't have any further interfacing considerations
since it also has garbage collection, but the C targets require you to
initialize Nimrod's internals, which is done calling a ``NimMain`` function.
Also, C code requires you to specify a forward declaration for functions or
the compiler will assume certain types for the return value and parameters
which will likely make your program crash at runtime.
The Nimrod compiler can generate a C interface header through the ``--header``
command line switch. The generated header will contain all the exported
symbols and the ``NimMain`` proc which you need to call before any other
Nimrod code.
Nimrod invocation example from C
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create a ``fib.nim`` file with the following content:
.. code-block:: nimrod
proc fib(a: cint): cint {.exportc.} =
if a <= 2:
result = 1
else:
result = fib(a - 1) + fib(a - 2)
Create a ``maths.c`` file with the following content:
.. code-block:: c
#include "fib.h"
#include <stdio.h>
int main(void)
{
NimMain();
for (int f = 0; f < 10; f++)
printf("Fib of %d is %d\n", f, fib(f));
return 0;
}
Now you can run the following Unix like commands to first generate C sources
from the Nimrod code, then link them into a static binary along your main C
program::
$ nimrod c --noMain --noLinking --header:fib.h fib.nim
$ gcc -o m -Inimcache -Ipath/to/nimrod/lib nimcache/*.c maths.c
The first command runs the Nimrod compiler with three special options to avoid
generating a ``main()`` function in the generated files, avoid linking the
object files into a final binary, and explicitly generate a header file for C
integration. All the generated files are placed into the ``nimcache``
directory. That's why the next command compiles the ``maths.c`` source plus
all the ``.c`` files from ``nimcache``. In addition to this path, you also
have to tell the C compiler where to find Nimrod's ``nimbase.h`` header file.
Instead of depending on the generation of the individual ``.c`` files you can
also ask the Nimrod compiler to generate a statically linked library::
$ nimrod c --app:staticLib --noMain --header fib.nim
$ gcc -o m -Inimcache -Ipath/to/nimrod/lib libfib.nim.a maths.c
The Nimrod compiler will handle linking the source files generated in the
``nimcache`` directory into the ``libfib.nim.a`` static library, which you can
then link into your C program. Note that these commands are generic and will
vary for each system. For instance, on Linux systems you will likely need to
use ``-ldl`` too to link in required dlopen functionality.
Nimrod invocation example from JavaScript
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create a ``mhost.html`` file with the following content:
.. code-block::
<html><body>
<script type="text/javascript" src="fib.js"></script>
<script type="text/javascript">
alert("Fib for 9 is " + fib(9));
</script>
</body></html>
Create a ``fib.nim`` file with the following content (or reuse the one
from the previous section):
.. code-block:: nimrod
proc fib(a: cint): cint {.exportc.} =
if a <= 2:
result = 1
else:
result = fib(a - 1) + fib(a - 2)
Compile the Nimrod code to JavaScript with ``nimrod js -o:fib.js fib.nim`` and
open ``mhost.html`` in a browser. If the browser supports javascript, you
should see an alert box displaying the text ``Fib for 9 is 34``. As mentioned
earlier, JavaScript doesn't require an initialisation call to ``NimMain`` or
similar function and you can call the exported Nimrod proc directly.
Memory management
=================
In the previous sections the ``NimMain()`` function reared its head. Since
JavaScript already provides automatic memory management, you can freely pass
objects between the two languages without problems. In C and derivative languages
you need to be careful about what you do and how you share memory. The
previous examples only dealt with simple scalar values, but passing a Nimrod
string to C, or reading back a C string in Nimrod already requires you to be
aware of who controls what to avoid crashing.
Strings and C strings
---------------------
The manual mentions that `Nimrod strings are implicitly convertible to
cstrings <manual.html#cstring-type>`_ which makes interaction usually
painless. Most C functions accepting a Nimrod string converted to a
``cstring`` will likely not need to keep this string around and by the time
they return the string won't be needed any more. However, for the rare cases
where a Nimrod string has to be preserved and made available to the C backend
as a ``cstring``, you will need to manually prevent the string data from being
freed with `GC_ref <system.html#GC_ref>`_ and `GC_unref
<system.html#GC_unref>`_.
A similar thing happens with C code invoking Nimrod code which returns a
``cstring``. Consider the following proc:
.. code-block:: nimrod
proc gimme(): cstring {.exportc.} =
result = "Hey there C code! " & $random(100)
Since Nimrod's garbage collector is not aware of the C code, once the
``gimme`` proc has finished it can reclaim the memory of the ``cstring``.
However, from a practical standpoint, the C code invoking the ``gimme``
function directly will be able to use it since Nimrod's garbage collector has
not had a chance to run *yet*. This gives you enough time to make a copy for
the C side of the program, as calling any further Nimrod procs *might* trigger
garbage collection making the previously returned string garbage. Or maybe you
are `triggering yourself the collection <gc.html>`_.
Custom data types
-----------------
Just like strings, custom data types that are to be shared between Nimrod and
the backend will need careful consideration of who controls what. If you want
to hand a Nimrod reference to C code, you will need to use `GC_ref
<system.html#GC_ref>`_ to mark the reference as used, so it does not get
freed. And for the C backend you will need to expose the `GC_unref
<system.html#GC_unref>`_ proc to clean up this memory when it is not required
any more.
Again, if you are wrapping a library which *mallocs* and *frees* data
structures, you need to expose the appropriate *free* function to Nimrod so
you can clean it up. And of course, once cleaned you should avoid accessing it
from Nimrod (or C for that matter). Typically C data structures have their own
``malloc_structure`` and ``free_structure`` specific functions, so wrapping
these for the Nimrod side should be enough.
Thread coordination
-------------------
When the ``NimMain()`` function is called Nimrod initializes the garbage
collector to the current thread, which is usually the main thread of your
application. If your C code later spawns a different thread and calls Nimrod
code, the garbage collector will fail to work properly and you will crash.
As long as you don't use the threadvar emulation, Nimrod uses native thread
variables, of which you get a fresh version whenever you create a thread. You
can then attach a GC to this thread via
.. code-block:: nimrod
setStackBottom(addr(someLocal))
initGC()
At the moment this support is still experimental so you need to expose these
functions yourself or submit patches to request a public API.
It is **not** safe to disable the garbage collector and enable it after the
call from your background thread even if the code you are calling is short
lived.

View File

@@ -1,6 +1,6 @@
=================================
c2nim User's manual
=================================
=======================
c2nim User's manual
=======================
:Author: Andreas Rumpf
:Version: |nimrodversion|

View File

@@ -164,20 +164,10 @@ Index switch
nimrod doc2 --index:on filename.nim
This will generate an index of all the exported symbols in the input Nimrod
module, and put it into a neighboring file with the extension of `.idx`. The
index file is line oriented (newlines have to be escaped). Each line represents
a tab separated record of several columns, the first two mandatory, the rest
optional:
1. Mandatory term being indexed. Terms can include quoting according to
Nimrod's rules (eg. ```^```)
2. Base filename plus anchor hyper link (eg.
``algorithm.html#*,int,TSortOrder``).
3. Optional human readable string to display as hyper link. If the value is not
present or is the empty string, the hyper link will be rendered using the
term.
4. Optional title or description of the hyper link. Browsers usually display
this as a tooltip after hovering a moment over the hyper link.
module, and put it into a neighboring file with the extension of ``.idx``. The
index file is line oriented (newlines have to be escaped). Each line
represents a tab separated record of several columns, the first two mandatory,
the rest optional. See the `Index (idx) file format`_ section for details.
Once index files have been generated for one or more modules, the Nimrod
compiler command ``buildIndex directory`` can be run to go over all the index
@@ -295,6 +285,60 @@ symbols in the `system module <system.html>`_.
<system.html#newException.t,typedesc,string>`_
Index (idx) file format
=======================
Files with the ``.idx`` extension are generated when you use the `Index
switch`_ along with commands to generate documentation from source or text
files. You can programmatically generate indices with the `setIndexTerm()
<rstgen.html#setIndexTerm>`_ and `writeIndexFile()
<rstgen.html#writeIndexFile>`_ procs. The purpose of ``idx`` files is to hold
the interesting symbols and their HTML references so they can be later
concatenated into a big index file with `mergeIndexes()
<rstgen.html#mergeIndexes>`_. This section documents the file format in
detail.
Index files are line oriented and tab separated (newline and tab characters
have to be escaped). Each line represents a record with at least two fields,
but can have up to four (additional columns are ignored). The content of these
columns is:
1. Mandatory term being indexed. Terms can include quoting according to
Nimrod's rules (eg. \`^\` like in `the actors module
<actors.html#^,ptr.TChannel[T]>`_).
2. Base filename plus anchor hyper link (eg.
``algorithm.html#*,int,TSortOrder``).
3. Optional human readable string to display as hyper link. If the value is not
present or is the empty string, the hyper link will be rendered
using the term. Prefix whitespace indicates that this entry is
not for an API symbol but for a TOC entry.
4. Optional title or description of the hyper link. Browsers usually display
this as a tooltip after hovering a moment over the hyper link.
The index generation tools try to differentiate between documentation
generated from ``.nim`` files and documentation generated from ``.txt`` or
``.rst`` files. The former are always closely related to source code and
consist mainly of API entries. The latter are generic documents meant for
human reading.
To differentiate both types (documents and APIs), the index generator will add
to the index of documents an entry with the title of the document. Since the
title is the topmost element, it will be added with a second field containing
just the filename without any HTML anchor. By convention this entry without
anchor is the *title entry*, and since entries in the index file are added as
they are scanned, the title entry will be the first line. The title for APIs
is not present because it can be generated concatenating the name of the file
to the word **Module**.
Normal symbols are added to the index with surrounding whitespaces removed. An
exception to this are table of content (TOC) entries. TOC entries are added to
the index file with their third column having as much prefix spaces as their
level is in the TOC (at least 1 character). The prefix whitespace helps to
filter TOC entries from API or text symbols. This is important because the
amount of spaces is used to replicate the hierarchy for document TOCs in the
final index, and TOC entries found in ``.nim`` files are discarded.
Additional resources
====================

View File

@@ -6,19 +6,19 @@ The documentation consists of several documents:
- | `Tutorial (part II) <tut2.html>`_
| The Nimrod tutorial part two deals with the advanced language constructs.
- | `Language Manual <manual.html>`_
| The Nimrod manual is a draft that will evolve into a proper specification.
- | `Library documentation <lib.html>`_
| This document describes Nimrod's standard library.
- | `User guide <nimrodc.html>`_
- | `Compiler user guide <nimrodc.html>`_
| The user guide lists command line arguments, special features of the
compiler, etc.
- | `Tools documentation <tools.html>`_
| Description of some tools that come with the standard distribution.
- | `Manual <manual.html>`_
| The Nimrod manual is a draft that will evolve into a proper specification.
- | `GC <gc.html>`_
| Additional documentation about Nimrod's GC and how to operate it in a
| realtime setting.

View File

@@ -1,6 +1,6 @@
===========================================
Embedded Nimrod Debugger User Guide
===========================================
==============================================
Embedded Nimrod Debugger (ENDB) User Guide
==============================================
:Author: Andreas Rumpf
:Version: |nimrodversion|
@@ -9,7 +9,7 @@
Nimrod comes with a platform independent debugger -
the `Embedded Nimrod Debugger`:idx: (`ENDB`:idx:). The debugger is
the Embedded Nimrod Debugger (ENDB). The debugger is
*embedded* into your executable if it has been
compiled with the ``--debugger:on`` command line option.
This also defines the conditional symbol ``ENDB`` for you.
@@ -102,7 +102,7 @@ necessary: A special pragma has been defined for this:
The ``breakpoint`` pragma
-------------------------
The `breakpoint`:idx: pragma is syntactically a statement. It can be used
The ``breakpoint`` pragma is syntactically a statement. It can be used
to mark the *following line* as a breakpoint:
.. code-block:: Nimrod
@@ -122,7 +122,7 @@ debugging.
The ``watchpoint`` pragma
-------------------------
The `watchpoint`:idx: pragma is syntactically a statement. It can be used
The ``watchpoint`` pragma is syntactically a statement. It can be used
to mark a location as a watchpoint:
.. code-block:: Nimrod

View File

@@ -1,13 +1,13 @@
============================================
Embedded Stack Trace Profiler User Guide
============================================
===================================================
Embedded Stack Trace Profiler (ESTP) User Guide
===================================================
:Author: Andreas Rumpf
:Version: |nimrodversion|
Nimrod comes with a platform independent profiler -
the `Embedded Stack Trace Profiler`:idx: (`ESTP`:idx:). The profiler
the Embedded Stack Trace Profiler (ESTP). The profiler
is *embedded* into your executable. To activate the profiler you need to do:
* compile your program with the ``--profiler:on --stackTrace:on`` command

View File

@@ -26,7 +26,7 @@ available parameters depend on the invoked filter.
Pipe operator
=============
Filters can be combined with the ``|`` `pipe operator`:idx:\ ::
Filters can be combined with the ``|`` pipe operator::
#! strip(startswith="<") | stdtmpl
#proc generateXML(name, age: string): string =
@@ -46,7 +46,7 @@ after each filter application.
Replace filter
--------------
The `replace`:idx: filter replaces substrings in each line.
The replace filter replaces substrings in each line.
Parameters and their defaults:
@@ -60,7 +60,7 @@ Parameters and their defaults:
Strip filter
------------
The `strip`:idx: filter simply removes leading and trailing whitespace from
The strip filter simply removes leading and trailing whitespace from
each line.
Parameters and their defaults:
@@ -79,7 +79,7 @@ Parameters and their defaults:
StdTmpl filter
--------------
The `stdtmpl`:idx: filter provides a simple templating engine for Nimrod. The
The stdtmpl filter provides a simple templating engine for Nimrod. The
filter uses a line based parser: Lines prefixed with a *meta character*
(default: ``#``) contain Nimrod code, other lines are verbatim. Because
indentation-based parsing is not suited for a templating engine, control flow

View File

@@ -24,7 +24,7 @@ ampExpr = plusExpr (OP6 optInd plusExpr)*
plusExpr = mulExpr (OP7 optInd mulExpr)*
mulExpr = dollarExpr (OP8 optInd dollarExpr)*
dollarExpr = primary (OP9 optInd primary)*
symbol = '`' (KEYW|IDENT|operator|'(' ')'|'[' ']'|'{' '}'|'='|literal)+ '`'
symbol = '`' (KEYW|IDENT|literal|(operator|'('|')'|'['|']'|'{'|'}'|'=')+)+ '`'
| IDENT
indexExpr = expr
indexExprList = indexExpr ^+ comma
@@ -82,10 +82,11 @@ paramListColon = paramList? (':' optInd typeDesc)?
doBlock = 'do' paramListArrow pragmas? colcom stmt
doBlocks = doBlock ^* IND{=}
procExpr = 'proc' paramListColon pragmas? ('=' COMMENT? stmt)?
distinct = 'distinct' optInd typeDesc
expr = (ifExpr
| whenExpr
| caseExpr
| tryStmt)
| tryExpr)
/ simpleExpr
typeKeyw = 'var' | 'ref' | 'ptr' | 'shared' | 'type' | 'tuple'
| 'proc' | 'iterator' | 'distinct' | 'object' | 'enum'
@@ -134,6 +135,9 @@ caseStmt = 'case' expr ':'? COMMENT?
tryStmt = 'try' colcom stmt &(IND{=}? 'except'|'finally')
(IND{=}? 'except' exprList colcom stmt)*
(IND{=}? 'finally' colcom stmt)?
tryExpr = 'try' colcom stmt &(optInd 'except'|'finally')
(optInd 'except' exprList colcom stmt)*
(optInd 'finally' colcom stmt)?
exceptBlock = 'except' colcom stmt
forStmt = 'for' (identWithPragma ^+ comma) 'in' expr colcom stmt
blockStmt = 'block' symbol? colcom stmt
@@ -166,7 +170,6 @@ object = 'object' pragma? ('of' typeDesc)? COMMENT? objectPart
typeClassParam = ('var')? symbol
typeClass = typeClassParam ^* ',' (pragma)? ('of' typeDesc ^* ',')?
&IND{>} stmt
distinct = 'distinct' optInd typeDesc
typeDef = identWithPragma genericParamList? '=' optInd typeDefAux
indAndComment?
varTuple = '(' optInd identWithPragma ^+ comma optPar ')' '=' optInd expr

View File

@@ -16,7 +16,7 @@
Nimrod differs from many other compilers in that it is really fast,
and being so fast makes it suited to provide external queries for
text editors about the source code being written. Through the
`idetools`:idx: command of `the compiler <nimrodc.html>`_, any IDE
``idetools`` command of `the compiler <nimrodc.html>`_, any IDE
can query a ``.nim`` source file and obtain useful information like
definition of symbols or suggestions for completion.
@@ -169,7 +169,7 @@ clicks it and after a second or two the IDE displays where that
symbol is defined. Such latencies would be terrible for features
like symbol suggestion, plus why wait at all if we can avoid it?
The idetools command can be run as a compiler service (`caas`:idx:),
The idetools command can be run as a compiler service (CAAS),
where you first launch the compiler and it will stay online as a
server, accepting queries in a telnet like fashion. The advantage
of staying on is that for many queries the compiler can cache the
@@ -528,7 +528,7 @@ suite is not integrated with the main test suite and you have to
run it manually. First you have to compile the tester::
$ cd my/nimrod/checkout/tests
$ nimrod c caasdriver.nim
$ nimrod c testament/caasdriver.nim
Running the ``caasdriver`` without parameters will attempt to process
all the test cases in all three operation modes. If a test succeeds

View File

@@ -59,6 +59,8 @@ And for a debug version compatible with GDB::
The ``koch`` program is Nimrod's maintenance script. It is a replacement for
make and shell scripting with the advantage that it is much more portable.
More information about its options can be found in the `koch <koch.html>`_
documentation.
Coding Guidelines
@@ -69,8 +71,10 @@ Coding Guidelines
* Max line length is 80 characters.
* Provide spaces around binary operators if that enhances readability.
* Use a space after a colon, but not before it.
* Start types with a capital ``T``, unless they are pointers/references which start
with ``P``.
* Start types with a capital ``T``, unless they are pointers/references which
start with ``P``.
See also the `API naming design <apis.html>`_ document.
Porting to new platforms
@@ -156,7 +160,7 @@ generator.
Compilation cache
=================
The implementation of the `compilation cache`:idx: is tricky: There are lots
The implementation of the compilation cache is tricky: There are lots
of issues to be solved for the front- and backend. In the following
sections *global* means *shared between modules* or *property of the whole
program*.

144
doc/koch.txt Normal file
View File

@@ -0,0 +1,144 @@
===============================
Nimrod maintenance script
===============================
:Version: |nimrodversion|
.. contents::
.. raw:: html
<blockquote><p>
"A great chef is an artist that I truly respect" -- Robert Stack.
</p></blockquote>
Introduction
============
The `koch`:idx: program is Nimrod's maintenance script. It is a replacement
for make and shell scripting with the advantage that it is much more portable.
The word *koch* means *cook* in German. ``koch`` is used mainly to build the
Nimrod compiler, but it can also be used for other tasks. This document
describes the supported commands and their options.
Commands
========
boot command
------------
The `boot`:idx: command bootstraps the compiler, and it accepts different
options:
-d:release
By default a debug version is created, passing this option will
force a release build, which is much faster and should be preferred
unless you are debugging the compiler.
-d:tinyc
Include the `Tiny C <http://bellard.org/tcc/>`_ backend. This
option is not supported on Windows.
-d:useGnuReadline
Includes the `rdstdin module <rdstdin.html>`_ for `interactive
mode <nimrodc.html#nimrod-interactive-mode>`_ (aka ``nimrod i``).
This is not needed on Windows. On other platforms this may
incorporate the GNU readline library.
-d:nativeStacktrace
Use native stack traces (only for Mac OS X or Linux).
-d:noCaas
Builds Nimrod without compiler as a service (CAAS) support. CAAS
support is required for functionality like Nimrod's `idetools
<idetools.html>`_ command used to integrate the compiler with
`external IDEs <https://github.com/Araq/Nimrod/wiki/Editor-Support>`_.
-d:avoidTimeMachine
Only for Mac OS X, activating this switch will force excluding
the generated ``nimcache`` directories from Time Machine backups.
By default ``nimcache`` directories will be included in backups,
and just for the Nimrod compiler itself it means backing up 20MB
of generated files each time you update the compiler. Using this
option will make the compiler invoke the `tmutil
<https://developer.apple.com/library/mac/documentation/Darwin/Reference/Manpages/man8/tmutil.8.html>`_
command on all ``nimcache`` directories, setting their backup
exclusion bit.
You can use the following command to locate all ``nimcache``
directories and check their backup exclusion bit::
$ find . -type d -name nimcache -exec tmutil isexcluded \{\} \;
-d:useFFI
Nimrod code can use the `foreign function interface (FFI)
<manual.html#foreign-function-interface>`_ at runtime, but macros
are limited to pure Nimrod code at compilation time. Enabling
this switch will allow macros to execute non-nimrod code at
compilation time (eg. open a file and write to it).
--gc:refc|v2|markAndSweep|boehm|none
Selects which garbage collection strategy to use for the compiler
and generated code. See the `Nimrod's Garbage Collector <gc.html>`_
documentation for more information.
After compilation is finished you will hopefully end up with the nimrod
compiler in the ``bin`` directory. You can add Nimrod's ``bin`` directory to
your ``$PATH`` or use the `install command`_ to place it where it will be
found.
clean command
-------------
The `clean`:idx: command removes all generated files.
csource command
---------------
The `csource`:idx: command builds the C sources for installation. It accepts
the same options as you would pass to the `boot command`_.
inno command
------------
The `inno`:idx: command builds the `Inno Setup installer for Windows
<http://www.jrsoftware.org/isinfo.php>`_.
install command
---------------
The `install`:idx: command installs Nimrod to the specified directory, which
is required as a parameter. For example, on Unix platforms you could run::
$ ./koch install /usr/local/bin
temp command
------------
The temp command builds the Nimrod compiler but with a different final name
(``nimrod_temp``), so it doesn't overwrite your normal compiler. You can use
this command to test different options, the same you would issue for the `boot
command`_.
test command
------------
The `test`:idx: command can also be invoked with the alias ``tests``. This
command will compile and run ``tests/testament/tester.nim``, which is the main
driver of Nimrod's test suite. You can pass options to the ``test`` command,
they will be forwarded to the tester. See its source code for available
options.
update command
--------------
The `update`:idx: command updates Nimrod to the latest version from github.
For this command to work you need to have compiled ``koch`` itself with the
``-d:withUpdate`` switch.
web command
-----------
The `web`:idx: command converts the documentation in the ``doc`` directory
from rst to HTML. It also repeats the same operation but places the result in
the ``web/upload`` which can be used to update the website at
http://nimrod-lang.org.
zip command
-----------
The `zip`:idx: command builds the installation ZIP package.

View File

@@ -535,7 +535,7 @@ Database support
* `odbcsql <odbcsql.html>`_
interface to the ODBC driver.
* `sphinx <sphinx.html>`_
Nimrod wrapper for ``shpinx``.
Nimrod wrapper for ``sphinx``.
XML Processing

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
=================================
nimgrep User's manual
=================================
=========================
nimgrep User's manual
=========================
:Author: Andreas Rumpf
:Version: 0.9

View File

@@ -1,6 +1,6 @@
=================================
niminst User's manual
=================================
=========================
niminst User's manual
=========================
:Author: Andreas Rumpf
:Version: |nimrodversion|

View File

@@ -87,6 +87,18 @@ Level Description
for compiler developers.
===== ============================================
Compile time symbols
--------------------
Through the ``-d:x`` or ``--define:x`` switch you can define compile time
symbols for conditional compilation. The defined switches can be checked in
source code with the `when statement <manual.html#when-statement>`_ and
`defined proc <system.html#defined>`_. The typical use of this switch is to
enable builds in release mode (``-d:release``) where certain safety checks are
omitted for better performance. Another common use is the ``-d:ssl`` switch to
activate `SSL sockets <sockets.html>`_.
Configuration files
-------------------
@@ -116,7 +128,7 @@ The default build of a project is a `debug build`:idx:. To compile a
Search path handling
--------------------
Nimrod has the concept of a global `search path`:idx: (PATH) that is queried to
Nimrod has the concept of a global search path (PATH) that is queried to
determine where to find imported modules or include files. If multiple files are
found an ambiguity error is produced.
@@ -167,14 +179,14 @@ might contain some cruft even when dead code elimination is turned on. So
the final release build should be done with ``--symbolFiles:off``.
Due to the aggregation of C code it is also recommended that each project
resists in its own directory so that the generated ``nimcache`` directory
resides in its own directory so that the generated ``nimcache`` directory
is not shared between different projects.
Cross compilation
=================
To `cross compile`:idx:, use for example::
To cross compile, use for example::
nimrod c --cpu:i386 --os:linux --compile_only --gen_script myproject.nim
@@ -200,7 +212,7 @@ DLL generation
Nimrod supports the generation of DLLs. However, there must be only one
instance of the GC per process/address space. This instance is contained in
``nimrtl.dll``. This means that every generated Nimrod `DLL`:idx: depends
``nimrtl.dll``. This means that every generated Nimrod DLL depends
on ``nimrtl.dll``. To generate the "nimrtl.dll" file, use the command::
nimrod c -d:release lib/nimrtl.nim
@@ -251,7 +263,7 @@ generator and are subject to change.
NoDecl pragma
-------------
The `noDecl`:idx: pragma can be applied to almost any symbol (variable, proc,
The ``noDecl`` pragma can be applied to almost any symbol (variable, proc,
type, etc.) and is sometimes useful for interoperability with C:
It tells Nimrod that it should not generate a declaration for the symbol in
the C code. For example:
@@ -268,7 +280,7 @@ However, the ``header`` pragma is often the better alternative.
Header pragma
-------------
The `header`:idx: pragma is very similar to the ``noDecl`` pragma: It can be
The ``header`` pragma is very similar to the ``noDecl`` pragma: It can be
applied to almost any symbol and specifies that it should not be declared
and instead the generated code should contain an ``#include``:
@@ -287,7 +299,7 @@ encloses the header file in ``""`` in the generated C code.
IncompleteStruct pragma
-----------------------
The `incompleteStruct`:idx: pragma tells the compiler to not use the
The ``incompleteStruct`` pragma tells the compiler to not use the
underlying C ``struct`` in a ``sizeof`` expression:
.. code-block:: Nimrod
@@ -298,7 +310,7 @@ underlying C ``struct`` in a ``sizeof`` expression:
Compile pragma
--------------
The `compile`:idx: pragma can be used to compile and link a C/C++ source file
The ``compile`` pragma can be used to compile and link a C/C++ source file
with the project:
.. code-block:: Nimrod
@@ -311,7 +323,7 @@ of the file.
Link pragma
-----------
The `link`:idx: pragma can be used to link an additional file with the project:
The ``link`` pragma can be used to link an additional file with the project:
.. code-block:: Nimrod
{.link: "myfile.o".}
@@ -319,7 +331,7 @@ The `link`:idx: pragma can be used to link an additional file with the project:
PassC pragma
------------
The `passC`:idx: pragma can be used to pass additional parameters to the C
The ``passC`` pragma can be used to pass additional parameters to the C
compiler like you would using the commandline switch ``--passC``:
.. code-block:: Nimrod
@@ -333,7 +345,7 @@ embed parameters from an external command at compile time:
PassL pragma
------------
The `passL`:idx: pragma can be used to pass additional parameters to the linker
The ``passL`` pragma can be used to pass additional parameters to the linker
like you would using the commandline switch ``--passL``:
.. code-block:: Nimrod
@@ -348,7 +360,7 @@ embed parameters from an external command at compile time:
Emit pragma
-----------
The `emit`:idx: pragma can be used to directly affect the output of the
The ``emit`` pragma can be used to directly affect the output of the
compiler's code generator. So it makes your code unportable to other code
generators/backends. Its usage is highly discouraged! However, it can be
extremely useful for interfacing with `C++`:idx: or `Objective C`:idx: code.
@@ -360,20 +372,23 @@ Example:
static int cvariable = 420;
""".}
proc embedsC() {.noStackFrame.} =
{.push stackTrace:off.}
proc embedsC() =
var nimrodVar = 89
# use backticks to access Nimrod symbols within an emit section:
{.emit: """fprintf(stdout, "%d\n", cvariable + (int)`nimrodVar`);""".}
{.pop.}
embedsC()
ImportCpp pragma
----------------
The `importcpp`:idx: pragma can be used to import `C++`:idx: methods. The
generated code then uses the C++ method calling syntax: ``obj->method(arg)``.
In addition with the ``header`` and ``emit`` pragmas this allows *sloppy*
interfacing with libraries written in C++:
Similar to the `importc pragma for C <manual.html#importc-pragma>`_, the
``importcpp`` pragma can be used to import `C++`:idx: methods. The generated
code then uses the C++ method calling syntax: ``obj->method(arg)``. In
addition with the ``header`` and ``emit`` pragmas this allows *sloppy*
interfacing with libraries written in C++:
.. code-block:: Nimrod
# Horrible example of how to interface with a C++ engine ... ;-)
@@ -408,11 +423,11 @@ emits C++ code.
ImportObjC pragma
-----------------
The `importobjc`:idx: pragma can be used to import `Objective C`:idx: methods.
The generated code then uses the Objective C method calling
syntax: ``[obj method param1: arg]``.
In addition with the ``header`` and ``emit`` pragmas this allows *sloppy*
interfacing with libraries written in Objective C:
Similar to the `importc pragma for C <manual.html#importc-pragma>`_, the
``importobjc`` pragma can be used to import `Objective C`:idx: methods. The
generated code then uses the Objective C method calling syntax: ``[obj method
param1: arg]``. In addition with the ``header`` and ``emit`` pragmas this
allows *sloppy* interfacing with libraries written in Objective C:
.. code-block:: Nimrod
# horrible example of how to interface with GNUStep ...
@@ -458,7 +473,7 @@ emits Objective C code.
CodegenDecl pragma
------------------
The `codegenDecl`:idx: pragma can be used to directly influence Nimrod's code
The ``codegenDecl`` pragma can be used to directly influence Nimrod's code
generator. It receives a format string that determines how the variable or
proc is declared in the generated code:
@@ -473,7 +488,7 @@ proc is declared in the generated code:
InjectStmt pragma
-----------------
The `injectStmt`:idx: pragma can be used to inject a statement before every
The ``injectStmt`` pragma can be used to inject a statement before every
other statement in the current module. It is only supposed to be used for
debugging:
@@ -485,28 +500,28 @@ debugging:
LineDir option
--------------
The `lineDir`:idx: option can be turned on or off. If turned on the
The ``lineDir`` option can be turned on or off. If turned on the
generated C code contains ``#line`` directives. This may be helpful for
debugging with GDB.
StackTrace option
-----------------
If the `stackTrace`:idx: option is turned on, the generated C contains code to
If the ``stackTrace`` option is turned on, the generated C contains code to
ensure that proper stack traces are given if the program crashes or an
uncaught exception is raised.
LineTrace option
----------------
The `lineTrace`:idx: option implies the ``stackTrace`` option. If turned on,
The ``lineTrace`` option implies the ``stackTrace`` option. If turned on,
the generated C contains code to ensure that proper stack traces with line
number information are given if the program crashes or an uncaught exception
is raised.
Debugger option
---------------
The `debugger`:idx: option enables or disables the *Embedded Nimrod Debugger*.
The ``debugger`` option enables or disables the *Embedded Nimrod Debugger*.
See the documentation of endb_ for further information.
@@ -518,7 +533,7 @@ ENDB. See the documentation of `endb <endb.html>`_ for further information.
Volatile pragma
---------------
The `volatile`:idx: pragma is for variables only. It declares the variable as
The ``volatile`` pragma is for variables only. It declares the variable as
``volatile``, whatever that means in C/C++ (its semantics are not well defined
in C/C++).
@@ -530,14 +545,24 @@ DynlibOverride
By default Nimrod's ``dynlib`` pragma causes the compiler to generate
``GetProcAddress`` (or their Unix counterparts)
calls to bind to a DLL. With the `dynlibOverride`:idx: command line switch this
calls to bind to a DLL. With the ``dynlibOverride`` command line switch this
can be prevented and then via ``--passL`` the static library can be linked
against. For instance, to link statically against Lua this command might work
on Linux::
nimrod c --dynlibOverride:lua --passL:liblua.lib program.nim
Backend language options
========================
The typical compiler usage involves using the ``compile`` or ``c`` command to
transform a ``.nim`` file into one or more ``.c`` files which are then
compiled with the platform's C compiler into a static binary. However there
are other commands to compile to C++, Objective-C or Javascript. More details
can be read in the `Nimrod Backend Integration document <backends.html>`_.
Nimrod documentation tools
==========================
@@ -556,7 +581,7 @@ for further information.
Nimrod interactive mode
=======================
The Nimrod compiler supports an `interactive mode`:idx:. This is also known as
The Nimrod compiler supports an interactive mode. This is also known as
a `REPL`:idx: (*read eval print loop*). If Nimrod has been built with the
``-d:useGnuReadline`` switch, it uses the GNU readline library for terminal
input management. To start Nimrod in interactive mode use the command
@@ -681,35 +706,3 @@ efficient:
else: quit(errorStr(p, "expected: console or gui"))
of "license": c.license = UnixToNativePath(k.value)
else: quit(errorStr(p, "unknown variable: " & k.key))
The JavaScript target
=====================
Nimrod can also generate `JavaScript`:idx: code. However, the
JavaScript code generator is experimental!
Nimrod targets JavaScript 1.5 which is supported by any widely used browser.
Since JavaScript does not have a portable means to include another module,
Nimrod just generates a long ``.js`` file.
Features or modules that the JavaScript platform does not support are not
available. This includes:
* manual memory management (``alloc``, etc.)
* casting and other unsafe operations (``cast`` operator, ``zeroMem``, etc.)
* file management
* most modules of the Standard library
* proper 64 bit integer arithmetic
* unsigned integer arithmetic
However, the modules `strutils`:idx:, `math`:idx:, and `times`:idx: are
available! To access the DOM, use the `dom`:idx: module that is only
available for the JavaScript platform.
To compile a Nimrod module into a ``.js`` file use the ``js`` command; the
default is a ``.js`` file that is supposed to be referenced in an ``.html``
file. However, you can also run the code with `nodejs`:idx:\:
nimrod js -d:nodejs -r examples/hallo.nim

98
doc/spawn.txt Normal file
View File

@@ -0,0 +1,98 @@
==========================================================
Parallel & Spawn
==========================================================
Nimrod has two flavors of parallelism:
1) `Structured`:idx: parallelism via the ``parallel`` statement.
2) `Unstructured`:idx: parallelism via the standalone ``spawn`` statement.
Both need the `threadpool <threadpool.html>`_ module to work.
Somewhat confusingly, ``spawn`` is also used in the ``parallel`` statement
with slightly different semantics. ``spawn`` always takes a call expression of
the form ``f(a, ...)``. Let ``T`` be ``f``'s return type. If ``T`` is ``void``
then ``spawn``'s return type is also ``void``. Within a ``parallel`` section
``spawn``'s return type is ``T``, otherwise it is ``FlowVar[T]``.
The compiler can ensure the location in ``location = spawn f(...)`` is not
read prematurely within a ``parallel`` section and so there is no need for
the overhead of an indirection via ``FlowVar[T]`` to ensure correctness.
Parallel statement
==================
Example:
.. code-block:: nimrod
# Compute PI in an inefficient way
import strutils, math, threadpool
proc term(k: float): float = 4 * math.pow(-1, k) / (2*k + 1)
proc pi(n: int): float =
var ch = newSeq[float](n+1)
parallel:
for k in 0..ch.high:
ch[k] = spawn term(float(k))
for k in 0..ch.high:
result += ch[k]
echo formatFloat(pi(5000))
The parallel statement is the preferred mechanism to introduce parallelism
in a Nimrod program. A subset of the Nimrod language is valid within a
``parallel`` section. This subset is checked to be free of data races at
compile time. A sophisticated `disjoint checker`:idx: ensures that no data
races are possible even though shared memory is extensively supported!
The subset is in fact the full language with the following
restrictions / changes:
* ``spawn`` within a ``parallel`` section has special semantics.
* Every location of the form ``a[i]`` and ``a[i..j]`` and ``dest`` where
``dest`` is part of the pattern ``dest = spawn f(...)`` has to be
provable disjoint. This is called the *disjoint check*.
* Every other complex location ``loc`` that is used in a spawned
proc (``spawn f(loc)``) has to be immutable for the duration of
the ``parallel`` section. This is called the *immutability check*. Currently
it is not specified what exactly "complex location" means. We need to make
this an optimization!
* Every array access has to be provable within bounds. This is called
the *bounds check*.
* Slices are optimized so that no copy is performed. This optimization is not
yet performed for ordinary slices outside of a ``parallel`` section. Slices
are also special in that they currently do not support negative indexes!
Spawn statement
===============
A standalone ``spawn`` statement is a simple construct. It executes
the passed expression on the thread pool and returns a `data flow variable`:idx:
``FlowVar[T]`` that can be read from. The reading with the ``^`` operator is
**blocking**. However, one can use ``awaitAny`` to wait on multiple flow
variables at the same time:
.. code-block:: nimrod
import threadpool, ...
# wait until 2 out of 3 servers received the update:
proc main =
var responses = newSeq[RawFlowVar](3)
for i in 0..2:
responses[i] = spawn tellServer(Update, "key", "value")
var index = awaitAny(responses)
assert index >= 0
responses.del(index)
discard awaitAny(responses)
Like the ``parallel`` statement data flow variables ensure that no data races
are possible. Due to technical limitations not every type ``T`` is possible in
a data flow variable: ``T`` has to be of the type ``ref``, ``string``, ``seq``
or of a type that doesn't contain a type that is garbage collected. This
restriction will be removed in the future.

View File

@@ -1,4 +1,3 @@
================================
Substitution Expressions (subex)
================================

View File

@@ -1,6 +1,6 @@
=====
Tools
=====
===========================
Tools available with Nimrod
===========================
The standard distribution ships with the following tools:

View File

@@ -1,275 +0,0 @@
=========================================================
Term rewriting macros for Nimrod
=========================================================
:Author: Andreas Rumpf
Term rewriting macros are macros or templates that have not only a *name* but
also a *pattern* that is searched for after the semantic checking phase of
the compiler: This means they provide an easy way to enhance the compilation
pipeline with user defined optimizations:
.. code-block:: nimrod
template optMul{`*`(a, 2)}(a: int): int = a+a
let x = 3
echo x * 2
The compiler now rewrites ``x * 2`` as ``x + x``. The code inside the
curlies is the pattern to match against. The operators ``*``, ``**``,
``|``, ``~`` have a special meaning in patterns if they are written in infix
notation, so to match verbatim against ``*`` the ordinary function call syntax
needs to be used.
Unfortunately optimizations are hard to get right and even the tiny example
is **wrong**:
.. code-block:: nimrod
template optMul{`*`(a, 2)}(a: int): int = a+a
proc f(): int =
echo "side effect!"
result = 55
echo f() * 2
We cannot duplicate 'a' if it denotes an expression that has a side effect!
Fortunately Nimrod supports side effect analysis:
.. code-block:: nimrod
template optMul{`*`(a, 2)}(a: int{noSideEffect}): int = a+a
proc f(): int =
echo "side effect!"
result = 55
echo f() * 2 # not optimized ;-)
So what about ``2 * a``? We should tell the compiler ``*`` is commutative. We
cannot really do that however as the following code only swaps arguments
blindly:
.. code-block:: nimrod
template mulIsCommutative{`*`(a, b)}(a, b: int): int = b*a
What optimizers really need to do is a *canonicalization*:
.. code-block:: nimrod
template canonMul{`*`(a, b)}(a: int{lit}, b: int): int = b*a
The ``int{lit}`` parameter pattern matches against an expression of
type ``int``, but only if it's a literal.
Parameter constraints
=====================
The parameter constraint expression can use the operators ``|`` (or),
``&`` (and) and ``~`` (not) and the following predicates:
=================== =====================================================
Predicate Meaning
=================== =====================================================
``atom`` The matching node has no children.
``lit`` The matching node is a literal like "abc", 12.
``sym`` The matching node must be a symbol (a bound
identifier).
``ident`` The matching node must be an identifier (an unbound
identifier).
``call`` The matching AST must be a call/apply expression.
``lvalue`` The matching AST must be an lvalue.
``sideeffect`` The matching AST must have a side effect.
``nosideeffect`` The matching AST must have no side effect.
``param`` A symbol which is a parameter.
``genericparam`` A symbol which is a generic parameter.
``module`` A symbol which is a module.
``type`` A symbol which is a type.
``var`` A symbol which is a variable.
``let`` A symbol which is a ``let`` variable.
``const`` A symbol which is a constant.
``result`` The special ``result`` variable.
``proc`` A symbol which is a proc.
``method`` A symbol which is a method.
``iterator`` A symbol which is an iterator.
``converter`` A symbol which is a converter.
``macro`` A symbol which is a macro.
``template`` A symbol which is a template.
``field`` A symbol which is a field in a tuple or an object.
``enumfield`` A symbol which is a field in an enumeration.
``forvar`` A for loop variable.
``label`` A label (used in ``block`` statements).
``nk*`` The matching AST must have the specified kind.
(Example: ``nkIfStmt`` denotes an ``if`` statement.)
``alias`` States that the marked parameter needs to alias
with *some* other parameter.
``noalias`` States that *every* other parameter must not alias
with the marked parameter.
=================== =====================================================
The ``alias`` and ``noalias`` predicates refer not only to the matching AST,
but also to every other bound parameter; syntactically they need to occur after
the ordinary AST predicates:
.. code-block:: nimrod
template ex{a = b + c}(a: int{noalias}, b, c: int) =
# this transformation is only valid if 'b' and 'c' do not alias 'a':
a = b
inc a, b
Pattern operators
=================
The operators ``*``, ``**``, ``|``, ``~`` have a special meaning in patterns
if they are written in infix notation.
The ``|`` operator
------------------
The ``|`` operator if used as infix operator creates an ordered choice:
.. code-block:: nimrod
template t{0|1}(): expr = 3
let a = 1
# outputs 3:
echo a
The matching is performed after the compiler performed some optimizations like
constant folding, so the following does not work:
.. code-block:: nimrod
template t{0|1}(): expr = 3
# outputs 1:
echo 1
The reason is that the compiler already transformed the 1 into "1" for
the ``echo`` statement. However, a term rewriting macro should not change the
semantics anyway. In fact they can be deactivated with the ``--patterns:off``
command line option or temporarily with the ``patterns`` pragma.
The ``{}`` operator
-------------------
A pattern expression can be bound to a pattern parameter via the ``expr{param}``
notation:
.. code-block:: nimrod
template t{(0|1|2){x}}(x: expr): expr = x+1
let a = 1
# outputs 2:
echo a
The ``~`` operator
------------------
The ``~`` operator is the **not** operator in patterns:
.. code-block:: nimrod
template t{x = (~x){y} and (~x){z}}(x, y, z: bool): stmt =
x = y
if x: x = z
var
a = false
b = true
c = false
a = b and c
echo a
The ``*`` operator
------------------
The ``*`` operator can *flatten* a nested binary expression like ``a & b & c``
to ``&(a, b, c)``:
.. code-block:: nimrod
var
calls = 0
proc `&&`(s: varargs[string]): string =
result = s[0]
for i in 1..len(s)-1: result.add s[i]
inc calls
template optConc{ `&&` * a }(a: string): expr = &&a
let space = " "
echo "my" && (space & "awe" && "some " ) && "concat"
# check that it's been optimized properly:
doAssert calls == 1
The second operand of ``*`` must be a parameter; it is used to gather all the
arguments. The expression ``"my" && (space & "awe" && "some " ) && "concat"``
is passed to ``optConc`` in ``a`` as a special list (of kind ``nkArgList``)
which is flattened into a call expression; thus the invocation of ``optConc``
produces:
.. code-block:: nimrod
`&&`("my", space & "awe", "some ", "concat")
The ``**`` operator
-------------------
The ``**`` operator is much like the ``*`` operator, except that it gathers
not only all the arguments, but also the matched operators in reverse polish
notation:
.. code-block:: nimrod
import macros
type
TMatrix = object
dummy: int
proc `*`(a, b: TMatrix): TMatrix = nil
proc `+`(a, b: TMatrix): TMatrix = nil
proc `-`(a, b: TMatrix): TMatrix = nil
proc `$`(a: TMatrix): string = result = $a.dummy
proc mat21(): TMatrix =
result.dummy = 21
macro optM{ (`+`|`-`|`*`) ** a }(a: TMatrix): expr =
echo treeRepr(a)
result = newCall(bindSym"mat21")
var x, y, z: TMatrix
echo x + y * z - x
This passes the expression ``x + y * z - x`` to the ``optM`` macro as
an ``nnkArgList`` node containing::
Arglist
Sym "x"
Sym "y"
Sym "z"
Sym "*"
Sym "+"
Sym "x"
Sym "-"
(Which is the reverse polish notation of ``x + y * z - x``.)
Parameters
==========
Parameters in a pattern are type checked in the matching process. If a
parameter is of the type ``varargs`` it is treated specially and it can match
0 or more arguments in the AST to be matched against:
.. code-block:: nimrod
template optWrite{
write(f, x)
((write|writeln){w})(f, y)
}(x, y: varargs[expr], f: TFile, w: expr) =
w(f, x, y)

View File

@@ -1,4 +1,4 @@
========================
========================
Nimrod Tutorial (Part I)
========================
@@ -110,7 +110,7 @@ useful for embedding HTML code templates for example.
Comments
--------
`Comments`:idx: start anywhere outside a string or character literal with the
Comments start anywhere outside a string or character literal with the
hash character ``#``. Documentation comments start with ``##``. Multiline
comments need to be aligned at the same column:
@@ -224,7 +224,7 @@ different values! For safety use only constant values.
Constants
=========
`Constants`:idx: are symbols which are bound to a value. The constant's value
Constants are symbols which are bound to a value. The constant's value
cannot change. The compiler must be able to evaluate the expression in a
constant declaration at compile time:
@@ -369,7 +369,7 @@ he types in nothing (only presses RETURN).
For statement
-------------
The `for`:idx: statement is a construct to loop over any element an *iterator*
The ``for`` statement is a construct to loop over any element an *iterator*
provides. The example uses the built-in ``countup`` iterator:
.. code-block:: nimrod
@@ -481,7 +481,7 @@ Example:
else:
echo("unknown operating system")
The `when`:idx: statement is almost identical to the ``if`` statement with some
The ``when`` statement is almost identical to the ``if`` statement with some
differences:
* Each condition has to be a constant expression since it is evaluated by the
@@ -791,7 +791,7 @@ However, this cannot be done for mutually recursive procedures:
Here ``odd`` depends on ``even`` and vice versa. Thus ``even`` needs to be
introduced to the compiler before it is completely defined. The syntax for
such a `forward declaration`:idx: is simple: just omit the ``=`` and the
such a forward declaration is simple: just omit the ``=`` and the
procedure's body.
Later versions of the language may get rid of the need for forward
@@ -863,7 +863,7 @@ that are available for them in detail.
Booleans
--------
The `boolean`:idx: type is named ``bool`` in Nimrod and consists of the two
The boolean type is named ``bool`` in Nimrod and consists of the two
pre-defined values ``true`` and ``false``. Conditions in while,
if, elif, when statements need to be of type bool.
@@ -1030,7 +1030,7 @@ Enumeration and object types cannot be defined on the fly, but only within a
Enumerations
------------
A variable of an `enumeration`:idx: type can only be assigned a value of a
A variable of an enumeration type can only be assigned a value of a
limited set. This set consists of ordered symbols. Each symbol is mapped
to an integer value internally. The first symbol is represented
at runtime by 0, the second by 1 and so on. Example:
@@ -1069,7 +1069,7 @@ An explicit ordered enum can have *holes*:
Ordinal types
-------------
Enumerations without holes, integer types, ``char`` and ``bool`` (and
subranges) are called `ordinal`:idx: types. Ordinal types have quite
subranges) are called ordinal types. Ordinal types have quite
a few special operations:
----------------- --------------------------------------------------------
@@ -1094,7 +1094,7 @@ checks turned on.)
Subranges
---------
A `subrange`:idx: type is a range of values from an integer or enumeration type
A subrange type is a range of values from an integer or enumeration type
(the base type). Example:
.. code-block:: nimrod
@@ -1117,7 +1117,7 @@ avoid this common programming error.
Sets
----
The `set type`:idx: models the mathematical notion of a set. The set's
The set type models the mathematical notion of a set. The set's
basetype can only be an ordinal type. The reason is that sets are implemented
as high performance bit vectors.
@@ -1161,7 +1161,7 @@ constants that should be ``or``'ed together.
Arrays
------
An `array`:idx: is a simple fixed length container. Each element in
An array is a simple fixed length container. Each element in
the array has the same type. The array's index type can be any ordinal type.
Arrays can be constructed via ``[]``:
@@ -1253,7 +1253,7 @@ to specify a range from zero to the specified index minus one:
Sequences
---------
`Sequences`:idx: are similar to arrays but of dynamic length which may change
Sequences are similar to arrays but of dynamic length which may change
during runtime (like strings). Since sequences are resizable they are always
allocated on the heap and garbage collected.
@@ -1385,8 +1385,8 @@ Tuples
A tuple type defines various named *fields* and an *order* of the fields.
The constructor ``()`` can be used to construct tuples. The order of the
fields in the constructor must match the order in the tuple's definition.
Different tuple-types are *equivalent* if they specify the same fields of
the same type in the same order.
Different tuple-types are *equivalent* if they specify fields of
the same type and of the same name in the same order.
The assignment operator for tuples copies each component. The notation
``t.field`` is used to access a tuple's field. Another notation is
@@ -1471,7 +1471,7 @@ won't compile:
Reference and pointer types
---------------------------
References (similar to `pointers`:idx: in other programming languages) are a
References (similar to pointers in other programming languages) are a
way to introduce many-to-one relationships. This means different references can
point to and modify the same location in memory.
@@ -1513,7 +1513,7 @@ If a reference points to *nothing*, it has the value ``nil``.
Procedural type
---------------
A `procedural type`:idx: is a (somewhat abstract) pointer to a procedure.
A procedural type is a (somewhat abstract) pointer to a procedure.
``nil`` is an allowed value for a variable of a procedural type.
Nimrod uses procedural types to achieve `functional`:idx: programming
techniques.
@@ -1521,17 +1521,13 @@ techniques.
Example:
.. code-block:: nimrod
proc echoItem(x: int) = echo(x)
type
TCallback = proc (x: int)
proc echoItem(x: Int) = echo(x)
proc forEach(callback: TCallback) =
proc forEach(action: proc (x: int)) =
const
data = [2, 3, 5, 7, 11]
for d in items(data):
callback(d)
action(d)
forEach(echoItem)
@@ -1543,7 +1539,7 @@ listed in the `manual <manual.html>`_.
Modules
=======
Nimrod supports splitting a program into pieces with a `module`:idx: concept.
Nimrod supports splitting a program into pieces with a module concept.
Each module is in its own file. Modules enable `information hiding`:idx: and
`separate compilation`:idx:. A module may gain access to symbols of another
module by the `import`:idx: statement. Only top-level symbols that are marked
@@ -1698,7 +1694,7 @@ define a shorter alias to use when qualifying symbols.
Include statement
-----------------
The `include`:idx: statement does something fundamentally different than
The ``include`` statement does something fundamentally different than
importing a module: it merely includes the contents of a file. The ``include``
statement is useful to split up a large module into several files:

View File

@@ -135,7 +135,7 @@ The ``EInvalidObjectConversion`` exception is raised if ``x`` is not a
Object variants
---------------
Often an object hierarchy is overkill in certain situations where simple
`variant`:idx: types are needed.
variant types are needed.
An example:
@@ -182,7 +182,7 @@ bound to a class. This has disadvantages:
``join`` a string method or an array method?
Nimrod avoids these problems by not assigning methods to a class. All methods
in Nimrod are `multi-methods`:idx:. As we will see later, multi-methods are
in Nimrod are multi-methods. As we will see later, multi-methods are
distinguished from procs only for dynamic binding purposes.
@@ -194,7 +194,7 @@ The syntax ``obj.method(args)`` can be used instead of ``method(obj, args)``.
If there are no remaining arguments, the parentheses can be omitted:
``obj.len`` (instead of ``len(obj)``).
This `method call syntax`:idx: is not restricted to objects, it can be used
This method call syntax is not restricted to objects, it can be used
for any type:
.. code-block:: nimrod
@@ -343,7 +343,7 @@ evaluation or dead code elimination do not work with methods.
Exceptions
==========
In Nimrod `exceptions`:idx: are objects. By convention, exception types are
In Nimrod exceptions are objects. By convention, exception types are
prefixed with an 'E', not 'T'. The `system <system.html>`_ module defines an
exception hierarchy that you might want to stick to. Exceptions derive from
E_Base, which provides the common interface.
@@ -380,7 +380,7 @@ the template ``newException`` in the ``system`` module can be used:
Try statement
-------------
The `try`:idx: statement handles exceptions:
The ``try`` statement handles exceptions:
.. code-block:: nimrod
# read the first two lines of a text file that should contain numbers
@@ -501,7 +501,7 @@ with the file and line where the uncaught exception is being raised, which may
help you locate the offending code which has changed.
If you want to add the ``{.raises.}`` pragma to existing code, the compiler can
also help you. You can add the ``{.effect.}`` pragma statement to your proc and
also help you. You can add the ``{.effects.}`` pragma statement to your proc and
the compiler will output all inferred effects up to that point (exception
tracking is part of Nimrod's effect system). Another more roundabout way to
find out the list of exceptions raised by a proc is to use the Nimrod ``doc2``
@@ -513,7 +513,7 @@ procs with the list of raised exceptions. You can read more about Nimrod's
Generics
========
`Generics`:idx: are Nimrod's means to parametrize procs, iterators or types
Generics are Nimrod's means to parametrize procs, iterators or types
with `type parameters`:idx:. They are most useful for efficient type safe
containers:
@@ -714,9 +714,9 @@ know how the Nimrod concrete syntax is converted to an abstract syntax tree
(AST). The AST is documented in the `macros <macros.html>`_ module.
Once your macro is finished, there are two ways to invoke it:
(1) invoking a macro like a procedure call (`expression macros`:idx:)
(1) invoking a macro like a procedure call (expression macros)
(2) invoking a macro with the special ``macrostmt``
syntax (`statement macros`:idx:)
syntax (statement macros)
Expression Macros

12
examples/cross_calculator/.gitignore vendored Normal file
View File

@@ -0,0 +1,12 @@
# Android specific absolute paths.
android/bin/
android/gen/
android/jni/backend-jni.h
android/libs/
android/local.properties
android/obj/
android/tags
# iOS specific absolute paths
ios/resources/ui/*.m
ios/tags

0
examples/cross_calculator/android/scripts/jnibuild.sh Executable file → Normal file
View File

0
examples/cross_calculator/android/scripts/nimbuild.sh Executable file → Normal file
View File

0
examples/cross_calculator/android/scripts/tags.sh Executable file → Normal file
View File

0
examples/cross_calculator/ios/scripts/tags.sh Executable file → Normal file
View File

View File

View File

@@ -6,6 +6,8 @@
# See the file "copying.txt", included in this
# distribution, for details about the copyright.
#
# See doc/koch.txt for documentation.
#
when defined(gcc) and defined(windows):
when defined(x86):
@@ -152,7 +154,7 @@ proc boot(args: string) =
copyExe(findStartNimrod(), 0.thVersion)
for i in 0..2:
echo "iteration: ", i+1
exec i.thVersion & " cc $# $# compiler" / "nimrod.nim" % [bootOptions, args]
exec i.thVersion & " c $# $# compiler" / "nimrod.nim" % [bootOptions, args]
if sameFileContent(output, i.thVersion):
copyExe(output, finalDest)
echo "executables are equal: SUCCESS!"
@@ -167,7 +169,7 @@ const
cleanExt = [
".ppu", ".o", ".obj", ".dcu", ".~pas", ".~inc", ".~dsk", ".~dpr",
".map", ".tds", ".err", ".bak", ".pyc", ".exe", ".rod", ".pdb", ".idb",
".idx"
".idx", ".ilk"
]
ignore = [
".bzrignore", "nimrod", "nimrod.exe", "koch", "koch.exe", ".gitignore"
@@ -299,7 +301,7 @@ of cmdArgument:
of "boot": boot(op.cmdLineRest)
of "clean": clean(op.cmdLineRest)
of "web": web(op.cmdLineRest)
of "csource": csource(op.cmdLineRest)
of "csource", "csources": csource(op.cmdLineRest)
of "zip": zip(op.cmdLineRest)
of "inno": inno(op.cmdLineRest)
of "install": install(op.cmdLineRest)

View File

@@ -24,6 +24,15 @@ type
FReadDb* = object of FDb ## effect that denotes a read operation
FWriteDb* = object of FDb ## effect that denotes a write operation
proc sql*(query: string): TSqlQuery {.noSideEffect, inline.} =
## constructs a TSqlQuery from the string `query`. This is supposed to be
## used as a raw-string-literal modifier:
## ``sql"update user set counter = counter + 1"``
##
## If assertions are turned off, it does nothing. If assertions are turned
## on, later versions will check the string for valid syntax.
result = TSqlQuery(query)
proc dbError(db: TDbConn) {.noreturn.} =
## raises an EDb exception.
var e: ref EDb

View File

@@ -7,7 +7,8 @@
# distribution, for details about the copyright.
#
## Declaration of the Document Object Model for the JavaScript backend.
## Declaration of the Document Object Model for the `JavaScript backend
## <backends.html#the-javascript-target>`_.
when not defined(js) and not defined(Nimdoc):
{.error: "This module only works on the JavaScript platform".}

View File

@@ -23,7 +23,8 @@
## many options and tweaking, but you are not limited to snippets and can
## generate `LaTeX documents <https://en.wikipedia.org/wiki/LaTeX>`_ too.
import strutils, os, hashes, strtabs, rstast, rst, highlite
import strutils, os, hashes, strtabs, rstast, rst, highlite, tables, sequtils,
algorithm
const
HtmlExt = "html"
@@ -56,6 +57,9 @@ type
currentSection: string ## \
## Stores the empty string or the last headline/overline found in the rst
## document, so it can be used as a prettier name for term index generation.
seenIndexTerms: TTable[string, int] ## \
## Keeps count of same text index terms to generate different identifiers
## for hyperlinks. See renderIndexTerm proc for details.
PDoc = var TRstGenerator ## Alias to type less.
@@ -68,10 +72,16 @@ proc initRstGenerator*(g: var TRstGenerator, target: TOutputTarget,
##
## You need to call this before using a ``TRstGenerator`` with any other
## procs in this module. Pass a non ``nil`` ``PStringTable`` value as
## ``config`` with parameters used by the HTML output generator. If you
## don't know what to use, pass the results of the ``defaultConfig()`` proc.
## The ``filename`` is symbolic and used only for error reporting, you can
## pass any non ``nil`` string here.
## `config` with parameters used by the HTML output generator. If you don't
## know what to use, pass the results of the `defaultConfig()
## <#defaultConfig>_` proc.
##
## The `filename` parameter will be used for error reporting and creating
## index hyperlinks to the file, but you can pass an empty string here if you
## are parsing a stream in memory. If `filename` ends with the ``.nim``
## extension, the title for the document will be set by default to ``Module
## filename``. This default title can be overriden by the embedded rst, but
## it helps to prettify the generated index if no title is found.
##
## The ``TRstParseOptions``, ``TFindFileHandler`` and ``TMsgHandler`` types
## are defined in the the `packages/docutils/rst module <rst.html>`_.
@@ -111,6 +121,10 @@ proc initRstGenerator*(g: var TRstGenerator, target: TOutputTarget,
g.options = options
g.findFile = findFile
g.currentSection = ""
let fileParts = filename.splitFile
if fileParts.ext == ".nim":
g.currentSection = "Module " & fileParts.name
g.seenIndexTerms = initTable[string, int]()
g.msgHandler = msgHandler
let s = config["split.item.toc"]
@@ -120,8 +134,8 @@ proc initRstGenerator*(g: var TRstGenerator, target: TOutputTarget,
proc writeIndexFile*(g: var TRstGenerator, outfile: string) =
## Writes the current index buffer to the specified output file.
##
## You previously need to add entries to the index with the ``setIndexTerm``
## proc. If the index is empty the file won't be created.
## You previously need to add entries to the index with the `setIndexTerm()
## <#setIndexTerm>`_ proc. If the index is empty the file won't be created.
if g.theIndex.len > 0: writeFile(outfile, g.theIndex)
proc addXmlChar(dest: var string, c: char) =
@@ -207,6 +221,9 @@ proc dispA(target: TOutputTarget, dest: var string,
if target != outLatex: addf(dest, xml, args)
else: addf(dest, tex, args)
proc `or`(x, y: string): string {.inline.} =
result = if x.isNil: y else: x
proc renderRstToOut*(d: var TRstGenerator, n: PRstNode, result: var string)
## Writes into ``result`` the rst ast ``n`` using the ``d`` configuration.
##
@@ -224,7 +241,7 @@ proc renderRstToOut*(d: var TRstGenerator, n: PRstNode, result: var string)
proc renderAux(d: PDoc, n: PRstNode, result: var string) =
for i in countup(0, len(n)-1): renderRstToOut(d, n.sons[i], result)
proc renderAux(d: PDoc, n: PRstNode, frmtA, frmtB: string, result: var string) =
proc renderAux(d: PDoc, n: PRstNode, frmtA, frmtB: string, result: var string) =
var tmp = ""
for i in countup(0, len(n)-1): renderRstToOut(d, n.sons[i], tmp)
if d.target != outLatex:
@@ -254,25 +271,45 @@ proc setIndexTerm*(d: var TRstGenerator, id, term: string,
linkTitle, linkDesc = "") =
## Adds a `term` to the index using the specified hyperlink identifier.
##
## The ``d.theIndex`` string will be used to append the term in the format
## ``term<tab>file#id``. The anchor will be the based on the name of the file
## currently being parsed plus the `id`, which will be appended after a hash.
## A new entry will be added to the index using the format
## ``term<tab>file#id``. The file part will come from the `filename`
## parameter used in a previous call to the `initRstGenerator()
## <#initRstGenerator>`_ proc.
##
## The `id` will be appended with a hash character only if its length is not
## zero, otherwise no specific anchor will be generated. In general you
## should only pass an empty `id` value for the title of standalone rst
## documents (they are special for the `mergeIndexes() <#mergeIndexes>`_
## proc, see `Index (idx) file format <docgen.html#index-idx-file-format>`_
## for more information). Unlike other index terms, title entries are
## inserted at the beginning of the accumulated buffer to maintain a logical
## order of entries.
##
## If `linkTitle` or `linkDesc` are not the empty string, two additional
## columns with their contents will be added.
##
## The index won't be written to disk unless you call ``writeIndexFile``. The
## purpose of the index is documented in the `docgen tools guide
## <docgen.html#index-switch>`_.
d.theIndex.add(term)
d.theIndex.add('\t')
## The index won't be written to disk unless you call `writeIndexFile()
## <#writeIndexFile>`_. The purpose of the index is documented in the `docgen
## tools guide <docgen.html#index-switch>`_.
assert(not d.theIndex.isNil)
var
entry = term
isTitle = false
entry.add('\t')
let htmlFile = changeFileExt(extractFilename(d.filename), HtmlExt)
d.theIndex.add(htmlFile)
d.theIndex.add('#')
d.theIndex.add(id)
entry.add(htmlFile)
if id.len > 0:
entry.add('#')
entry.add(id)
else:
isTitle = true
if linkTitle.len > 0 or linkDesc.len > 0:
d.theIndex.add('\t' & linkTitle.quoteIndexColumn)
d.theIndex.add('\t' & linkDesc.quoteIndexColumn)
d.theIndex.add("\n")
entry.add('\t' & linkTitle.quoteIndexColumn)
entry.add('\t' & linkDesc.quoteIndexColumn)
entry.add("\n")
if isTitle: d.theIndex.insert(entry)
else: d.theIndex.add(entry)
proc hash(n: PRstNode): int =
if n.kind == rnLeaf:
@@ -283,8 +320,20 @@ proc hash(n: PRstNode): int =
result = result !& hash(n.sons[i])
result = !$result
proc renderIndexTerm(d: PDoc, n: PRstNode, result: var string) =
let id = rstnodeToRefname(n) & '_' & $abs(hash(n))
proc renderIndexTerm*(d: PDoc, n: PRstNode, result: var string) =
## Renders the string decorated within \`foobar\`\:idx\: markers.
##
## Additionally adds the encosed text to the index as a term. Since we are
## interested in different instances of the same term to have different
## entries, a table is used to keep track of the amount of times a term has
## previously appeared to give a different identifier value for each.
let refname = n.rstnodeToRefname
if d.seenIndexTerms.hasKey(refname):
d.seenIndexTerms[refname] = d.seenIndexTerms[refname] + 1
else:
d.seenIndexTerms[refname] = 1
let id = refname & '_' & $d.seenIndexTerms[refname]
var term = ""
renderAux(d, n, term)
setIndexTerm(d, id, term, d.currentSection)
@@ -298,12 +347,34 @@ type
linkTitle: string ## If not nil, contains a prettier text for the href
linkDesc: string ## If not nil, the title attribute of the final href
TIndexedDocs {.pure, final.} = TTable[TIndexEntry, seq[TIndexEntry]] ## \
## Contains the index sequences for doc types.
##
## The key is a *fake* TIndexEntry which will contain the title of the
## document in the `keyword` field and `link` will contain the html
## filename for the document. `linkTitle` and `linkDesc` will be nil.
##
## The value indexed by this TIndexEntry is a sequence with the real index
## entries found in the ``.idx`` file.
proc cmp(a, b: TIndexEntry): int =
## Sorts two ``TIndexEntry`` first by `keyword` field, then by `link`.
result = cmpIgnoreStyle(a.keyword, b.keyword)
if result == 0:
result = cmpIgnoreStyle(a.link, b.link)
proc hash(x: TIndexEntry): THash =
## Returns the hash for the combined fields of the type.
##
## The hash is computed as the chained hash of the individual string hashes.
assert(not x.keyword.isNil)
assert(not x.link.isNil)
result = x.keyword.hash !& x.link.hash
result = result !& (x.linkTitle or "").hash
result = result !& (x.linkDesc or "").hash
result = !$result
proc `<-`(a: var TIndexEntry, b: TIndexEntry) =
shallowCopy a.keyword, b.keyword
shallowCopy a.link, b.link
@@ -332,43 +403,18 @@ proc sortIndex(a: var openArray[TIndexEntry]) =
a[j] <- v
if h == 1: break
proc mergeIndexes*(dir: string): string =
## merges all index files in `dir` and returns the generated index as HTML.
## The result is no full HTML for flexibility.
var a: seq[TIndexEntry]
newSeq(a, 15_000)
setLen(a, 0)
var L = 0
for kind, path in walkDir(dir):
if kind == pcFile and path.endsWith(IndexExt):
for line in lines(path):
let s = line.find('\t')
if s < 0: continue
setLen(a, L+1)
a[L].keyword = line.substr(0, s-1)
a[L].link = line.substr(s+1)
if a[L].link.find('\t') > 0:
let extraCols = a[L].link.split('\t')
a[L].link = extraCols[0]
assert extraCols.len == 3
a[L].linkTitle = extraCols[1].unquoteIndexColumn
a[L].linkDesc = extraCols[2].unquoteIndexColumn
else:
a[L].linkTitle = nil
a[L].linkDesc = nil
inc L
sortIndex(a)
proc generateSymbolIndex(symbols: seq[TIndexEntry]): string =
result = ""
var i = 0
while i < L:
result.addf("<dt><span>$1</span></dt><ul class=\"simple\"><dd>\n",
[a[i].keyword])
while i < symbols.len:
result.addf("<dt><span>$1:</span></dt><ul class=\"simple\"><dd>\n",
[symbols[i].keyword])
var j = i
while j < L and a[i].keyword == a[j].keyword:
while j < symbols.len and symbols[i].keyword == symbols[j].keyword:
let
url = a[j].link
text = if not a[j].linkTitle.isNil: a[j].linkTitle else: url
desc = if not a[j].linkDesc.isNil: a[j].linkDesc else: ""
url = symbols[j].link
text = if not symbols[j].linkTitle.isNil: symbols[j].linkTitle else: url
desc = if not symbols[j].linkDesc.isNil: symbols[j].linkDesc else: ""
if desc.len > 0:
result.addf("""<li><a class="reference external"
title="$3" href="$1">$2</a></li>
@@ -379,9 +425,246 @@ proc mergeIndexes*(dir: string): string =
inc j
result.add("</ul></dd>\n")
i = j
proc isDocumentationTitle(hyperlink: string): bool =
## Returns true if the hyperlink is actually a documentation title.
##
## Documentation titles lack the hash. See `mergeIndexes() <#mergeIndexes>`_
## for a more detailed explanation.
result = hyperlink.find('#') < 0
proc stripTOCLevel(s: string): tuple[level: int, text: string] =
## Returns the *level* of the toc along with the text without it.
for c in 0 .. <s.len:
result.level = c
if s[c] != ' ': break
result.text = s[result.level .. <s.len]
proc indentToLevel(level: var int, newLevel: int): string =
## Returns the sequence of <ul>|</ul> characters to switch to `newLevel`.
##
## The amount of lists added/removed will be based on the `level` variable,
## which will be reset to `newLevel` at the end of the proc.
result = ""
if level == newLevel:
return
if newLevel > level:
result = repeatStr(newLevel - level, "<ul>")
else:
result = repeatStr(level - newLevel, "</ul>")
level = newLevel
proc generateDocumentationTOC(entries: seq[TIndexEntry]): string =
## Returns the sequence of index entries in an HTML hierarchical list.
result = ""
# Build a list of levels and extracted titles to make processing easier.
var
titleRef: string
levels: seq[tuple[level: int, text: string]]
L = 0
level = 1
levels.newSeq(entries.len)
for entry in entries:
let (rawLevel, rawText) = stripTOCLevel(entry.linkTitle or entry.keyword)
if rawLevel < 1:
# This is a normal symbol, push it *inside* one level from the last one.
levels[L].level = level + 1
# Also, ignore the linkTitle and use directly the keyword.
levels[L].text = entry.keyword
else:
# The level did change, update the level indicator.
level = rawLevel
levels[L].level = rawLevel
levels[L].text = rawText
inc L
# Now generate hierarchical lists based on the precalculated levels.
result = "<ul>\n"
level = 1
L = 0
while L < entries.len:
let link = entries[L].link
if link.isDocumentationTitle:
titleRef = link
else:
result.add(level.indentToLevel(levels[L].level))
result.add("<li><a href=\"" & link & "\">" &
levels[L].text & "</a>\n")
inc L
result.add(level.indentToLevel(1) & "</ul>\n")
assert(not titleRef.isNil,
"Can't use this proc on an API index, docs always have a title entry")
proc generateDocumentationIndex(docs: TIndexedDocs): string =
## Returns all the documentation TOCs in an HTML hierarchical list.
result = ""
# Sort the titles to generate their toc in alphabetical order.
var titles = toSeq(keys[TIndexEntry, seq[TIndexEntry]](docs))
sort(titles, cmp)
for title in titles:
let tocList = generateDocumentationTOC(docs[title])
result.add("<ul><li><a href=\"" &
title.link & "\">" & title.keyword & "</a>\n" & tocList & "</ul>\n")
proc generateDocumentationJumps(docs: TIndexedDocs): string =
## Returns a plain list of hyperlinks to documentation TOCs in HTML.
result = "Documents: "
# Sort the titles to generate their toc in alphabetical order.
var titles = toSeq(keys[TIndexEntry, seq[TIndexEntry]](docs))
sort(titles, cmp)
var chunks: seq[string] = @[]
for title in titles:
chunks.add("<a href=\"" & title.link & "\">" & title.keyword & "</a>")
result.add(chunks.join(", ") & ".<br>")
proc generateModuleJumps(modules: seq[string]): string =
## Returns a plain list of hyperlinks to the list of modules.
result = "Modules: "
var chunks: seq[string] = @[]
for name in modules:
chunks.add("<a href=\"" & name & ".html\">" & name & "</a>")
result.add(chunks.join(", ") & ".<br>")
proc readIndexDir(dir: string):
    tuple[modules: seq[string], symbols: seq[TIndexEntry], docs: TIndexedDocs] =
  ## Walks `dir` reading ``.idx`` files converting them in TIndexEntry items.
  ##
  ## Returns the list of found module names, the list of free symbol entries
  ## and the different documentation indexes. The list of modules is sorted.
  ## See the documentation of ``mergeIndexes`` for details.
  result.modules = @[]
  result.docs = initTable[TIndexEntry, seq[TIndexEntry]](32)
  # Preallocate a generous symbol buffer, then reset its length: the
  # setLen(L + 1) calls below then mostly reuse capacity instead of growing.
  newSeq(result.symbols, 15_000)
  setLen(result.symbols, 0)
  var L = 0  # total free symbol entries accumulated across all files
  # Scan index files and build the list of symbols.
  for kind, path in walkDir(dir):
    if kind == pcFile and path.endsWith(IndexExt):
      var
        fileEntries: seq[TIndexEntry]
        title: TIndexEntry  # keyword stays nil unless a title line is seen
        F = 0  # entries parsed from the current file
      newSeq(fileEntries, 500)
      setLen(fileEntries, 0)
      for line in lines(path):
        # Each useful line is ``keyword<TAB>link``; anything else is skipped.
        let s = line.find('\t')
        if s < 0: continue
        setLen(fileEntries, F+1)
        fileEntries[F].keyword = line.substr(0, s-1)
        fileEntries[F].link = line.substr(s+1)
        # See if we detect a title, a link without a `#foobar` trailing part.
        if title.keyword.isNil and fileEntries[F].link.isDocumentationTitle:
          title.keyword = fileEntries[F].keyword
          title.link = fileEntries[F].link
        # The link column may itself carry two more tab separated columns
        # (quoted title and description); split them off when present.
        if fileEntries[F].link.find('\t') > 0:
          let extraCols = fileEntries[F].link.split('\t')
          fileEntries[F].link = extraCols[0]
          assert extraCols.len == 3
          fileEntries[F].linkTitle = extraCols[1].unquoteIndexColumn
          fileEntries[F].linkDesc = extraCols[2].unquoteIndexColumn
        else:
          fileEntries[F].linkTitle = nil
          fileEntries[F].linkDesc = nil
        inc F
      # Depending on type add this to the list of symbols or table of APIs.
      if title.keyword.isNil:
        # No explicit title found: treat the file as a generated API index.
        for i in 0 .. <F:
          # Don't add to symbols TOC entries (they start with a whitespace).
          let toc = fileEntries[i].linkTitle
          if not toc.isNil and toc.len > 0 and toc[0] == ' ':
            continue
          # Ok, non TOC entry, add it.
          setLen(result.symbols, L + 1)
          result.symbols[L] = fileEntries[i]
          inc L
        result.modules.add(path.splitFile.name)
      else:
        # Generate the symbolic anchor for index quickjumps.
        title.linkTitle = "doc_toc_" & $result.docs.len
        result.docs[title] = fileEntries
  sort(result.modules, system.cmp)
proc mergeIndexes*(dir: string): string =
  ## Merges all index files in `dir` and returns the generated index as HTML.
  ##
  ## The proc scans `dir` for ``.idx`` files previously produced by commands
  ## like ``nimrod doc|rst2html`` run with the ``--index:on`` switch. Those
  ## files are plain tab separated text created through `setIndexTerm()
  ## <#setIndexTerm>`_ and `writeIndexFile() <#writeIndexFile>`_.
  ##
  ## Index files fall into two categories. API indices (no explicit title
  ## entry) are merged into one big sorted symbol list, which helps because
  ## the same symbol names repeat across many modules. Documentation indices
  ## (those containing a hash-less title entry) are essentially tables of
  ## contents and are rendered in their own section, preserving the order and
  ## hierarchy recorded in the index file.
  ##
  ## The returned HTML block can be embedded directly into nimdoc templates.
  var (modules, symbols, docs) = readIndexDir(dir)
  assert(not symbols.isNil)
  result = ""
  # Quick jump list of documents, then hyperlinks to the linked modules.
  if docs.len > 0:
    result.add(generateDocumentationJumps(docs) & "<p />")
  if modules.len > 0:
    result.add(generateModuleJumps(modules) & "<p />")
  # Full documentation section followed by the sorted API symbol section.
  if docs.len > 0:
    result.add("<h2>Documentation files</h2>\n" &
               generateDocumentationIndex(docs))
  if symbols.len > 0:
    sortIndex(symbols)
    result.add("<h2>API symbols</h2>\n" & generateSymbolIndex(symbols))
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
proc stripTOCHTML(s: string): string =
  ## Ugly quick hack to remove HTML tags from TOC titles.
  ##
  ## A TTocEntry.header field already contains rendered HTML tags. Instead of
  ## implementing a proper version of renderRstToOut() which recursively
  ## renders an rst tree to plain text, we simply drop everything found
  ## between angled brackets. Given the limited rst allowed inside TOC titles
  ## this is good enough.
  result = s
  var openPos = result.find('<')
  while openPos >= 0:
    let closePos = result.find('>', openPos)
    if closePos < 0:
      # Unmatched '<': give up and keep the remainder untouched.
      return
    result.delete(openPos, closePos)
    openPos = result.find('<', openPos)
proc renderHeadline(d: PDoc, n: PRstNode, result: var string) =
var tmp = ""
for i in countup(0, len(n) - 1): renderRstToOut(d, n.sons[i], tmp)
@@ -393,27 +676,30 @@ proc renderHeadline(d: PDoc, n: PRstNode, result: var string) =
d.tocPart[length].refname = refname
d.tocPart[length].n = n
d.tocPart[length].header = tmp
dispA(d.target, result,
"\n<h$1><a class=\"toc-backref\" id=\"$2\" href=\"#$2_toc\">$3</a></h$1>",
"\\rsth$4{$3}\\label{$2}\n", [$n.level,
d.tocPart[length].refname, tmp,
$chr(n.level - 1 + ord('A'))])
dispA(d.target, result, "\n<h$1><a class=\"toc-backref\" " &
"id=\"$2\" href=\"#$2_toc\">$3</a></h$1>", "\\rsth$4{$3}\\label{$2}\n",
[$n.level, d.tocPart[length].refname, tmp, $chr(n.level - 1 + ord('A'))])
else:
dispA(d.target, result, "\n<h$1 id=\"$2\">$3</h$1>",
"\\rsth$4{$3}\\label{$2}\n", [
$n.level, refname, tmp,
$chr(n.level - 1 + ord('A'))])
# Generate index entry using spaces to indicate TOC level for the output HTML.
assert n.level >= 0
setIndexTerm(d, refname, tmp.stripTOCHTML,
repeatChar(max(0, n.level), ' ') & tmp)
proc renderOverline(d: PDoc, n: PRstNode, result: var string) =
if d.meta[metaTitle].len == 0:
d.currentSection = d.meta[metaTitle]
for i in countup(0, len(n)-1):
renderRstToOut(d, n.sons[i], d.meta[metaTitle])
d.currentSection = d.meta[metaTitle]
elif d.meta[metaSubtitle].len == 0:
d.currentSection = d.meta[metaSubtitle]
for i in countup(0, len(n)-1):
renderRstToOut(d, n.sons[i], d.meta[metaSubtitle])
d.currentSection = d.meta[metaSubtitle]
else:
var tmp = ""
for i in countup(0, len(n) - 1): renderRstToOut(d, n.sons[i], tmp)
@@ -428,7 +714,8 @@ proc renderTocEntry(d: PDoc, e: TTocEntry, result: var string) =
"<li><a class=\"reference\" id=\"$1_toc\" href=\"#$1\">$2</a></li>\n",
"\\item\\label{$1_toc} $2\\ref{$1}\n", [e.refname, e.header])
proc renderTocEntries*(d: var TRstGenerator, j: var int, lvl: int, result: var string) =
proc renderTocEntries*(d: var TRstGenerator, j: var int, lvl: int,
result: var string) =
var tmp = ""
while j <= high(d.tocPart):
var a = abs(d.tocPart[j].n.level)
@@ -572,7 +859,8 @@ proc renderRstToOut(d: PDoc, n: PRstNode, result: var string) =
[tmp])
of rnField: renderField(d, n, result)
of rnFieldName:
renderAux(d, n, "<th class=\"docinfo-name\">$1:</th>", "\\item[$1:]", result)
renderAux(d, n, "<th class=\"docinfo-name\">$1:</th>",
"\\item[$1:]", result)
of rnFieldBody:
renderAux(d, n, "<td>$1</td>", " $1\n", result)
of rnIndex:
@@ -631,8 +919,9 @@ proc renderRstToOut(d: PDoc, n: PRstNode, result: var string) =
of rnRef:
var tmp = ""
renderAux(d, n, tmp)
dispA(d.target, result, "<a class=\"reference external\" href=\"#$2\">$1</a>",
"$1\\ref{$2}", [tmp, rstnodeToRefname(n)])
dispA(d.target, result,
"<a class=\"reference external\" href=\"#$2\">$1</a>",
"$1\\ref{$2}", [tmp, rstnodeToRefname(n)])
of rnStandaloneHyperlink:
renderAux(d, n,
"<a class=\"reference external\" href=\"$1\">$1</a>",
@@ -642,9 +931,9 @@ proc renderRstToOut(d: PDoc, n: PRstNode, result: var string) =
var tmp1 = ""
renderRstToOut(d, n.sons[0], tmp0)
renderRstToOut(d, n.sons[1], tmp1)
dispA(d.target, result, "<a class=\"reference external\" href=\"$2\">$1</a>",
"\\href{$2}{$1}",
[tmp0, tmp1])
dispA(d.target, result,
"<a class=\"reference external\" href=\"$2\">$1</a>",
"\\href{$2}{$1}", [tmp0, tmp1])
of rnDirArg, rnRaw: renderAux(d, n, result)
of rnRawHtml:
if d.target != outLatex:

View File

@@ -846,7 +846,7 @@ var
FE_UPWARD* {.importc, header: "<fenv.h>".}: cint
FE_DFL_ENV* {.importc, header: "<fenv.h>".}: cint
when not defined(haiku):
when not defined(haiku) and not defined(OpenBSD):
var
MM_HARD* {.importc, header: "<fmtmsg.h>".}: cint
## Source of the condition is hardware.
@@ -1578,8 +1578,17 @@ var
## Terminates a record (if supported by the protocol).
MSG_OOB* {.importc, header: "<sys/socket.h>".}: cint
## Out-of-band data.
MSG_NOSIGNAL* {.importc, header: "<sys/socket.h>".}: cint
## No SIGPIPE generated when an attempt to send is made on a stream-oriented socket that is no longer connected.
when defined(macosx):
var
MSG_HAVEMORE* {.importc, header: "<sys/socket.h>".}: cint
MSG_NOSIGNAL* = MSG_HAVEMORE
else:
var
MSG_NOSIGNAL* {.importc, header: "<sys/socket.h>".}: cint
## No SIGPIPE generated when an attempt to send is made on a stream-oriented socket that is no longer connected.
var
MSG_PEEK* {.importc, header: "<sys/socket.h>".}: cint
## Leave received data in queue.
MSG_TRUNC* {.importc, header: "<sys/socket.h>".}: cint
@@ -1816,7 +1825,7 @@ proc feholdexcept*(a1: ptr Tfenv): cint {.importc, header: "<fenv.h>".}
proc fesetenv*(a1: ptr Tfenv): cint {.importc, header: "<fenv.h>".}
proc feupdateenv*(a1: ptr Tfenv): cint {.importc, header: "<fenv.h>".}
when not defined(haiku):
when not defined(haiku) and not defined(OpenBSD):
proc fmtmsg*(a1: int, a2: cstring, a3: cint,
a4, a5, a6: cstring): cint {.importc, header: "<fmtmsg.h>".}

View File

@@ -150,6 +150,15 @@ proc sort*[T](a: var openArray[T],
## # overload:
## sort(myStrArray, system.cmp)
##
## You can inline adhoc comparison procs with the `do notation
## <manual.html#do-notation>`_. Example:
##
## .. code-block:: nimrod
##
## people.sort do (x, y: Person) -> int:
## result = cmp(x.surname, y.surname)
## if result == 0:
## result = cmp(x.name, y.name)
var n = a.len
var b: seq[T]
newSeq(b, n div 2)

View File

@@ -11,8 +11,11 @@ include "system/inclrtl"
import os, oids, tables, strutils, macros
import rawsockets
export TPort
import rawsockets, net
export TPort, TSocketFlags
#{.injectStmt: newGcInvariant().}
## AsyncDispatch
## -------------
@@ -24,10 +27,12 @@ export TPort
## **Note:** This module is still largely experimental.
# TODO: Discarded void PFutures need to checked for exception.
# TODO: Exceptions are currently uncatchable due to the limitation that
# you cannot have yield in a try stmt. Perhaps I can get the macro to put
# a user's try except around ``future.read``.
# TODO: Discarded void PFutures need to be checked for exception.
# TODO: ``except`` statement (without `try`) does not work.
# TODO: Multiple exception names in a ``except`` don't work.
# TODO: The effect system (raises: []) has trouble with my try transformation.
# TODO: Can't await in a 'except' body
# TODO: getCurrentException(Msg) don't work
# -- Futures
@@ -35,19 +40,33 @@ type
PFutureBase* = ref object of PObject
cb: proc () {.closure,gcsafe.}
finished: bool
error*: ref EBase
stackTrace: string ## For debugging purposes only.
PFuture*[T] = ref object of PFutureBase
value: T
error*: ref EBase # TODO: This shouldn't be necessary, generics bug?
proc newFuture*[T](): PFuture[T] =
## Creates a new future.
new(result)
result.finished = false
result.stackTrace = getStackTrace()
proc checkFinished[T](future: PFuture[T]) =
if future.finished:
echo("<----->")
echo(future.stackTrace)
echo("-----")
when T is string:
echo("Contents: ", future.value.repr)
echo("<----->")
echo("Future already finished, cannot finish twice.")
assert false
proc complete*[T](future: PFuture[T], val: T) =
## Completes ``future`` with value ``val``.
assert(not future.finished, "Future already finished, cannot finish twice.")
#assert(not future.finished, "Future already finished, cannot finish twice.")
checkFinished(future)
assert(future.error == nil)
future.value = val
future.finished = true
@@ -56,7 +75,8 @@ proc complete*[T](future: PFuture[T], val: T) =
proc complete*(future: PFuture[void]) =
## Completes a void ``future``.
assert(not future.finished, "Future already finished, cannot finish twice.")
#assert(not future.finished, "Future already finished, cannot finish twice.")
checkFinished(future)
assert(future.error == nil)
future.finished = true
if future.cb != nil:
@@ -64,11 +84,18 @@ proc complete*(future: PFuture[void]) =
proc fail*[T](future: PFuture[T], error: ref EBase) =
## Completes ``future`` with ``error``.
assert(not future.finished, "Future already finished, cannot finish twice.")
#assert(not future.finished, "Future already finished, cannot finish twice.")
checkFinished(future)
future.finished = true
future.error = error
if future.cb != nil:
future.cb()
else:
# This is to prevent exceptions from being silently ignored when a future
# is discarded.
# TODO: This may turn out to be a bad idea.
# Turns out this is a bad idea.
#raise error
proc `callback=`*(future: PFutureBase, cb: proc () {.closure,gcsafe.}) =
## Sets the callback proc to be called when the future completes.
@@ -112,10 +139,19 @@ proc finished*[T](future: PFuture[T]): bool =
## ``True`` may indicate an error or a value. Use ``failed`` to distinguish.
future.finished
proc failed*[T](future: PFuture[T]): bool =
proc failed*(future: PFutureBase): bool =
## Determines whether ``future`` completed with an error.
future.error != nil
proc asyncCheck*[T](future: PFuture[T]) =
## Sets a callback on ``future`` which raises an exception if the future
## finished with an error.
##
## This should be used instead of ``discard`` to discard void futures.
future.callback =
proc () =
if future.failed: raise future.error
when defined(windows) or defined(nimdoc):
import winlean, sets, hashes
type
@@ -130,15 +166,10 @@ when defined(windows) or defined(nimdoc):
ioPort: THandle
handles: TSet[TAsyncFD]
TCustomOverlapped = object
Internal*: DWORD
InternalHigh*: DWORD
Offset*: DWORD
OffsetHigh*: DWORD
hEvent*: THANDLE
TCustomOverlapped = object of TOVERLAPPED
data*: TCompletionData
PCustomOverlapped = ptr TCustomOverlapped
PCustomOverlapped = ref TCustomOverlapped
TAsyncFD* = distinct int
@@ -184,27 +215,27 @@ when defined(windows) or defined(nimdoc):
else: timeout.int32
var lpNumberOfBytesTransferred: DWORD
var lpCompletionKey: ULONG
var lpOverlapped: POverlapped
let res = GetQueuedCompletionStatus(p.ioPort, addr lpNumberOfBytesTransferred,
addr lpCompletionKey, addr lpOverlapped, llTimeout).bool
var customOverlapped: PCustomOverlapped
let res = GetQueuedCompletionStatus(p.ioPort,
addr lpNumberOfBytesTransferred, addr lpCompletionKey,
cast[ptr POverlapped](addr customOverlapped), llTimeout).bool
# http://stackoverflow.com/a/12277264/492186
# TODO: http://www.serverframework.com/handling-multiple-pending-socket-read-and-write-operations.html
var customOverlapped = cast[PCustomOverlapped](lpOverlapped)
if res:
# This is useful for ensuring the reliability of the overlapped struct.
assert customOverlapped.data.sock == lpCompletionKey.TAsyncFD
customOverlapped.data.cb(customOverlapped.data.sock,
lpNumberOfBytesTransferred, TOSErrorCode(-1))
dealloc(customOverlapped)
GC_unref(customOverlapped)
else:
let errCode = osLastError()
if lpOverlapped != nil:
if customOverlapped != nil:
assert customOverlapped.data.sock == lpCompletionKey.TAsyncFD
customOverlapped.data.cb(customOverlapped.data.sock,
lpNumberOfBytesTransferred, errCode)
dealloc(customOverlapped)
GC_unref(customOverlapped)
else:
if errCode.int32 == WAIT_TIMEOUT:
# Timed out
@@ -300,7 +331,8 @@ when defined(windows) or defined(nimdoc):
while it != nil:
# "the OVERLAPPED structure must remain valid until the I/O completes"
# http://blogs.msdn.com/b/oldnewthing/archive/2011/02/02/10123392.aspx
var ol = cast[PCustomOverlapped](alloc0(sizeof(TCustomOverlapped)))
var ol = PCustomOverlapped()
GC_ref(ol)
ol.data = TCompletionData(sock: socket, cb:
proc (sock: TAsyncFD, bytesCount: DWord, errcode: TOSErrorCode) =
if not retFuture.finished:
@@ -328,7 +360,7 @@ when defined(windows) or defined(nimdoc):
success = true
break
else:
dealloc(ol)
GC_unref(ol)
success = false
it = it.ai_next
@@ -338,7 +370,7 @@ when defined(windows) or defined(nimdoc):
return retFuture
proc recv*(socket: TAsyncFD, size: int,
flags: int = 0): PFuture[string] =
flags = {TSocketFlags.SafeDisconn}): PFuture[string] =
## Reads **up to** ``size`` bytes from ``socket``. Returned future will
## complete once all the data requested is read, a part of the data has been
## read, or the socket has disconnected in which case the future will
@@ -352,15 +384,15 @@ when defined(windows) or defined(nimdoc):
# '\0' in the message currently signifies a socket disconnect. Who
# knows what will happen when someone sends that to our socket.
verifyPresence(socket)
var retFuture = newFuture[string]()
var retFuture = newFuture[string]()
var dataBuf: TWSABuf
dataBuf.buf = cast[cstring](alloc0(size))
dataBuf.len = size
var bytesReceived: DWord
var flagsio = flags.DWord
var ol = cast[PCustomOverlapped](alloc0(sizeof(TCustomOverlapped)))
var flagsio = flags.toOSFlags().DWord
var ol = PCustomOverlapped()
GC_ref(ol)
ol.data = TCompletionData(sock: socket, cb:
proc (sock: TAsyncFD, bytesCount: DWord, errcode: TOSErrorCode) =
if not retFuture.finished:
@@ -374,7 +406,9 @@ when defined(windows) or defined(nimdoc):
retFuture.complete($data)
else:
retFuture.fail(newException(EOS, osErrorMsg(errcode)))
dealloc dataBuf.buf
if dataBuf.buf != nil:
dealloc dataBuf.buf
dataBuf.buf = nil
)
let ret = WSARecv(socket.TSocketHandle, addr dataBuf, 1, addr bytesReceived,
@@ -382,9 +416,14 @@ when defined(windows) or defined(nimdoc):
if ret == -1:
let err = osLastError()
if err.int32 != ERROR_IO_PENDING:
dealloc dataBuf.buf
dealloc(ol)
retFuture.fail(newException(EOS, osErrorMsg(err)))
if dataBuf.buf != nil:
dealloc dataBuf.buf
dataBuf.buf = nil
GC_unref(ol)
if flags.isDisconnectionError(err):
retFuture.complete("")
else:
retFuture.fail(newException(EOS, osErrorMsg(err)))
elif ret == 0 and bytesReceived == 0 and dataBuf.buf[0] == '\0':
# We have to ensure that the buffer is empty because WSARecv will tell
# us immediatelly when it was disconnected, even when there is still
@@ -415,7 +454,8 @@ when defined(windows) or defined(nimdoc):
# free ``ol``.
return retFuture
proc send*(socket: TAsyncFD, data: string): PFuture[void] =
proc send*(socket: TAsyncFD, data: string,
flags = {TSocketFlags.SafeDisconn}): PFuture[void] =
## Sends ``data`` to ``socket``. The returned future will complete once all
## data has been sent.
verifyPresence(socket)
@@ -425,8 +465,9 @@ when defined(windows) or defined(nimdoc):
dataBuf.buf = data # since this is not used in a callback, this is fine
dataBuf.len = data.len
var bytesReceived, flags: DWord
var ol = cast[PCustomOverlapped](alloc0(sizeof(TCustomOverlapped)))
var bytesReceived, lowFlags: DWord
var ol = PCustomOverlapped()
GC_ref(ol)
ol.data = TCompletionData(sock: socket, cb:
proc (sock: TAsyncFD, bytesCount: DWord, errcode: TOSErrorCode) =
if not retFuture.finished:
@@ -437,12 +478,15 @@ when defined(windows) or defined(nimdoc):
)
let ret = WSASend(socket.TSocketHandle, addr dataBuf, 1, addr bytesReceived,
flags, cast[POverlapped](ol), nil)
lowFlags, cast[POverlapped](ol), nil)
if ret == -1:
let err = osLastError()
if err.int32 != ERROR_IO_PENDING:
retFuture.fail(newException(EOS, osErrorMsg(err)))
dealloc(ol)
GC_unref(ol)
if flags.isDisconnectionError(err):
retFuture.complete()
else:
retFuture.fail(newException(EOS, osErrorMsg(err)))
else:
retFuture.complete()
# We don't deallocate ``ol`` here because even though this completed
@@ -490,7 +534,8 @@ when defined(windows) or defined(nimdoc):
client: clientSock.TAsyncFD)
)
var ol = cast[PCustomOverlapped](alloc0(sizeof(TCustomOverlapped)))
var ol = PCustomOverlapped()
GC_ref(ol)
ol.data = TCompletionData(sock: socket, cb:
proc (sock: TAsyncFD, bytesCount: DWord, errcode: TOSErrorCode) =
if not retFuture.finished:
@@ -511,7 +556,7 @@ when defined(windows) or defined(nimdoc):
let err = osLastError()
if err.int32 != ERROR_IO_PENDING:
retFuture.fail(newException(EOS, osErrorMsg(err)))
dealloc(ol)
GC_unref(ol)
else:
completeAccept()
# We don't deallocate ``ol`` here because even though this completed
@@ -528,15 +573,30 @@ when defined(windows) or defined(nimdoc):
result.TSocketHandle.setBlocking(false)
register(result)
proc close*(socket: TAsyncFD) =
proc closeSocket*(socket: TAsyncFD) =
## Closes a socket and ensures that it is unregistered.
socket.TSocketHandle.close()
getGlobalDispatcher().handles.excl(socket)
proc unregister*(fd: TAsyncFD) =
## Unregisters ``fd``.
getGlobalDispatcher().handles.excl(fd)
initAll()
else:
import selectors
from posix import EINTR, EAGAIN, EINPROGRESS, EWOULDBLOCK, MSG_PEEK
when defined(windows):
import winlean
const
EINTR = WSAEINPROGRESS
EINPROGRESS = WSAEINPROGRESS
EWOULDBLOCK = WSAEWOULDBLOCK
EAGAIN = EINPROGRESS
MSG_NOSIGNAL = 0
else:
from posix import EINTR, EAGAIN, EINPROGRESS, EWOULDBLOCK, MSG_PEEK,
MSG_NOSIGNAL
type
TAsyncFD* = distinct cint
TCallback = proc (sock: TAsyncFD): bool {.closure,gcsafe.}
@@ -577,11 +637,14 @@ else:
result.TSocketHandle.setBlocking(false)
register(result)
proc close*(sock: TAsyncFD) =
proc closeSocket*(sock: TAsyncFD) =
let disp = getGlobalDispatcher()
sock.TSocketHandle.close()
disp.selector.unregister(sock.TSocketHandle)
proc unregister*(fd: TAsyncFD) =
getGlobalDispatcher().selector.unregister(fd.TSocketHandle)
proc addRead(sock: TAsyncFD, cb: TCallback) =
let p = getGlobalDispatcher()
if sock.TSocketHandle notin p.selector:
@@ -667,20 +730,23 @@ else:
return retFuture
proc recv*(socket: TAsyncFD, size: int,
flags: int = 0): PFuture[string] =
flags = {TSocketFlags.SafeDisconn}): PFuture[string] =
var retFuture = newFuture[string]()
var readBuffer = newString(size)
proc cb(sock: TAsyncFD): bool =
result = true
let res = recv(sock.TSocketHandle, addr readBuffer[0], size,
flags.cint)
let res = recv(sock.TSocketHandle, addr readBuffer[0], size.cint,
flags.toOSFlags())
#echo("recv cb res: ", res)
if res < 0:
let lastError = osLastError()
if lastError.int32 notin {EINTR, EWOULDBLOCK, EAGAIN}:
retFuture.fail(newException(EOS, osErrorMsg(lastError)))
if lastError.int32 notin {EINTR, EWOULDBLOCK, EAGAIN}:
if flags.isDisconnectionError(lastError):
retFuture.complete("")
else:
retFuture.fail(newException(EOS, osErrorMsg(lastError)))
else:
result = false # We still want this callback to be called.
elif res == 0:
@@ -689,11 +755,13 @@ else:
else:
readBuffer.setLen(res)
retFuture.complete(readBuffer)
# TODO: The following causes a massive slowdown.
#if not cb(socket):
addRead(socket, cb)
return retFuture
proc send*(socket: TAsyncFD, data: string): PFuture[void] =
proc send*(socket: TAsyncFD, data: string,
flags = {TSocketFlags.SafeDisconn}): PFuture[void] =
var retFuture = newFuture[void]()
var written = 0
@@ -702,11 +770,15 @@ else:
result = true
let netSize = data.len-written
var d = data.cstring
let res = send(sock.TSocketHandle, addr d[written], netSize, 0.cint)
let res = send(sock.TSocketHandle, addr d[written], netSize.cint,
MSG_NOSIGNAL)
if res < 0:
let lastError = osLastError()
if lastError.int32 notin {EINTR, EWOULDBLOCK, EAGAIN}:
retFuture.fail(newException(EOS, osErrorMsg(lastError)))
if flags.isDisconnectionError(lastError):
retFuture.complete()
else:
retFuture.fail(newException(EOS, osErrorMsg(lastError)))
else:
result = false # We still want this callback to be called.
else:
@@ -715,6 +787,8 @@ else:
result = false # We still have data to send.
else:
retFuture.complete()
# TODO: The following causes crashes.
#if not cb(socket):
addWrite(socket, cb)
return retFuture
@@ -757,41 +831,76 @@ proc accept*(socket: TAsyncFD): PFuture[TAsyncFD] =
# -- Await Macro
template createCb*(retFutureSym, iteratorNameSym: expr): stmt {.immediate.} =
template createCb*(retFutureSym, iteratorNameSym,
name: expr): stmt {.immediate.} =
var nameIterVar = iteratorNameSym
#{.push stackTrace: off.}
proc cb {.closure,gcsafe.} =
if not nameIterVar.finished:
var next = nameIterVar()
if next == nil:
assert retFutureSym.finished, "Async procedure's return Future was not finished."
else:
next.callback = cb
try:
if not nameIterVar.finished:
var next = nameIterVar()
if next == nil:
assert retFutureSym.finished, "Async procedure's (" &
name & ") return Future was not finished."
else:
next.callback = cb
except:
retFutureSym.fail(getCurrentException())
cb()
#{.pop.}
proc generateExceptionCheck(futSym,
exceptBranch, rootReceiver: PNimrodNode): PNimrodNode {.compileTime.} =
if exceptBranch == nil:
result = rootReceiver
else:
if exceptBranch[0].kind == nnkStmtList:
result = newIfStmt(
(newDotExpr(futSym, newIdentNode("failed")),
exceptBranch[0]
)
)
else:
expectKind(exceptBranch[1], nnkStmtList)
result = newIfStmt(
(newDotExpr(futSym, newIdentNode("failed")),
newIfStmt(
(infix(newDotExpr(futSym, newIdentNode("error")), "of", exceptBranch[0]),
exceptBranch[1])
)
)
)
let elseNode = newNimNode(nnkElse)
elseNode.add newNimNode(nnkStmtList)
elseNode[0].add rootReceiver
result.add elseNode
template createVar(futSymName: string, asyncProc: PNimrodNode,
valueReceiver: expr) {.immediate, dirty.} =
# TODO: Used template here due to bug #926
template createVar(result: var PNimrodNode, futSymName: string,
asyncProc: PNimrodNode,
valueReceiver, rootReceiver: expr) =
result = newNimNode(nnkStmtList)
var futSym = genSym(nskVar, "future")
result.add newVarStmt(futSym, asyncProc) # -> var future<x> = y
result.add newNimNode(nnkYieldStmt).add(futSym) # -> yield future<x>
valueReceiver = newDotExpr(futSym, newIdentNode("read")) # -> future<x>.read
result.add generateExceptionCheck(futSym, exceptBranch, rootReceiver)
proc processBody(node, retFutureSym: PNimrodNode,
subtypeName: string): PNimrodNode {.compileTime.} =
subTypeIsVoid: bool,
exceptBranch: PNimrodNode): PNimrodNode {.compileTime.} =
#echo(node.treeRepr)
result = node
case node.kind
of nnkReturnStmt:
result = newNimNode(nnkStmtList)
if node[0].kind == nnkEmpty:
if subtypeName != "void":
if not subtypeIsVoid:
result.add newCall(newIdentNode("complete"), retFutureSym,
newIdentNode("result"))
else:
result.add newCall(newIdentNode("complete"), retFutureSym)
else:
result.add newCall(newIdentNode("complete"), retFutureSym,
node[0].processBody(retFutureSym, subtypeName))
node[0].processBody(retFutureSym, subtypeIsVoid, exceptBranch))
result.add newNimNode(nnkReturnStmt).add(newNilLit())
return # Don't process the children of this return stmt
@@ -804,16 +913,16 @@ proc processBody(node, retFutureSym: PNimrodNode,
of nnkCall:
# await foo(p, x)
var futureValue: PNimrodNode
createVar("future" & $node[1][0].toStrLit, node[1], futureValue)
result.add futureValue
result.createVar("future" & $node[1][0].toStrLit, node[1], futureValue,
futureValue)
else:
error("Invalid node kind in 'await', got: " & $node[1].kind)
elif node[1].kind == nnkCommand and node[1][0].kind == nnkIdent and
node[1][0].ident == !"await":
# foo await x
var newCommand = node
createVar("future" & $node[0].toStrLit, node[1][1], newCommand[1])
result.add newCommand
result.createVar("future" & $node[0].toStrLit, node[1][1], newCommand[1],
newCommand)
of nnkVarSection, nnkLetSection:
case node[0][2].kind
@@ -821,9 +930,8 @@ proc processBody(node, retFutureSym: PNimrodNode,
if node[0][2][0].ident == !"await":
# var x = await y
var newVarSection = node # TODO: Should this use copyNimNode?
createVar("future" & $node[0][0].ident, node[0][2][1],
newVarSection[0][2])
result.add newVarSection
result.createVar("future" & $node[0][0].ident, node[0][2][1],
newVarSection[0][2], newVarSection)
else: discard
of nnkAsgn:
case node[1].kind
@@ -831,19 +939,43 @@ proc processBody(node, retFutureSym: PNimrodNode,
if node[1][0].ident == !"await":
# x = await y
var newAsgn = node
createVar("future" & $node[0].toStrLit, node[1][1], newAsgn[1])
result.add newAsgn
result.createVar("future" & $node[0].toStrLit, node[1][1], newAsgn[1], newAsgn)
else: discard
of nnkDiscardStmt:
# discard await x
if node[0][0].kind == nnkIdent and node[0][0].ident == !"await":
var dummy = newNimNode(nnkStmtList)
createVar("futureDiscard_" & $toStrLit(node[0][1]), node[0][1], dummy)
if node[0].kind != nnkEmpty and node[0][0].kind == nnkIdent and
node[0][0].ident == !"await":
var newDiscard = node
result.createVar("futureDiscard_" & $toStrLit(node[0][1]), node[0][1],
newDiscard[0], newDiscard)
of nnkTryStmt:
# try: await x; except: ...
result = newNimNode(nnkStmtList)
proc processForTry(n: PNimrodNode, i: var int,
res: PNimrodNode): bool {.compileTime.} =
result = false
while i < n[0].len:
var processed = processBody(n[0][i], retFutureSym, subtypeIsVoid, n[1])
if processed.kind != n[0][i].kind or processed.len != n[0][i].len:
expectKind(processed, nnkStmtList)
expectKind(processed[2][1], nnkElse)
i.inc
discard processForTry(n, i, processed[2][1][0])
res.add processed
result = true
else:
res.add n[0][i]
i.inc
var i = 0
if not processForTry(node, i, result):
var temp = node
temp[0] = result
result = temp
return
else: discard
for i in 0 .. <result.len:
result[i] = processBody(result[i], retFutureSym, subtypeName)
#echo(treeRepr(result))
result[i] = processBody(result[i], retFutureSym, subtypeIsVoid, exceptBranch)
proc getName(node: PNimrodNode): string {.compileTime.} =
case node.kind
@@ -851,47 +983,53 @@ proc getName(node: PNimrodNode): string {.compileTime.} =
return $node[1].ident
of nnkIdent:
return $node.ident
of nnkEmpty:
return "anonymous"
else:
assert false
error("Unknown name.")
macro async*(prc: stmt): stmt {.immediate.} =
## Macro which processes async procedures into the appropriate
## iterators and yield statements.
expectKind(prc, nnkProcDef)
if prc.kind notin {nnkProcDef, nnkLambda}:
error("Cannot transform this node kind into an async proc." &
" Proc definition or lambda node expected.")
hint("Processing " & prc[0].getName & " as an async proc.")
let returnType = prc[3][0]
var subtypeName = ""
# Verify that the return type is a PFuture[T]
if returnType.kind == nnkIdent:
error("Expected return type of 'PFuture' got '" & $returnType & "'")
elif returnType.kind == nnkBracketExpr:
if $returnType[0] != "PFuture":
error("Expected return type of 'PFuture' got '" & $returnType[0] & "'")
subtypeName = $returnType[1].ident
elif returnType.kind == nnkEmpty:
subtypeName = "void"
let subtypeIsVoid = returnType.kind == nnkEmpty or
(returnType.kind == nnkBracketExpr and
returnType[1].kind == nnkIdent and returnType[1].ident == !"void")
var outerProcBody = newNimNode(nnkStmtList)
# -> var retFuture = newFuture[T]()
var retFutureSym = genSym(nskVar, "retFuture")
var subRetType =
if returnType.kind == nnkEmpty: newIdentNode("void")
else: returnType[1]
outerProcBody.add(
newVarStmt(retFutureSym,
newCall(
newNimNode(nnkBracketExpr).add(
newIdentNode(!"newFuture"), # TODO: Strange bug here? Remove the `!`.
newIdentNode(subtypeName))))) # Get type from return type of this proc
subRetType)))) # Get type from return type of this proc
# -> iterator nameIter(): PFutureBase {.closure.} =
# -> var result: T
# -> <proc_body>
# -> complete(retFuture, result)
var iteratorNameSym = genSym(nskIterator, $prc[0].getName & "Iter")
var procBody = prc[6].processBody(retFutureSym, subtypeName)
if subtypeName != "void":
var procBody = prc[6].processBody(retFutureSym, subtypeIsVoid, nil)
if not subtypeIsVoid:
procBody.insert(0, newNimNode(nnkVarSection).add(
newIdentDefs(newIdentNode("result"), returnType[1]))) # -> var result: T
procBody.add(
@@ -908,7 +1046,8 @@ macro async*(prc: stmt): stmt {.immediate.} =
# -> createCb(retFuture)
var cbName = newIdentNode("cb")
var procCb = newCall("createCb", retFutureSym, iteratorNameSym)
var procCb = newCall("createCb", retFutureSym, iteratorNameSym,
newStrLitNode(prc[0].getName))
outerProcBody.add procCb
# -> return retFuture
@@ -918,17 +1057,18 @@ macro async*(prc: stmt): stmt {.immediate.} =
# Remove the 'async' pragma.
for i in 0 .. <result[4].len:
if result[4][i].ident == !"async":
if result[4][i].kind == nnkIdent and result[4][i].ident == !"async":
result[4].del(i)
if subtypeName == "void":
if subtypeIsVoid:
# Add discardable pragma.
result[4].add(newIdentNode("discardable"))
if returnType.kind == nnkEmpty:
# Add PFuture[void]
result[3][0] = parseExpr("PFuture[void]")
result[6] = outerProcBody
#echo(treeRepr(result))
#if prc[0].getName == "routeReq":
#echo(toStrLit(result))
proc recvLine*(socket: TAsyncFD): PFuture[string] {.async.} =
@@ -956,7 +1096,7 @@ proc recvLine*(socket: TAsyncFD): PFuture[string] {.async.} =
if c.len == 0:
return ""
if c == "\r":
c = await recv(socket, 1, MSG_PEEK)
c = await recv(socket, 1, {TSocketFlags.SafeDisconn, TSocketFlags.Peek})
if c.len > 0 and c == "\L":
discard await recv(socket, 1)
addNLIfEmpty()

View File

@@ -14,12 +14,13 @@
import strtabs, asyncnet, asyncdispatch, parseutils, parseurl, strutils
type
TRequest* = object
client: PAsyncSocket # TODO: Separate this into a Response object?
client*: PAsyncSocket # TODO: Separate this into a Response object?
reqMethod*: string
headers*: PStringTable
protocol*: tuple[orig: string, major, minor: int]
url*: TURL
hostname*: string ## The hostname of the client that made the request.
body*: string
PAsyncHttpServer* = ref object
socket: PAsyncSocket
@@ -50,10 +51,15 @@ proc `==`*(protocol: tuple[orig: string, major, minor: int],
proc newAsyncHttpServer*(): PAsyncHttpServer =
  ## Allocates and returns a fresh async HTTP server instance.
  ## The listening socket is created later, when ``serve`` is called.
  new(result)
proc sendHeaders*(req: TRequest, headers: PStringTable) {.async.} =
## Sends the specified headers to the requesting client.
proc addHeaders(msg: var string, headers: PStringTable) =
for k, v in headers:
await req.client.send(k & ": " & v & "\c\L")
msg.add(k & ": " & v & "\c\L")
proc sendHeaders*(req: TRequest, headers: PStringTable): PFuture[void] =
## Sends the specified headers to the requesting client.
var msg = ""
addHeaders(msg, headers)
return req.client.send(msg)
proc respond*(req: TRequest, code: THttpCode,
content: string, headers: PStringTable = newStringTable()) {.async.} =
@@ -63,9 +69,9 @@ proc respond*(req: TRequest, code: THttpCode,
## This procedure will **not** close the client socket.
var customHeaders = headers
customHeaders["Content-Length"] = $content.len
await req.client.send("HTTP/1.1 " & $code & "\c\L")
await sendHeaders(req, headers)
await req.client.send("\c\L" & content)
var msg = "HTTP/1.1 " & $code & "\c\L"
msg.addHeaders(customHeaders)
await req.client.send(msg & "\c\L" & content)
proc newRequest(): TRequest =
  ## Builds an empty ``TRequest`` whose header table compares keys
  ## case-insensitively, as required for HTTP header names.
  result = TRequest(headers: newStringTable(modeCaseInsensitive))
@@ -77,7 +83,7 @@ proc parseHeader(line: string): tuple[key, value: string] =
i += line.skipWhiteSpace(i)
i += line.parseUntil(result.value, {'\c', '\L'}, i)
proc parseProtocol(protocol: string): tuple[orig: string, major, minor: int] =
proc parseProtocol(protocol: string): tuple[orig: string, major, minor: int] =
var i = protocol.skipIgnoreCase("HTTP/")
if i != 5:
raise newException(EInvalidValue, "Invalid request protocol. Got: " &
@@ -87,70 +93,95 @@ proc parseProtocol(protocol: string): tuple[orig: string, major, minor: int] =
i.inc # Skip .
i.inc protocol.parseInt(result.minor, i)
proc sendStatus(client: PAsyncSocket, status: string): PFuture[void] =
  ## Writes a bare ``HTTP/1.1 <status>`` status line (CRLF-terminated)
  ## to ``client`` and returns the send future.
  let statusLine = "HTTP/1.1 " & status & "\c\L"
  result = client.send(statusLine)
proc processClient(client: PAsyncSocket, address: string,
callback: proc (request: TRequest): PFuture[void]) {.async.} =
# GET /path HTTP/1.1
# Header: val
# \n
var request = newRequest()
request.hostname = address
assert client != nil
request.client = client
# First line - GET /path HTTP/1.1
let line = await client.recvLine() # TODO: Timeouts.
if line == "":
client.close()
return
let lineParts = line.split(' ')
if lineParts.len != 3:
request.respond(Http400, "Invalid request. Got: " & line)
client.close()
return
let reqMethod = lineParts[0]
let path = lineParts[1]
let protocol = lineParts[2]
# Headers
var i = 0
while true:
i = 0
let headerLine = await client.recvLine()
if headerLine == "":
client.close(); return
if headerLine == "\c\L": break
# TODO: Compiler crash
#let (key, value) = parseHeader(headerLine)
let kv = parseHeader(headerLine)
request.headers[kv.key] = kv.value
# GET /path HTTP/1.1
# Header: val
# \n
var request = newRequest()
request.hostname = address
assert client != nil
request.client = client
request.reqMethod = reqMethod
request.url = parseUrl(path)
try:
request.protocol = protocol.parseProtocol()
except EInvalidValue:
request.respond(Http400, "Invalid request protocol. Got: " & protocol)
return
case reqMethod.normalize
of "get", "post", "head", "put", "delete", "trace", "options", "connect", "patch":
await callback(request)
else:
request.respond(Http400, "Invalid request method. Got: " & reqMethod)
# First line - GET /path HTTP/1.1
let line = await client.recvLine() # TODO: Timeouts.
if line == "":
client.close()
return
let lineParts = line.split(' ')
if lineParts.len != 3:
await request.respond(Http400, "Invalid request. Got: " & line)
continue
# Persistent connections
if (request.protocol == HttpVer11 and
request.headers["connection"].normalize != "close") or
(request.protocol == HttpVer10 and
request.headers["connection"].normalize == "keep-alive"):
# In HTTP 1.1 we assume that connection is persistent. Unless connection
# header states otherwise.
# In HTTP 1.0 we assume that the connection should not be persistent.
# Unless the connection header states otherwise.
await processClient(client, address, callback)
else:
request.client.close()
let reqMethod = lineParts[0]
let path = lineParts[1]
let protocol = lineParts[2]
# Headers
var i = 0
while true:
i = 0
let headerLine = await client.recvLine()
if headerLine == "":
client.close(); return
if headerLine == "\c\L": break
# TODO: Compiler crash
#let (key, value) = parseHeader(headerLine)
let kv = parseHeader(headerLine)
request.headers[kv.key] = kv.value
request.reqMethod = reqMethod
request.url = parseUrl(path)
try:
request.protocol = protocol.parseProtocol()
except EInvalidValue:
asyncCheck request.respond(Http400, "Invalid request protocol. Got: " &
protocol)
continue
if reqMethod.normalize == "post":
# Check for Expect header
if request.headers.hasKey("Expect"):
if request.headers["Expect"].toLower == "100-continue":
await client.sendStatus("100 Continue")
else:
await client.sendStatus("417 Expectation Failed")
# Read the body
# - Check for Content-length header
if request.headers.hasKey("Content-Length"):
var contentLength = 0
if parseInt(request.headers["Content-Length"], contentLength) == 0:
await request.respond(Http400, "Bad Request. Invalid Content-Length.")
else:
request.body = await client.recv(contentLength)
assert request.body.len == contentLength
else:
await request.respond(Http400, "Bad Request. No Content-Length.")
continue
case reqMethod.normalize
of "get", "post", "head", "put", "delete", "trace", "options", "connect", "patch":
await callback(request)
else:
await request.respond(Http400, "Invalid request method. Got: " & reqMethod)
# Persistent connections
if (request.protocol == HttpVer11 and
request.headers["connection"].normalize != "close") or
(request.protocol == HttpVer10 and
request.headers["connection"].normalize == "keep-alive"):
# In HTTP 1.1 we assume that connection is persistent. Unless connection
# header states otherwise.
# In HTTP 1.0 we assume that the connection should not be persistent.
# Unless the connection header states otherwise.
else:
request.client.close()
break
proc serve*(server: PAsyncHttpServer, port: TPort,
callback: proc (request: TRequest): PFuture[void],
@@ -167,14 +198,20 @@ proc serve*(server: PAsyncHttpServer, port: TPort,
# TODO: Causes compiler crash.
#var (address, client) = await server.socket.acceptAddr()
var fut = await server.socket.acceptAddr()
processClient(fut.client, fut.address, callback)
asyncCheck processClient(fut.client, fut.address, callback)
proc close*(server: PAsyncHttpServer) =
  ## Terminates the async http server instance by closing its
  ## listening socket; clients already being processed are unaffected.
  close(server.socket)
when isMainModule:
var server = newAsyncHttpServer()
proc cb(req: TRequest) {.async.} =
#echo(req.reqMethod, " ", req.url)
#echo(req.headers)
await req.respond(Http200, "Hello World")
let headers = {"Date": "Tue, 29 Apr 2014 23:40:08 GMT",
"Content-type": "text/plain; charset=utf-8"}
await req.respond(Http200, "Hello World", headers.newStringTable())
server.serve(TPort(5555), cb)
asyncCheck server.serve(TPort(5555), cb)
runForever()

View File

@@ -80,7 +80,8 @@ proc connect*(socket: PAsyncSocket, address: string, port: TPort,
## or an error occurs.
result = connect(socket.fd.TAsyncFD, address, port, af)
proc readIntoBuf(socket: PAsyncSocket, flags: int): PFuture[int] {.async.} =
proc readIntoBuf(socket: PAsyncSocket,
flags: set[TSocketFlags]): PFuture[int] {.async.} =
var data = await recv(socket.fd.TAsyncFD, BufferSize, flags)
if data.len != 0:
copyMem(addr socket.buffer[0], addr data[0], data.len)
@@ -89,7 +90,7 @@ proc readIntoBuf(socket: PAsyncSocket, flags: int): PFuture[int] {.async.} =
result = data.len
proc recv*(socket: PAsyncSocket, size: int,
flags: int = 0): PFuture[string] {.async.} =
flags = {TSocketFlags.SafeDisconn}): PFuture[string] {.async.} =
## Reads ``size`` bytes from ``socket``. Returned future will complete once
## all of the requested data is read. If socket is disconnected during the
## recv operation then the future may complete with only a part of the
@@ -97,37 +98,42 @@ proc recv*(socket: PAsyncSocket, size: int,
## to be read then the future will complete with a value of ``""``.
if socket.isBuffered:
result = newString(size)
template returnNow(readBytes: int) =
result.setLen(readBytes)
# Only increase buffer position when not peeking.
if (flags and MSG_PEEK) != MSG_PEEK:
socket.currPos.inc(readBytes)
return
let originalBufPos = socket.currPos
if socket.bufLen == 0:
let res = await socket.readIntoBuf(flags and (not MSG_PEEK))
if res == 0: returnNow(0)
let res = await socket.readIntoBuf(flags - {TSocketFlags.Peek})
if res == 0:
result.setLen(0)
return
var read = 0
while read < size:
if socket.currPos >= socket.bufLen:
let res = await socket.readIntoBuf(flags and (not MSG_PEEK))
if res == 0: returnNow(read)
if TSocketFlags.Peek in flags:
# We don't want to get another buffer if we're peeking.
break
let res = await socket.readIntoBuf(flags - {TSocketFlags.Peek})
if res == 0:
break
let chunk = min(socket.bufLen-socket.currPos, size-read)
copyMem(addr(result[read]), addr(socket.buffer[socket.currPos+read]), chunk)
copyMem(addr(result[read]), addr(socket.buffer[socket.currPos]), chunk)
read.inc(chunk)
socket.currPos.inc(chunk)
returnNow(read)
if TSocketFlags.Peek in flags:
# Restore old buffer cursor position.
socket.currPos = originalBufPos
result.setLen(read)
else:
result = await recv(socket.fd.TAsyncFD, size, flags)
proc send*(socket: PAsyncSocket, data: string): PFuture[void] =
proc send*(socket: PAsyncSocket, data: string,
flags = {TSocketFlags.SafeDisconn}): PFuture[void] =
## Sends ``data`` to ``socket``. The returned future will complete once all
## data has been sent.
assert socket != nil
result = send(socket.fd.TAsyncFD, data)
result = send(socket.fd.TAsyncFD, data, flags)
proc acceptAddr*(socket: PAsyncSocket):
PFuture[tuple[address: string, client: PAsyncSocket]] =
@@ -162,7 +168,8 @@ proc accept*(socket: PAsyncSocket): PFuture[PAsyncSocket] =
retFut.complete(future.read.client)
return retFut
proc recvLine*(socket: PAsyncSocket): PFuture[string] {.async.} =
proc recvLine*(socket: PAsyncSocket,
flags = {TSocketFlags.SafeDisconn}): PFuture[string] {.async.} =
## Reads a line of data from ``socket``. Returned future will complete once
## a full line is read or an error occurs.
##
@@ -175,28 +182,60 @@ proc recvLine*(socket: PAsyncSocket): PFuture[string] {.async.} =
## If the socket is disconnected in the middle of a line (before ``\r\L``
## is read) then line will be set to ``""``.
## The partial line **will be lost**.
##
## **Warning**: The ``Peek`` flag is not yet implemented.
template addNLIfEmpty(): stmt =
if result.len == 0:
result.add("\c\L")
assert TSocketFlags.Peek notin flags ## TODO:
if socket.isBuffered:
result = ""
if socket.bufLen == 0:
let res = await socket.readIntoBuf(flags)
if res == 0:
return
result = ""
var c = ""
while true:
c = await recv(socket, 1)
if c.len == 0:
return ""
if c == "\r":
c = await recv(socket, 1, MSG_PEEK)
if c.len > 0 and c == "\L":
let dummy = await recv(socket, 1)
assert dummy == "\L"
addNLIfEmpty()
return
elif c == "\L":
addNLIfEmpty()
return
add(result.string, c)
var lastR = false
while true:
if socket.currPos >= socket.bufLen:
let res = await socket.readIntoBuf(flags)
if res == 0:
result = ""
break
case socket.buffer[socket.currPos]
of '\r':
lastR = true
addNLIfEmpty()
of '\L':
addNLIfEmpty()
socket.currPos.inc()
return
else:
if lastR:
socket.currPos.inc()
return
else:
result.add socket.buffer[socket.currPos]
socket.currPos.inc()
else:
result = ""
var c = ""
while true:
c = await recv(socket, 1, flags)
if c.len == 0:
return ""
if c == "\r":
c = await recv(socket, 1, flags + {TSocketFlags.Peek})
if c.len > 0 and c == "\L":
let dummy = await recv(socket, 1, flags)
assert dummy == "\L"
addNLIfEmpty()
return
elif c == "\L":
addNLIfEmpty()
return
add(result.string, c)
proc bindAddr*(socket: PAsyncSocket, port = TPort(0), address = "") =
## Binds ``address``:``port`` to the socket.
@@ -214,7 +253,7 @@ proc listen*(socket: PAsyncSocket, backlog = SOMAXCONN) =
proc close*(socket: PAsyncSocket) =
## Closes the socket.
socket.fd.TAsyncFD.close()
socket.fd.TAsyncFD.closeSocket()
# TODO SSL
when isMainModule:
@@ -235,7 +274,7 @@ when isMainModule:
break
else:
echo("Got line: ", line)
main()
asyncCheck main()
elif test == LowClient:
var sock = newAsyncSocket()
var f = connect(sock, "irc.freenode.net", TPort(6667))

View File

@@ -47,19 +47,15 @@ proc concat*[T](seqs: varargs[seq[T]]): seq[T] =
result[i] = itm
inc(i)
proc distnct*[T](seq1: seq[T]): seq[T] =
proc deduplicate*[T](seq1: seq[T]): seq[T] =
## Returns a new sequence without duplicates.
##
## This proc is `misspelled` on purpose to avoid a clash with the keyword
## ``distinct`` used to `define a derived type incompatible with its base
## type <manual.html#distinct-type>`_. Example:
##
## .. code-block:: nimrod
## let
## dup1 = @[1, 1, 3, 4, 2, 2, 8, 1, 4]
## dup2 = @["a", "a", "c", "d", "d"]
## unique1 = distnct(dup1)
## unique2 = distnct(dup2)
## unique1 = deduplicate(dup1)
## unique2 = deduplicate(dup2)
## assert unique1 == @[1, 3, 4, 2, 8]
## assert unique2 == @["a", "c", "d"]
result = @[]
@@ -182,6 +178,24 @@ proc filter*[T](seq1: seq[T], pred: proc(item: T): bool {.closure.}): seq[T] =
## assert f2 == @["yellow"]
accumulateResult(filter(seq1, pred))
proc keepIf*[T](seq1: var seq[T], pred: proc(item: T): bool {.closure.}) =
  ## Keeps the items in the passed sequence that fulfil the predicate.
  ## Same as the ``filter`` proc, but modifies the sequence directly
  ## instead of returning a new one.
  ##
  ## Example:
  ##
  ## .. code-block:: nimrod
  ##   var floats = @[13.0, 12.5, 5.8, 2.0, 6.1, 9.9, 10.1]
  ##   keepIf(floats, proc(x: float): bool = x > 10)
  ##   assert floats == @[13.0, 12.5, 10.1]
  # Stable in-place compaction: surviving items are moved to the front
  # (preserving their relative order), then the tail is truncated.
  var pos = 0
  for i in 0 .. high(seq1):
    if pred(seq1[i]):
      if pos != i:
        seq1[pos] = seq1[i]
      inc(pos)
  setLen(seq1, pos)
proc delete*[T](s: var seq[T], first=0, last=0) =
## Deletes in `s` the items at position `first` .. `last`. This modifies
## `s` itself, it does not return a copy.
@@ -252,6 +266,27 @@ template filterIt*(seq1, pred: expr): expr {.immediate.} =
if pred: result.add(it)
result
template keepItIf*(varSeq, pred: expr) =
  ## Convenience template around the ``keepIf`` proc to reduce typing.
  ##
  ## Unlike the `proc` version, the predicate needs to be an expression
  ## using the ``it`` variable for testing, and ``varSeq`` must be a
  ## mutable sequence, like: ``keepItIf(mySeq, it == 'x')``.
  ## Example:
  ##
  ## .. code-block:: nimrod
  ##   var candidates = @["foo", "bar", "baz", "foobar"]
  ##   keepItIf(candidates, it.len == 3 and it[0] == 'b')
  ##   assert candidates == @["bar", "baz"]
  # Same stable compaction as ``keepIf``; ``it`` is injected so the
  # caller's predicate expression can refer to the current item.
  var pos = 0
  for i in 0 .. high(varSeq):
    let it {.inject.} = varSeq[i]
    if pred:
      if pos != i:
        varSeq[pos] = varSeq[i]
      inc(pos)
  setLen(varSeq, pos)
template toSeq*(iter: expr): expr {.immediate.} =
## Transforms any iterator into a sequence.
##
@@ -387,8 +422,8 @@ when isMainModule:
let
dup1 = @[1, 1, 3, 4, 2, 2, 8, 1, 4]
dup2 = @["a", "a", "c", "d", "d"]
unique1 = distnct(dup1)
unique2 = distnct(dup2)
unique1 = deduplicate(dup1)
unique2 = deduplicate(dup2)
assert unique1 == @[1, 3, 4, 2, 8]
assert unique2 == @["a", "c", "d"]
@@ -418,6 +453,11 @@ when isMainModule:
echo($n)
# echoes 4, 8, 4 in separate lines
block: # keepIf test
var floats = @[13.0, 12.5, 5.8, 2.0, 6.1, 9.9, 10.1]
keepIf(floats, proc(x: float): bool = x > 10)
assert floats == @[13.0, 12.5, 10.1]
block: # filterIt test
let
temperatures = @[-272.15, -2.0, 24.5, 44.31, 99.9, -113.44]
@@ -426,6 +466,11 @@ when isMainModule:
assert acceptable == @[-2.0, 24.5, 44.31]
assert notAcceptable == @[-272.15, 99.9, -113.44]
block: # keepItIf test
var candidates = @["foo", "bar", "baz", "foobar"]
keepItIf(candidates, it.len == 3 and it[0] == 'b')
assert candidates == @["bar", "baz"]
block: # toSeq test
let
numeric = @[1, 2, 3, 4, 5, 6, 7, 8, 9]

View File

@@ -112,6 +112,10 @@ proc incl*[A](s: var TSet[A], key: A) =
## includes an element `key` in `s`.
inclImpl()
proc incl*[A](s: var TSet[A], other: TSet[A]) =
  ## Includes every element of `other` in `s` (in-place union).
  for elem in items(other):
    incl(s, elem)
proc excl*[A](s: var TSet[A], key: A) =
## excludes `key` from the set `s`.
var index = rawGet(s, key)
@@ -119,6 +123,10 @@ proc excl*[A](s: var TSet[A], key: A) =
s.data[index].slot = seDeleted
dec(s.counter)
proc excl*[A](s: var TSet[A], other: TSet[A]) =
  ## Excludes every element of `other` from `s` (in-place difference).
  for elem in items(other):
    excl(s, elem)
proc containsOrIncl*[A](s: var TSet[A], key: A): bool =
## returns true if `s` contains `key`, otherwise `key` is included in `s`
## and false is returned.
@@ -147,6 +155,43 @@ proc `$`*[A](s: TSet[A]): string =
## The `$` operator for hash sets.
dollarImpl()
proc union*[A](s1, s2: TSet[A]): TSet[A] =
  ## Returns a new set of all items that are contained in at least one
  ## of `s1` and `s2`.
  # Start from a copy of s1, then merge s2 into it.
  result = s1
  incl(result, s2)
proc intersection*[A](s1, s2: TSet[A]): TSet[A] =
  ## Returns a new set of all items that are contained in both `s1`
  ## and `s2`.
  # Size the result by the smaller input's backing storage.
  result = initSet[A](min(s1.data.len, s2.data.len))
  for elem in items(s1):
    if elem in s2:
      incl(result, elem)
proc symmetricDifference*[A](s1, s2: TSet[A]): TSet[A] =
  ## Returns a new set of all items that are contained in either `s1`
  ## or `s2`, but not in both.
  result = s1
  for elem in items(s2):
    # containsOrIncl adds the item when absent; when it was already
    # present it is removed, leaving items found in exactly one input.
    if containsOrIncl(result, elem):
      excl(result, elem)
proc `+`*[A](s1, s2: TSet[A]): TSet[A] {.inline.} =
  ## Operator alias for `union`.
  union(s1, s2)
proc `*`*[A](s1, s2: TSet[A]): TSet[A] {.inline.} =
  ## Operator alias for `intersection`.
  intersection(s1, s2)
proc `-+-`*[A](s1, s2: TSet[A]): TSet[A] {.inline.} =
  ## Operator alias for `symmetricDifference`.
  symmetricDifference(s1, s2)
proc disjoint*[A](s1, s2: TSet[A]): bool =
  ## Returns true iff `s1` and `s2` have no items in common.
  result = true
  for elem in items(s1):
    if elem in s2:
      return false
# ------------------------------ ordered set ------------------------------
type
@@ -211,6 +256,10 @@ proc incl*[A](s: var TOrderedSet[A], key: A) =
## includes an element `key` in `s`.
inclImpl()
proc incl*[A](s: var TSet[A], other: TOrderedSet[A]) =
  ## Includes every element of the ordered set `other` in the hash
  ## set `s`.
  for elem in items(other):
    incl(s, elem)
proc containsOrIncl*[A](s: var TOrderedSet[A], key: A): bool =
## returns true if `s` contains `key`, otherwise `key` is included in `s`
## and false is returned.

Some files were not shown because too many files have changed in this diff Show More