Merge branch 'devel' of https://github.com/Araq/Nimrod into new_spawn
@@ -19,7 +19,7 @@

import
  os, llstream, renderer, clex, idents, strutils, pegs, ast, astalgo, msgs,
  options, strtabs
  options, strtabs, hashes, algorithm

type
  TParserFlag = enum
@@ -63,6 +63,15 @@ type

  ERetryParsing = object of ESynch


proc addTypeDef(section, name, t: PNode)
proc parseStruct(p: var TParser, stmtList: PNode, isUnion: bool): PNode
proc parseStructBody(p: var TParser, stmtList: PNode, isUnion: bool,
                     kind: TNodeKind = nkRecList): PNode


proc newParserOptions*(): PParserOptions =
  new(result)
  result.prefixes = @[]
@@ -682,24 +691,6 @@ proc parseField(p: var TParser, kind: TNodeKind): PNode =
  else: result = mangledIdent(p.tok.s, p)
  getTok(p, result)

proc parseStructBody(p: var TParser, isUnion: bool,
                     kind: TNodeKind = nkRecList): PNode =
  result = newNodeP(kind, p)
  eat(p, pxCurlyLe, result)
  while p.tok.xkind notin {pxEof, pxCurlyRi}:
    var baseTyp = typeAtom(p)
    while true:
      var def = newNodeP(nkIdentDefs, p)
      var t = pointer(p, baseTyp)
      var i = parseField(p, kind)
      t = parseTypeSuffix(p, t)
      addSon(def, i, t, ast.emptyNode)
      addSon(result, def)
      if p.tok.xkind != pxComma: break
      getTok(p, def)
    eat(p, pxSemicolon, lastSon(result))
  eat(p, pxCurlyRi, result)

proc structPragmas(p: TParser, name: PNode, origName: string): PNode =
  assert name.kind == nkIdent
  result = newNodeP(nkPragmaExpr, p)
@@ -712,6 +703,75 @@ proc structPragmas(p: TParser, name: PNode, origName: string): PNode =
  if pragmas.len > 0: addSon(result, pragmas)
  else: addSon(result, ast.emptyNode)

proc hashPosition(p: TParser): string =
  let lineInfo = parLineInfo(p)
  let fileInfo = fileInfos[lineInfo.fileIndex]
  result = $hash(fileInfo.shortName & "_" & $lineInfo.line & "_" & $lineInfo.col).uint

proc parseInnerStruct(p: var TParser, stmtList: PNode, isUnion: bool): PNode =
  getTok(p, nil)
  if p.tok.xkind != pxCurlyLe:
    parMessage(p, errUser, "Expected '{' but found '" & $(p.tok[]) & "'")

  let structName = if isUnion: "INNER_C_UNION_" & p.hashPosition
                   else: "INNER_C_STRUCT_" & p.hashPosition
  let typeSection = newNodeP(nkTypeSection, p)
  let newStruct = newNodeP(nkObjectTy, p)
  var pragmas = ast.emptyNode
  if isUnion:
    pragmas = newNodeP(nkPragma, p)
    addSon(pragmas, newIdentNodeP("union", p))
  addSon(newStruct, pragmas, ast.emptyNode) # no inheritance
  result = newNodeP(nkIdent, p)
  result.ident = getIdent(structName)
  let struct = parseStructBody(p, stmtList, isUnion)
  let defName = newNodeP(nkIdent, p)
  defName.ident = getIdent(structName)
  addSon(newStruct, struct)
  addTypeDef(typeSection, structPragmas(p, defName, "no_name"), newStruct)
  addSon(stmtList, typeSection)

proc parseStructBody(p: var TParser, stmtList: PNode, isUnion: bool,
                     kind: TNodeKind = nkRecList): PNode =
  result = newNodeP(kind, p)
  eat(p, pxCurlyLe, result)
  while p.tok.xkind notin {pxEof, pxCurlyRi}:
    skipConst(p)
    var baseTyp: PNode
    if p.tok.xkind == pxSymbol and (p.tok.s == "struct" or p.tok.s == "union"):
      let gotUnion = if p.tok.s == "union": true else: false
      saveContext(p)
      getTok(p, nil)
      if p.tok.xkind == pxSymbol:
        backtrackContext(p)
        baseTyp = typeAtom(p)
      else:
        backtrackContext(p)
        baseTyp = parseInnerStruct(p, stmtList, gotUnion)
        if p.tok.xkind == pxSemiColon:
          let def = newNodeP(nkIdentDefs, p)
          var t = pointer(p, baseTyp)
          let i = fieldIdent("ano_" & p.hashPosition, p)
          t = parseTypeSuffix(p, t)
          addSon(def, i, t, ast.emptyNode)
          addSon(result, def)
          getTok(p, nil)
          continue
    else:
      baseTyp = typeAtom(p)

    while true:
      var def = newNodeP(nkIdentDefs, p)
      var t = pointer(p, baseTyp)
      var i = parseField(p, kind)
      t = parseTypeSuffix(p, t)
      addSon(def, i, t, ast.emptyNode)
      addSon(result, def)
      if p.tok.xkind != pxComma: break
      getTok(p, def)
    eat(p, pxSemicolon, lastSon(result))
  eat(p, pxCurlyRi, result)

proc enumPragmas(p: TParser, name: PNode): PNode =
  result = newNodeP(nkPragmaExpr, p)
  addSon(result, name)
@@ -722,7 +782,7 @@ proc enumPragmas(p: TParser, name: PNode): PNode =
  addSon(pragmas, e)
  addSon(result, pragmas)

proc parseStruct(p: var TParser, isUnion: bool): PNode =
proc parseStruct(p: var TParser, stmtList: PNode, isUnion: bool): PNode =
  result = newNodeP(nkObjectTy, p)
  var pragmas = ast.emptyNode
  if isUnion:
@@ -730,7 +790,7 @@ proc parseStruct(p: var TParser, isUnion: bool): PNode =
    addSon(pragmas, newIdentNodeP("union", p))
  addSon(result, pragmas, ast.emptyNode) # no inheritance
  if p.tok.xkind == pxCurlyLe:
    addSon(result, parseStructBody(p, isUnion))
    addSon(result, parseStructBody(p, stmtList, isUnion))
  else:
    addSon(result, newNodeP(nkRecList, p))

@@ -855,9 +915,28 @@ proc parseTrailingDefinedTypes(p: var TParser, section, typ: PNode) =
    newTyp = parseTypeSuffix(p, newTyp)
    addTypeDef(section, newName, newTyp)

proc enumFields(p: var TParser): PNode =
proc createConst(name, typ, val: PNode, p: TParser): PNode =
  result = newNodeP(nkConstDef, p)
  addSon(result, name, typ, val)

proc exprToNumber(n: PNode not nil): tuple[succ: bool, val: BiggestInt] =
  result = (false, 0.BiggestInt)
  case n.kind:
  of nkPrefix:
    # Check for negative/positive numbers -3 or +6
    if n.sons.len == 2 and n.sons[0].kind == nkIdent and n.sons[1].kind == nkIntLit:
      let pre = n.sons[0]
      let num = n.sons[1]
      if pre.ident.s == "-": result = (true, - num.intVal)
      elif pre.ident.s == "+": result = (true, num.intVal)
  else: discard

proc enumFields(p: var TParser, constList: PNode): PNode =
  result = newNodeP(nkEnumTy, p)
  addSon(result, ast.emptyNode) # enum does not inherit from anything
  var i: BiggestInt = 0
  var field: tuple[id: BiggestInt, isNumber: bool, node: PNode]
  var fields = newSeq[type(field)]()
  while true:
    var e = skipIdent(p)
    if p.tok.xkind == pxAsgn:
@@ -867,17 +946,59 @@ proc enumFields(p: var TParser): PNode =
      e = newNodeP(nkEnumFieldDef, p)
      addSon(e, a, c)
      skipCom(p, e)
    addSon(result, e)
      if c.kind == nkIntLit:
        i = c.intVal
        field.isNumber = true
      else:
        var (success, number) = exprToNumber(c)
        if success:
          i = number
          field.isNumber = true
        else:
          field.isNumber = false
    else:
      inc(i)
      field.isNumber = true
    field.id = i
    field.node = e
    fields.add(field)
    if p.tok.xkind != pxComma: break
    getTok(p, e)
    # allow trailing comma:
    if p.tok.xkind == pxCurlyRi: break
  fields.sort do (x, y: type(field)) -> int:
    cmp(x.id, y.id)
  var lastId: BiggestInt
  var lastIdent: PNode
  for count, f in fields:
    if not f.isNumber:
      addSon(result, f.node)
    elif f.id == lastId and count > 0:
      var currentIdent: PNode
      case f.node.kind:
      of nkEnumFieldDef:
        if f.node.sons.len > 0 and f.node.sons[0].kind == nkIdent:
          currentIdent = f.node.sons[0]
        else: parMessage(p, errGenerated, "Warning: When sorting enum fields an expected nkIdent was not found. Check the fields!")
      of nkIdent: currentIdent = f.node
      else: parMessage(p, errGenerated, "Warning: When sorting enum fields an expected nkIdent was not found. Check the fields!")
      var constant = createConst( currentIdent, ast.emptyNode, lastIdent, p)
      constList.addSon(constant)
    else:
      addSon(result, f.node)
      lastId = f.id
    case f.node.kind:
    of nkEnumFieldDef:
      if f.node.sons.len > 0 and f.node.sons[0].kind == nkIdent:
        lastIdent = f.node.sons[0]
      else: parMessage(p, errGenerated, "Warning: When sorting enum fields an expected nkIdent was not found. Check the fields!")
    of nkIdent: lastIdent = f.node
    else: parMessage(p, errGenerated, "Warning: When sorting enum fields an expected nkIdent was not found. Check the fields!")

proc parseTypedefStruct(p: var TParser, result: PNode, isUnion: bool) =
proc parseTypedefStruct(p: var TParser, result: PNode, stmtList: PNode, isUnion: bool) =
  getTok(p, result)
  if p.tok.xkind == pxCurlyLe:
    var t = parseStruct(p, isUnion)
    var t = parseStruct(p, stmtList, isUnion)
    var origName = p.tok.s
    markTypeIdent(p, nil)
    var name = skipIdent(p)
@@ -890,7 +1011,7 @@ proc parseTypedefStruct(p: var TParser, result: PNode, isUnion: bool) =
    var nameOrType = skipIdent(p)
    case p.tok.xkind
    of pxCurlyLe:
      var t = parseStruct(p, isUnion)
      var t = parseStruct(p, stmtList, isUnion)
      if p.tok.xkind == pxSymbol:
        # typedef struct tagABC {} abc, *pabc;
        # --> abc is a better type name than tagABC!
@@ -914,11 +1035,11 @@ proc parseTypedefStruct(p: var TParser, result: PNode, isUnion: bool) =
  else:
    expectIdent(p)

proc parseTypedefEnum(p: var TParser, result: PNode) =
proc parseTypedefEnum(p: var TParser, result, constSection: PNode) =
  getTok(p, result)
  if p.tok.xkind == pxCurlyLe:
    getTok(p, result)
    var t = enumFields(p)
    var t = enumFields(p, constSection)
    eat(p, pxCurlyRi, t)
    var origName = p.tok.s
    markTypeIdent(p, nil)
@@ -933,7 +1054,7 @@ proc parseTypedefEnum(p: var TParser, result: PNode) =
    case p.tok.xkind
    of pxCurlyLe:
      getTok(p, result)
      var t = enumFields(p)
      var t = enumFields(p, constSection)
      eat(p, pxCurlyRi, t)
      if p.tok.xkind == pxSymbol:
        # typedef enum tagABC {} abc, *pabc;
@@ -960,27 +1081,36 @@ proc parseTypedefEnum(p: var TParser, result: PNode) =
    expectIdent(p)

proc parseTypeDef(p: var TParser): PNode =
  result = newNodeP(nkTypeSection, p)
  result = newNodeP(nkStmtList, p)
  var typeSection = newNodeP(nkTypeSection, p)
  var afterStatements = newNodeP(nkStmtList, p)
  while p.tok.xkind == pxSymbol and p.tok.s == "typedef":
    getTok(p, result)
    getTok(p, typeSection)
    inc(p.inTypeDef)
    expectIdent(p)
    case p.tok.s
    of "struct": parseTypedefStruct(p, result, isUnion=false)
    of "union": parseTypedefStruct(p, result, isUnion=true)
    of "enum": parseTypedefEnum(p, result)
    of "struct": parseTypedefStruct(p, typeSection, result, isUnion=false)
    of "union": parseTypedefStruct(p, typeSection, result, isUnion=true)
    of "enum":
      var constSection = newNodeP(nkConstSection, p)
      parseTypedefEnum(p, typeSection, constSection)
      addSon(afterStatements, constSection)
    of "class":
      if pfCpp in p.options.flags:
        parseTypedefStruct(p, result, isUnion=false)
        parseTypedefStruct(p, typeSection, result, isUnion=false)
      else:
        var t = typeAtom(p)
        otherTypeDef(p, result, t)
        otherTypeDef(p, typeSection, t)
    else:
      var t = typeAtom(p)
      otherTypeDef(p, result, t)
      otherTypeDef(p, typeSection, t)
    eat(p, pxSemicolon)
    dec(p.inTypeDef)

  addSon(result, typeSection)
  for s in afterStatements:
    addSon(result, s)

proc skipDeclarationSpecifiers(p: var TParser) =
  while p.tok.xkind == pxSymbol:
    case p.tok.s
@@ -1092,10 +1222,6 @@ proc declaration(p: var TParser): PNode =
    result = parseVarDecl(p, baseTyp, rettyp, origName)
  assert result != nil

proc createConst(name, typ, val: PNode, p: TParser): PNode =
  result = newNodeP(nkConstDef, p)
  addSon(result, name, typ, val)

proc enumSpecifier(p: var TParser): PNode =
  saveContext(p)
  getTok(p, nil) # skip "enum"
@@ -1141,12 +1267,16 @@ proc enumSpecifier(p: var TParser): PNode =
      closeContext(p)
      var name = result
      # create a type section containing the enum
      result = newNodeP(nkTypeSection, p)
      result = newNodeP(nkStmtList, p)
      var tSection = newNodeP(nkTypeSection, p)
      var t = newNodeP(nkTypeDef, p)
      getTok(p, t)
      var e = enumFields(p)
      var constSection = newNodeP(nkConstSection, p)
      var e = enumFields(p, constSection)
      addSon(t, exportSym(p, name, origName), ast.emptyNode, e)
      addSon(result, t)
      addSon(tSection, t)
      addSon(result, tSection)
      addSon(result, constSection)
      eat(p, pxCurlyRi, result)
      eat(p, pxSemicolon)
    of pxSemicolon:
@@ -1608,8 +1738,8 @@ proc declarationOrStatement(p: var TParser): PNode =
    result = expressionStatement(p)
  assert result != nil

proc parseTuple(p: var TParser, isUnion: bool): PNode =
  result = parseStructBody(p, isUnion, nkTupleTy)
proc parseTuple(p: var TParser, statements: PNode, isUnion: bool): PNode =
  parseStructBody(p, statements, isUnion, nkTupleTy)

proc parseTrailingDefinedIdents(p: var TParser, result, baseTyp: PNode) =
  var varSection = newNodeP(nkVarSection, p)
@@ -1640,13 +1770,13 @@ proc parseStandaloneStruct(p: var TParser, isUnion: bool): PNode =
  if p.tok.xkind in {pxCurlyLe, pxSemiColon}:
    if origName.len > 0:
      var name = mangledIdent(origName, p)
      var t = parseStruct(p, isUnion)
      var t = parseStruct(p, result, isUnion)
      var typeSection = newNodeP(nkTypeSection, p)
      addTypeDef(typeSection, structPragmas(p, name, origName), t)
      addSon(result, typeSection)
      parseTrailingDefinedIdents(p, result, name)
    else:
      var t = parseTuple(p, isUnion)
      var t = parseTuple(p, result, isUnion)
      parseTrailingDefinedIdents(p, result, t)
  else:
    backtrackContext(p)
@@ -2034,7 +2164,7 @@ proc parseStandaloneClass(p: var TParser, isStruct: bool): PNode =
      addTypeDef(typeSection, structPragmas(p, name, origName), t)
      parseTrailingDefinedIdents(p, result, name)
    else:
      var t = parseTuple(p, isUnion=false)
      var t = parseTuple(p, result, isUnion=false)
      parseTrailingDefinedIdents(p, result, t)
  else:
    backtrackContext(p)

compiler/c2nim/tests/enum.h (new file, 40 lines)
@@ -0,0 +1,40 @@

enum vehicles
{
  car = 0x10,
  truck,
  boat = 0x01,
  ship = 1,
  speedboat = 1,
  bicycle = 4,
  bobycar
};

enum
{
  red = 4,
  green = 2,
  blue
};

typedef enum food
{
  bread = 4,
  toast = 4,
  bun = 0x04,
  cucumber = 2,
  chocolate = 6
};

typedef enum numbers
{
  one = 1,
  two,
  nten = - 10,
  nnine,
  four = 4,
  three = + 3,
  positivenine = + 9,
  nfour = - 4,
  negativeten = -10
};
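
Note: judging from the enumFields/createConst changes above, duplicated values such as ship and speedboat should come out as consts aliasing the first enum member with that value, and the remaining members should be emitted in sorted order. A rough hand-written sketch (not the tool's verbatim output; exact names, exports and literal formatting may differ):

type
  vehicles = enum
    boat = 1, bicycle = 4, bobycar, car = 16, truck

const
  ship = boat
  speedboat = boat
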
compiler/c2nim/tests/struct_anonym.h (new file, 27 lines)
@@ -0,0 +1,27 @@

struct normal{
  int a;
  int b;
};

typedef struct outerStruct {
  struct normal a_nomal_one;

  int a;

  struct {
    union {
      int b;
    } a_union_in_the_struct;

    int c;
  };

  union {
    int d;

    struct {
      int e;
    } a_struct_in_the_union;
  } a_union;
};
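
Note: a hand-written sketch of roughly how the anonymous members above should translate after this change. The numeric suffixes below are placeholders; the real names carry the value computed by hashPosition:

type
  normal = object
    a: cint
    b: cint
  INNER_C_UNION_1 {.union.} = object
    b: cint
  INNER_C_STRUCT_2 = object
    a_union_in_the_struct: INNER_C_UNION_1
    c: cint
  INNER_C_STRUCT_3 = object
    e: cint
  INNER_C_UNION_4 {.union.} = object
    d: cint
    a_struct_in_the_union: INNER_C_STRUCT_3
  outerStruct = object
    a_nomal_one: normal
    a: cint
    ano_5: INNER_C_STRUCT_2   # an unnamed member gets a generated "ano_" field
    a_union: INNER_C_UNION_4
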
@@ -79,6 +79,7 @@ Files: "lib/system/*.nim"
Files: "lib/core/*.nim"
Files: "lib/pure/*.nim"
Files: "lib/pure/collections/*.nim"
Files: "lib/pure/concurrency/*.nim"
Files: "lib/impure/*.nim"
Files: "lib/wrappers/*.nim"

@@ -828,6 +828,9 @@ proc typeSectionFinalPass(c: PContext, n: PNode) =
        getCurrOwner(), s.info)

proc semTypeSection(c: PContext, n: PNode): PNode =
  ## Processes a type section. This must be done in separate passes, in order
  ## to allow the type definitions in the section to reference each other
  ## without regard for the order of their definitions.
  typeSectionLeftSidePass(c, n)
  typeSectionRightSidePass(c, n)
  typeSectionFinalPass(c, n)
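
Note: a small made-up example (not from this commit) of why the left, right and final passes have to stay separate; both names must be registered before either right-hand side is analysed:

type
  TPerson = ref object
    clubs: seq[TClub]     # uses TClub before its definition has been processed
  TClub = ref object
    members: seq[TPerson]
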
@@ -1118,6 +1118,11 @@ proc typeAllowed(t: PType, kind: TSymKind): bool =
proc align(address, alignment: BiggestInt): BiggestInt =
  result = (address + (alignment - 1)) and not (alignment - 1)

const
  szNonConcreteType* = -3
  szIllegalRecursion* = -2
  szUnknownSize* = -1

proc computeSizeAux(typ: PType, a: var BiggestInt): BiggestInt
proc computeRecSizeAux(n: PNode, a, currOffset: var BiggestInt): BiggestInt =
  var maxAlign, maxSize, b, res: BiggestInt
@@ -1151,14 +1156,9 @@ proc computeRecSizeAux(n: PNode, a, currOffset: var BiggestInt): BiggestInt =
  of nkSym:
    result = computeSizeAux(n.sym.typ, a)
    n.sym.offset = int(currOffset)
  else:
    internalError("computeRecSizeAux()")
  else:
    a = 1
    result = - 1

const
  szIllegalRecursion* = -2
  szUnknownSize* = -1
    result = szNonConcreteType

proc computeSizeAux(typ: PType, a: var BiggestInt): BiggestInt =
  var res, maxAlign, length, currOffset: BiggestInt
@@ -787,10 +787,12 @@ proc genMagic(c: PCtx; n: PNode; dest: var TDest) =
    c.freeTemp(tmp)
  of mSwap:
    unused(n, dest)
    var d = c.genx(n.sons[1])
    var tmp = c.genx(n.sons[2])
    c.gABC(n, opcSwap, d, tmp)
    c.freeTemp(tmp)
    var
      d1 = c.genx(n.sons[1])
      d2 = c.genx(n.sons[2])
    c.gABC(n, opcSwap, d1, d2)
    c.genAsgnPatch(n.sons[1], d1)
    c.genAsgnPatch(n.sons[2], d2)
  of mIsNil: genUnaryABC(c, n, dest, opcIsNil)
  of mCopyStr:
    if dest < 0: dest = c.getTemp(n.typ)
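
Note: an assumed illustration (not taken from the commit) of the kind of compile-time code the extra genAsgnPatch calls are aimed at; swapping through locations such as array elements needs the registers written back after opcSwap:

static:
  var a = [1, 2]
  swap(a[0], a[1])
  assert a == [2, 1]
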
@@ -26,12 +26,12 @@ export TPort
## **Note:** This module is still largely experimental.


# TODO: Discarded void PFutures need to checked for exception.
# TODO: Discarded void PFutures need to be checked for exception.
# TODO: ``except`` statement (without `try`) does not work.
# TODO: Multiple exception names in a ``except`` don't work.
# TODO: The effect system (raises: []) has trouble with my try transformation.
# TODO: Can't await in a 'except' body

# TODO: getCurrentException(Msg) don't work

# -- Futures

@@ -77,7 +77,8 @@ proc fail*[T](future: PFuture[T], error: ref EBase) =
    # This is to prevent exceptions from being silently ignored when a future
    # is discarded.
    # TODO: This may turn out to be a bad idea.
    raise error
    # Turns out this is a bad idea.
    #raise error

proc `callback=`*(future: PFutureBase, cb: proc () {.closure,gcsafe.}) =
  ## Sets the callback proc to be called when the future completes.
@@ -775,14 +776,16 @@ proc accept*(socket: TAsyncFD): PFuture[TAsyncFD] =

# -- Await Macro

template createCb*(retFutureSym, iteratorNameSym: expr): stmt {.immediate.} =
template createCb*(retFutureSym, iteratorNameSym,
                   name: expr): stmt {.immediate.} =
  var nameIterVar = iteratorNameSym
  proc cb {.closure,gcsafe.} =
    try:
      if not nameIterVar.finished:
        var next = nameIterVar()
        if next == nil:
          assert retFutureSym.finished, "Async procedure's return Future was not finished."
          assert retFutureSym.finished, "Async procedure's (" &
                 name & ") return Future was not finished."
        else:
          next.callback = cb
    except:
@@ -987,7 +990,8 @@ macro async*(prc: stmt): stmt {.immediate.} =

  # -> createCb(retFuture)
  var cbName = newIdentNode("cb")
  var procCb = newCall("createCb", retFutureSym, iteratorNameSym)
  var procCb = newCall("createCb", retFutureSym, iteratorNameSym,
                       newStrLitNode(prc[0].getName))
  outerProcBody.add procCb

  # -> return retFuture
@@ -1010,6 +1014,7 @@ macro async*(prc: stmt): stmt {.immediate.} =
  result[6] = outerProcBody

  #echo(treeRepr(result))
  #if prc[0].getName == "routeReq":
    #echo(toStrLit(result))

proc recvLine*(socket: TAsyncFD): PFuture[string] {.async.} =
@@ -112,6 +112,10 @@ proc incl*[A](s: var TSet[A], key: A) =
  ## includes an element `key` in `s`.
  inclImpl()

proc incl*[A](s: var TSet[A], other: TSet[A]) =
  ## includes everything in `other` in `s`
  for item in other: incl(s, item)

proc excl*[A](s: var TSet[A], key: A) =
  ## excludes `key` from the set `s`.
  var index = rawGet(s, key)
@@ -119,6 +123,10 @@ proc excl*[A](s: var TSet[A], key: A) =
    s.data[index].slot = seDeleted
    dec(s.counter)

proc excl*[A](s: var TSet[A], other: TSet[A]) =
  ## excludes everything in `other` from `s`.
  for item in other: excl(s, item)

proc containsOrIncl*[A](s: var TSet[A], key: A): bool =
  ## returns true if `s` contains `key`, otherwise `key` is included in `s`
  ## and false is returned.
@@ -147,6 +155,43 @@ proc `$`*[A](s: TSet[A]): string =
  ## The `$` operator for hash sets.
  dollarImpl()

proc union*[A](s1, s2: TSet[A]): TSet[A] =
  ## returns a new set of all items that are contained in at
  ## least one of `s1` and `s2`
  result = s1
  incl(result, s2)

proc intersection*[A](s1, s2: TSet[A]): TSet[A] =
  ## returns a new set of all items that are contained in both `s1` and `s2`
  result = initSet[A](min(s1.data.len, s2.data.len))
  for item in s1:
    if item in s2: incl(result, item)

proc symmetricDifference*[A](s1, s2: TSet[A]): TSet[A] =
  ## returns a new set of all items that are contained in either
  ## `s1` or `s2`, but not both
  result = s1
  for item in s2:
    if containsOrIncl(result, item): excl(result, item)

proc `+`*[A](s1, s2: TSet[A]): TSet[A] {.inline.} =
  ## alias for `union`
  result = union(s1, s2)

proc `*`*[A](s1, s2: TSet[A]): TSet[A] {.inline.} =
  ## alias for `intersection`
  result = intersection(s1, s2)

proc `-+-`*[A](s1, s2: TSet[A]): TSet[A] {.inline.} =
  ## alias for `symmetricDifference`
  result = symmetricDifference(s1, s2)

proc disjoint*[A](s1, s2: TSet[A]): bool =
  ## returns true iff `s1` and `s2` have no items in common
  for item in s1:
    if item in s2: return false
  return true

# ------------------------------ ordered set ------------------------------

type
@@ -211,6 +256,10 @@ proc incl*[A](s: var TOrderedSet[A], key: A) =
  ## includes an element `key` in `s`.
  inclImpl()

proc incl*[A](s: var TSet[A], other: TOrderedSet[A]) =
  ## includes everything in `other` in `s`
  for item in other: incl(s, item)

proc containsOrIncl*[A](s: var TOrderedSet[A], key: A): bool =
  ## returns true if `s` contains `key`, otherwise `key` is included in `s`
  ## and false is returned.
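
Note: a minimal usage sketch for the set-with-set incl/excl overloads added above:

import sets

var s = toSet([1, 2, 3])
incl(s, toSet([3, 4]))   # pulls in every element of the other set
excl(s, toSet([1, 4]))   # removes every element of the other set
assert 2 in s and 3 in s
assert 1 notin s and 4 notin s
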
tests/sets/tsets3.nim (new file, 81 lines)
@@ -0,0 +1,81 @@
include sets

let
  s1: TSet[int] = toSet([1, 2, 4, 8, 16])
  s2: TSet[int] = toSet([1, 2, 3, 5, 8])
  s3: TSet[int] = toSet([3, 5, 7])

block union:
  let
    s1_s2 = union(s1, s2)
    s1_s3 = s1 + s3
    s2_s3 = s2 + s3

  assert s1_s2.len == 7
  assert s1_s3.len == 8
  assert s2_s3.len == 6

  for i in s1:
    assert i in s1_s2
    assert i in s1_s3
  for i in s2:
    assert i in s1_s2
    assert i in s2_s3
  for i in s3:
    assert i in s1_s3
    assert i in s2_s3

  assert((s1 + s1) == s1)
  assert((s2 + s1) == s1_s2)

block intersection:
  let
    s1_s2 = intersection(s1, s2)
    s1_s3 = intersection(s1, s3)
    s2_s3 = s2 * s3

  assert s1_s2.len == 3
  assert s1_s3.len == 0
  assert s2_s3.len == 2

  for i in s1_s2:
    assert i in s1
    assert i in s2
  for i in s1_s3:
    assert i in s1
    assert i in s3
  for i in s2_s3:
    assert i in s2
    assert i in s3

  assert((s2 * s2) == s2)
  assert((s3 * s2) == s2_s3)

block symmetricDifference:
  let
    s1_s2 = symmetricDifference(s1, s2)
    s1_s3 = s1 -+- s3
    s2_s3 = s2 -+- s3

  assert s1_s2.len == 4
  assert s1_s3.len == 8
  assert s2_s3.len == 4

  for i in s1:
    assert i in s1_s2 xor i in s2
    assert i in s1_s3 xor i in s3
  for i in s2:
    assert i in s1_s2 xor i in s1
    assert i in s2_s3 xor i in s3
  for i in s3:
    assert i in s1_s3 xor i in s1
    assert i in s2_s3 xor i in s2

  assert((s3 -+- s3) == initSet[int]())
  assert((s3 -+- s1) == s1_s3)

block disjoint:
  assert(not disjoint(s1, s2))
  assert disjoint(s1, s3)
  assert(not disjoint(s2, s3))
  assert(not disjoint(s2, s2))