Replaced ignoreStackAndRegisters with stackSize in GC_step

This commit is contained in:
Ruslan Mustakov
2016-05-11 20:00:22 +06:00
parent f288eb7543
commit 0dc35b7841
3 changed files with 55 additions and 33 deletions

View File

@@ -56,7 +56,7 @@ file as well). With this switch the GC supports the following operations:
.. code-block:: nim
proc GC_setMaxPause*(MaxPauseInUs: int)
proc GC_step*(us: int, strongAdvice, ignoreStackAndRegisters = false)
proc GC_step*(us: int, strongAdvice = false, stackSize = -1)
The unit of the parameters ``MaxPauseInUs`` and ``us`` is microseconds.
@@ -75,11 +75,13 @@ These two procs are the two modus operandi of the realtime GC:
This allows the GC to perform some work for up to ``us`` time. This is
useful to call in a main loop to ensure the GC can do its work. To
bind all GC activity to a ``GC_step`` call, deactivate the GC with
``GC_disable`` at program startup. Notice that you may ask GC to not
scan stack and registers for references via ``ignoreStackAndRegisters``
parameter. This may reduce the step time depending on the stack depth,
but use it only when you are sure that neither the stack nor the registers
contain unique references to objects that must be preserved.
``GC_disable`` at program startup. If ``strongAdvice`` is set to ``true``,
the GC will be forced to perform a collection cycle. Otherwise, the GC may
decide not to do anything if there is not much garbage to collect.
You may also specify the current stack size via the ``stackSize`` parameter.
This can improve performance when you know that there are no unique Nim
references below a certain point on the stack. Make sure the size you
specify is greater than the potential worst-case size.
These procs provide a "best effort" realtime guarantee; in particular the
cycle collector is not aware of deadlines yet. Deactivate it to get more

View File

@@ -905,19 +905,19 @@ proc unmarkStackAndRegisters(gch: var GcHeap) =
#sysAssert c.typ != nil, "unmarkStackAndRegisters 2"
gch.decStack.len = 0
proc collectCTBody(gch: var GcHeap, ignoreStackAndRegisters = false) =
proc collectCTBody(gch: var GcHeap) =
when withRealTime:
let t0 = getticks()
sysAssert(allocInv(gch.region), "collectCT: begin")
when not defined(nimCoroutines):
gch.stat.maxStackSize = max(gch.stat.maxStackSize, stackSize())
sysAssert(gch.decStack.len == 0, "collectCT")
prepareForInteriorPointerChecking(gch.region)
if not ignoreStackAndRegisters:
when not defined(nimCoroutines):
gch.stat.maxStackSize = max(gch.stat.maxStackSize, stackSize())
markStackAndRegisters(gch)
markThreadStacks(gch)
gch.stat.maxStackCells = max(gch.stat.maxStackCells, gch.decStack.len)
inc(gch.stat.stackScans)
markStackAndRegisters(gch)
markThreadStacks(gch)
gch.stat.maxStackCells = max(gch.stat.maxStackCells, gch.decStack.len)
inc(gch.stat.stackScans)
if collectZCT(gch):
when cycleGC:
if getOccupiedMem(gch.region) >= gch.cycleThreshold or alwaysCycleGC:
@@ -927,8 +927,7 @@ proc collectCTBody(gch: var GcHeap, ignoreStackAndRegisters = false) =
gch.cycleThreshold = max(InitialCycleThreshold, getOccupiedMem() *
CycleIncrease)
gch.stat.maxThreshold = max(gch.stat.maxThreshold, gch.cycleThreshold)
if not ignoreStackAndRegisters:
unmarkStackAndRegisters(gch)
unmarkStackAndRegisters(gch)
sysAssert(allocInv(gch.region), "collectCT: end")
when withRealTime:
@@ -972,17 +971,28 @@ when withRealTime:
proc GC_setMaxPause*(MaxPauseInUs: int) =
gch.maxPause = MaxPauseInUs.toNano
proc GC_step(gch: var GcHeap, us: int, strongAdvice: bool, ignoreStackAndRegisters: bool) =
proc GC_step(gch: var GcHeap, us: int, strongAdvice: bool) =
acquire(gch)
gch.maxPause = us.toNano
if (gch.zct.len >= ZctThreshold or (cycleGC and
getOccupiedMem(gch.region)>=gch.cycleThreshold) or alwaysGC) or
strongAdvice:
collectCTBody(gch, ignoreStackAndRegisters)
collectCTBody(gch)
release(gch)
proc GC_step*(us: int, strongAdvice, ignoreStackAndRegisters = false) =
GC_step(gch, us, strongAdvice, ignoreStackAndRegisters)
## Public stepping entry point. Optionally narrows the stack range the GC
## will scan (via a temporarily adjusted ``stackBottom``) before delegating
## to the internal ``GC_step`` overload. ``stackSize < 0`` (the default)
## leaves the registered stack bounds untouched.
proc GC_step*(us: int, strongAdvice = false, stackSize = -1) {.noinline.} =
# Volatile local: its address marks the real current top of the stack.
var stackTop {.volatile.}: pointer
# Remember the registered stack bottom so it can be restored afterwards.
let prevStackBottom = gch.stackBottom
if stackSize >= 0:
stackTop = addr(stackTop)
# Temporarily move stackBottom so only ``stackSize`` bytes beyond the
# current top are scanned; the adjustment direction depends on whether
# the stack grows upwards or downwards on this platform.
# NOTE(review): the 6 pointer-sized words look like slack for this
# frame/spilled registers — confirm against markStackAndRegisters.
when stackIncreases:
gch.stackBottom = cast[pointer](
cast[ByteAddress](stackTop) - sizeof(pointer) * 6 - stackSize)
else:
gch.stackBottom = cast[pointer](
cast[ByteAddress](stackTop) + sizeof(pointer) * 6 + stackSize)
GC_step(gch, us, strongAdvice)
# Restore the original stack bottom (no-op if it was never adjusted).
# NOTE(review): not exception-safe — if GC_step raises, stackBottom
# stays adjusted; consider try/finally in the live source.
gch.stackBottom = prevStackBottom
when not defined(useNimRtl):
proc GC_disable() =

View File

@@ -894,18 +894,18 @@ proc unmarkStackAndRegisters(gch: var GcHeap) =
decRef(d[i])
gch.decStack.len = 0
proc collectCTBody(gch: var GcHeap, ignoreStackAndRegisters = false) =
proc collectCTBody(gch: var GcHeap) =
when withRealTime:
let t0 = getticks()
sysAssert(allocInv(gch.region), "collectCT: begin")
when not defined(nimCoroutines):
gch.stat.maxStackSize = max(gch.stat.maxStackSize, stackSize())
sysAssert(gch.decStack.len == 0, "collectCT")
prepareForInteriorPointerChecking(gch.region)
if not ignoreStackAndRegisters:
when not defined(nimCoroutines):
gch.stat.maxStackSize = max(gch.stat.maxStackSize, stackSize())
markStackAndRegisters(gch)
gch.stat.maxStackCells = max(gch.stat.maxStackCells, gch.decStack.len)
inc(gch.stat.stackScans)
markStackAndRegisters(gch)
gch.stat.maxStackCells = max(gch.stat.maxStackCells, gch.decStack.len)
inc(gch.stat.stackScans)
if collectZCT(gch):
when cycleGC:
if getOccupiedMem(gch.region) >= gch.cycleThreshold or alwaysCycleGC:
@@ -914,8 +914,7 @@ proc collectCTBody(gch: var GcHeap, ignoreStackAndRegisters = false) =
gch.cycleThreshold = max(InitialCycleThreshold, getOccupiedMem() *
CycleIncrease)
gch.stat.maxThreshold = max(gch.stat.maxThreshold, gch.cycleThreshold)
if not ignoreStackAndRegisters:
unmarkStackAndRegisters(gch)
unmarkStackAndRegisters(gch)
sysAssert(allocInv(gch.region), "collectCT: end")
when withRealTime:
@@ -950,15 +949,26 @@ when withRealTime:
proc GC_setMaxPause*(MaxPauseInUs: int) =
gch.maxPause = MaxPauseInUs.toNano
proc GC_step(gch: var GcHeap, us: int, strongAdvice, ignoreStackAndRegisters: bool) =
proc GC_step(gch: var GcHeap, us: int, strongAdvice: bool) =
gch.maxPause = us.toNano
if (gch.zct.len >= ZctThreshold or (cycleGC and
getOccupiedMem(gch.region)>=gch.cycleThreshold) or alwaysGC) or
strongAdvice:
collectCTBody(gch, ignoreStackAndRegisters)
collectCTBody(gch)
proc GC_step*(us: int, strongAdvice, ignoreStackAndRegisters = false) =
GC_step(gch, us, strongAdvice, ignoreStackAndRegisters)
## Public stepping entry point. Optionally narrows the stack range the GC
## will scan (via a temporarily adjusted ``stackBottom``) before delegating
## to the internal ``GC_step`` overload. ``stackSize < 0`` (the default)
## leaves the registered stack bounds untouched.
proc GC_step*(us: int, strongAdvice = false, stackSize = -1) {.noinline.} =
# Volatile local: its address marks the real current top of the stack.
var stackTop {.volatile.}: pointer
# Remember the registered stack bottom so it can be restored afterwards.
let prevStackBottom = gch.stackBottom
if stackSize >= 0:
stackTop = addr(stackTop)
# Temporarily move stackBottom so only ``stackSize`` bytes beyond the
# current top are scanned; the adjustment direction depends on whether
# the stack grows upwards or downwards on this platform.
# NOTE(review): the 6 pointer-sized words look like slack for this
# frame/spilled registers — confirm against markStackAndRegisters.
when stackIncreases:
gch.stackBottom = cast[pointer](
cast[ByteAddress](stackTop) - sizeof(pointer) * 6 - stackSize)
else:
gch.stackBottom = cast[pointer](
cast[ByteAddress](stackTop) + sizeof(pointer) * 6 + stackSize)
GC_step(gch, us, strongAdvice)
# Restore the original stack bottom (no-op if it was never adjusted).
# NOTE(review): not exception-safe — if GC_step raises, stackBottom
# stays adjusted; consider try/finally in the live source.
gch.stackBottom = prevStackBottom
when not defined(useNimRtl):
proc GC_disable() =