gc_ms.nim

#
#
#            Nim's Runtime Library
#        (c) Copyright 2015 Andreas Rumpf
#
#    See the file "copying.txt", included in this
#    distribution, for details about the copyright.
#

# A simple mark&sweep garbage collector for Nim. Define the
# symbol ``gcUseBitvectors`` to generate a variant of this GC.

{.push profiler:off.}

const
  InitialThreshold = 4*1024*1024 # 4 MiB because marking&sweeping is slow
  withBitvectors = defined(gcUseBitvectors)
    # bitvectors are significantly faster for GC-bench, but slower for
    # bootstrapping and use more memory
  rcWhite = 0
  rcGrey = 1   # unused
  rcBlack = 2

template mulThreshold(x): untyped = x * 2
when defined(memProfiler):
  proc nimProfile(requestedSize: int)

when hasThreadSupport:
  import std/sharedlist

type
  WalkOp = enum
    waMarkGlobal,  # we need to mark conservatively for global marker procs
                   # as these may refer to a global var and not to a thread
                   # local
    waMarkPrecise  # fast precise marking

  Finalizer {.compilerproc.} = proc (self: pointer) {.nimcall, benign, raises: [].}
    # A ref type can have a finalizer that is called before the object's
    # storage is freed.

  GcStat = object
    collections: int         # number of performed full collections
    maxThreshold: int        # max threshold that has been set
    maxStackSize: int        # max stack size
    freedObjects: int        # number of objects freed by the sweep phase

  GcStack {.final, pure.} = object
    when nimCoroutines:
      prev: ptr GcStack
      next: ptr GcStack
      maxStackSize: int      # Used to track statistics because we cannot use
                             # GcStat.maxStackSize when multiple stacks exist.
    bottom: pointer
    when nimCoroutines:
      pos: pointer

  GcHeap = object            # this contains the zero count and
                             # non-zero count table
    stack: GcStack
    when nimCoroutines:
      activeStack: ptr GcStack # current executing coroutine stack.
    cycleThreshold: int
    when useCellIds:
      idGenerator: int
    when withBitvectors:
      allocated, marked: CellSet
    tempStack: CellSeq       # temporary stack for recursion elimination
    recGcLock: int           # prevent recursion via finalizers; no thread lock
    region: MemRegion        # garbage collected region
    stat: GcStat
    when hasThreadSupport:
      toDispose: SharedList[pointer]
    gcThreadId: int
    additionalRoots: CellSeq # dummy roots for GC_ref/unref
    when defined(nimTracing):
      tracing: bool
      indentation: int
var
  gch {.rtlThreadVar.}: GcHeap

when not defined(useNimRtl):
  instantiateForRegion(gch.region)

template gcAssert(cond: bool, msg: string) =
  when defined(useGcAssert):
    if not cond:
      cstderr.rawWrite "[GCASSERT] "
      cstderr.rawWrite msg
      rawQuit 1

proc cellToUsr(cell: PCell): pointer {.inline.} =
  # convert object (=pointer to refcount) to pointer to userdata
  result = cast[pointer](cast[int](cell)+%ByteAddress(sizeof(Cell)))

proc usrToCell(usr: pointer): PCell {.inline.} =
  # convert pointer to userdata to object (=pointer to refcount)
  result = cast[PCell](cast[int](usr)-%ByteAddress(sizeof(Cell)))
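
# Note (added commentary, not part of the original source): every managed
# object is preceded by a hidden `Cell` header, so the user pointer and the
# cell pointer differ by exactly `sizeof(Cell)`. A minimal sketch of the
# round trip, assuming `p` is a valid GC-allocated user pointer:
#
#   let c = usrToCell(p)        # step back over the header
#   assert cellToUsr(c) == p    # stepping forward again recovers `p`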
proc extGetCellType(c: pointer): PNimType {.compilerproc.} =
  # used for code generation concerning debugging
  result = usrToCell(c).typ

proc unsureAsgnRef(dest: PPointer, src: pointer) {.inline, compilerproc.} =
  dest[] = src

proc internRefcount(p: pointer): int {.exportc: "getRefcount".} =
  result = 0

# this has to equal zero, otherwise we have to round up UnitsPerPage:
when BitsPerPage mod (sizeof(int)*8) != 0:
  {.error: "(BitsPerPage mod BitsPerUnit) should be zero!".}
# forward declarations:
proc collectCT(gch: var GcHeap; size: int) {.benign, raises: [].}
proc forAllChildren(cell: PCell, op: WalkOp) {.benign, raises: [].}
proc doOperation(p: pointer, op: WalkOp) {.benign, raises: [].}
proc forAllChildrenAux(dest: pointer, mt: PNimType, op: WalkOp) {.benign, raises: [].}
# we need the prototype here for debugging purposes

when defined(nimGcRefLeak):
  const
    MaxTraceLen = 20 # tracking the last 20 calls is enough

  type
    GcStackTrace = object
      lines: array[0..MaxTraceLen-1, cstring]
      files: array[0..MaxTraceLen-1, cstring]

  proc captureStackTrace(f: PFrame, st: var GcStackTrace) =
    const
      firstCalls = 5
    var
      it = f
      i = 0
      total = 0
    while it != nil and i <= high(st.lines)-(firstCalls-1):
      # the (-1) is for the "..." entry
      st.lines[i] = it.procname
      st.files[i] = it.filename
      inc(i)
      inc(total)
      it = it.prev
    var b = it
    while it != nil:
      inc(total)
      it = it.prev
    for j in 1..total-i-(firstCalls-1):
      if b != nil: b = b.prev
    if total != i:
      st.lines[i] = "..."
      st.files[i] = "..."
      inc(i)
    while b != nil and i <= high(st.lines):
      st.lines[i] = b.procname
      st.files[i] = b.filename
      inc(i)
      b = b.prev

  var ax: array[10_000, GcStackTrace]
proc nimGCref(p: pointer) {.compilerproc.} =
  # we keep it from being collected by pretending it's not even allocated:
  when false:
    when withBitvectors: excl(gch.allocated, usrToCell(p))
    else: usrToCell(p).refcount = rcBlack
  when defined(nimGcRefLeak):
    captureStackTrace(framePtr, ax[gch.additionalRoots.len])
  add(gch.additionalRoots, usrToCell(p))

proc nimGCunref(p: pointer) {.compilerproc.} =
  let cell = usrToCell(p)
  var L = gch.additionalRoots.len-1
  var i = L
  let d = gch.additionalRoots.d
  while i >= 0:
    if d[i] == cell:
      d[i] = d[L]
      when defined(nimGcRefLeak):
        ax[i] = ax[L]
      dec gch.additionalRoots.len
      break
    dec(i)
  when false:
    when withBitvectors: incl(gch.allocated, usrToCell(p))
    else: usrToCell(p).refcount = rcWhite
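
# Note (added commentary): `nimGCref`/`nimGCunref` back the public
# `GC_ref`/`GC_unref` calls; a referenced cell is simply appended to
# `gch.additionalRoots`, which `markGlobals` later treats as extra roots.
# A hedged usage sketch (user code, not part of this module):
#
#   var r: ref int
#   new(r)
#   GC_ref(r)    # keep `r[]` alive even with no remaining Nim reference
#   GC_unref(r)  # drop the extra root again; pairs with GC_ref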
when defined(nimGcRefLeak):
  proc writeLeaks() =
    for i in 0..gch.additionalRoots.len-1:
      c_fprintf(stdout, "[Heap] NEW STACK TRACE\n")
      for ii in 0..MaxTraceLen-1:
        let line = ax[i].lines[ii]
        let file = ax[i].files[ii]
        if isNil(line): break
        c_fprintf(stdout, "[Heap] %s(%s)\n", file, line)

include gc_common
proc initGC() =
  when not defined(useNimRtl):
    gch.cycleThreshold = InitialThreshold
    gch.stat.collections = 0
    gch.stat.maxThreshold = 0
    gch.stat.maxStackSize = 0
    init(gch.tempStack)
    init(gch.additionalRoots)
    when withBitvectors:
      init(gch.allocated)
      init(gch.marked)
    when hasThreadSupport:
      init(gch.toDispose)
    gch.gcThreadId = atomicInc(gHeapidGenerator) - 1
    gcAssert(gch.gcThreadId >= 0, "invalid computed thread ID")
proc forAllSlotsAux(dest: pointer, n: ptr TNimNode, op: WalkOp) {.benign.} =
  var d = cast[int](dest)
  case n.kind
  of nkSlot: forAllChildrenAux(cast[pointer](d +% n.offset), n.typ, op)
  of nkList:
    for i in 0..n.len-1:
      forAllSlotsAux(dest, n.sons[i], op)
  of nkCase:
    var m = selectBranch(dest, n)
    if m != nil: forAllSlotsAux(dest, m, op)
  of nkNone: sysAssert(false, "forAllSlotsAux")

proc forAllChildrenAux(dest: pointer, mt: PNimType, op: WalkOp) =
  var d = cast[int](dest)
  if dest == nil: return # nothing to do
  if ntfNoRefs notin mt.flags:
    case mt.kind
    of tyRef, tyString, tySequence: # leaf:
      doOperation(cast[PPointer](d)[], op)
    of tyObject, tyTuple:
      forAllSlotsAux(dest, mt.node, op)
    of tyArray, tyArrayConstr, tyOpenArray:
      for i in 0..(mt.size div mt.base.size)-1:
        forAllChildrenAux(cast[pointer](d +% i *% mt.base.size), mt.base, op)
    else: discard

proc forAllChildren(cell: PCell, op: WalkOp) =
  gcAssert(cell != nil, "forAllChildren: 1")
  gcAssert(cell.typ != nil, "forAllChildren: 2")
  gcAssert cell.typ.kind in {tyRef, tySequence, tyString}, "forAllChildren: 3"
  let marker = cell.typ.marker
  if marker != nil:
    marker(cellToUsr(cell), op.int)
  else:
    case cell.typ.kind
    of tyRef: # common case
      forAllChildrenAux(cellToUsr(cell), cell.typ.base, op)
    of tySequence:
      when not defined(nimSeqsV2):
        var d = cast[int](cellToUsr(cell))
        var s = cast[PGenericSeq](d)
        if s != nil:
          for i in 0..s.len-1:
            forAllChildrenAux(cast[pointer](d +% align(GenericSeqSize, cell.typ.base.align) +% i *% cell.typ.base.size), cell.typ.base, op)
    else: discard
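
# Note (added commentary): `forAllChildren` prefers the compiler-generated
# `marker` proc when the type has one and only falls back to the RTTI walk in
# `forAllChildrenAux`/`forAllSlotsAux`. Both paths report each child through
# `doOperation`, which in the default (non-tracing) build pushes the child
# cell onto `gch.tempStack` for the iterative marking loop in `mark`.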
proc rawNewObj(typ: PNimType, size: int, gch: var GcHeap): pointer =
  # generates a new object and sets its reference counter to 0
  incTypeSize typ, size
  gcAssert(typ.kind in {tyRef, tyString, tySequence}, "newObj: 1")
  collectCT(gch, size + sizeof(Cell))
  var res = cast[PCell](rawAlloc(gch.region, size + sizeof(Cell)))
  gcAssert((cast[int](res) and (MemAlign-1)) == 0, "newObj: 2")
  # now it is buffered in the ZCT
  res.typ = typ
  when leakDetector and not hasThreadSupport:
    if framePtr != nil and framePtr.prev != nil:
      res.filename = framePtr.prev.filename
      res.line = framePtr.prev.line
  res.refcount = 0
  when withBitvectors: incl(gch.allocated, res)
  when useCellIds:
    inc gch.idGenerator
    res.id = gch.idGenerator
  result = cellToUsr(res)

when useCellIds:
  proc getCellId*[T](x: ref T): int =
    let p = usrToCell(cast[pointer](x))
    result = p.id

{.pop.}
proc newObj(typ: PNimType, size: int): pointer {.compilerRtl.} =
  result = rawNewObj(typ, size, gch)
  zeroMem(result, size)
  when defined(memProfiler): nimProfile(size)

proc newObjNoInit(typ: PNimType, size: int): pointer {.compilerRtl.} =
  result = rawNewObj(typ, size, gch)
  when defined(memProfiler): nimProfile(size)

proc newObjRC1(typ: PNimType, size: int): pointer {.compilerRtl.} =
  result = rawNewObj(typ, size, gch)
  zeroMem(result, size)
  when defined(memProfiler): nimProfile(size)

when not defined(nimSeqsV2):
  {.push overflowChecks: on.}
  proc newSeq(typ: PNimType, len: int): pointer {.compilerRtl.} =
    # `newObj` already uses locks, so no need for them here.
    let size = align(GenericSeqSize, typ.base.align) + len * typ.base.size
    result = newObj(typ, size)
    cast[PGenericSeq](result).len = len
    cast[PGenericSeq](result).reserved = len
    when defined(memProfiler): nimProfile(size)

  proc newSeqRC1(typ: PNimType, len: int): pointer {.compilerRtl.} =
    let size = align(GenericSeqSize, typ.base.align) + len * typ.base.size
    result = newObj(typ, size)
    cast[PGenericSeq](result).len = len
    cast[PGenericSeq](result).reserved = len
    when defined(memProfiler): nimProfile(size)
  {.pop.}
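
# Note (added commentary): `newObj`, `newObjNoInit`, `newObjRC1` and the
# `newSeq` variants are the `compilerRtl` entry points the code generator
# calls for ref, string and seq allocation; they all funnel into `rawNewObj`,
# which may trigger a collection via `collectCT` before allocating from
# `gch.region`.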
proc growObj(old: pointer, newsize: int, gch: var GcHeap): pointer =
  collectCT(gch, newsize + sizeof(Cell))
  var ol = usrToCell(old)
  sysAssert(ol.typ != nil, "growObj: 1")
  gcAssert(ol.typ.kind in {tyString, tySequence}, "growObj: 2")

  var res = cast[PCell](rawAlloc(gch.region, newsize + sizeof(Cell)))
  var elemSize, elemAlign = 1
  if ol.typ.kind != tyString:
    elemSize = ol.typ.base.size
    elemAlign = ol.typ.base.align
  incTypeSize ol.typ, newsize

  var oldsize = align(GenericSeqSize, elemAlign) + cast[PGenericSeq](old).len*elemSize
  copyMem(res, ol, oldsize + sizeof(Cell))
  zeroMem(cast[pointer](cast[int](res) +% oldsize +% sizeof(Cell)),
          newsize-oldsize)
  sysAssert((cast[int](res) and (MemAlign-1)) == 0, "growObj: 3")
  when withBitvectors: incl(gch.allocated, res)
  when useCellIds:
    inc gch.idGenerator
    res.id = gch.idGenerator
  result = cellToUsr(res)
  when defined(memProfiler): nimProfile(newsize-oldsize)

proc growObj(old: pointer, newsize: int): pointer {.rtl.} =
  result = growObj(old, newsize, gch)

{.push profiler: off.}

# ----------------- collector -----------------------------------------------
proc mark(gch: var GcHeap, c: PCell) =
  when hasThreadSupport:
    for c in gch.toDispose:
      nimGCunref(c)
  when withBitvectors:
    incl(gch.marked, c)
    gcAssert gch.tempStack.len == 0, "stack not empty!"
    forAllChildren(c, waMarkPrecise)
    while gch.tempStack.len > 0:
      dec gch.tempStack.len
      var d = gch.tempStack.d[gch.tempStack.len]
      if not containsOrIncl(gch.marked, d):
        forAllChildren(d, waMarkPrecise)
  else:
    # XXX no 'if c.refCount != rcBlack' here?
    when defined(nimTracing):
      if gch.tracing:
        for i in 1..gch.indentation: c_fprintf(stdout, " ")
        c_fprintf(stdout, "start marking %p of type %s ((\n",
                  c, c.typ.name)
        inc gch.indentation, 2

    c.refcount = rcBlack
    gcAssert gch.tempStack.len == 0, "stack not empty!"
    forAllChildren(c, waMarkPrecise)
    while gch.tempStack.len > 0:
      dec gch.tempStack.len
      var d = gch.tempStack.d[gch.tempStack.len]
      if d.refcount == rcWhite:
        d.refcount = rcBlack
        forAllChildren(d, waMarkPrecise)

    when defined(nimTracing):
      if gch.tracing:
        dec gch.indentation, 2
        for i in 1..gch.indentation: c_fprintf(stdout, " ")
        c_fprintf(stdout, "finished marking %p of type %s))\n",
                  c, c.typ.name)
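
# Note (added commentary): without bitvectors the mark bit lives directly in
# each cell's `refcount` field: reachable cells are flipped from rcWhite to
# rcBlack, and the explicit `tempStack` replaces recursion so that deep
# object graphs cannot overflow the C stack. rcGrey is declared above but
# unused; this variant is not incremental.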
proc doOperation(p: pointer, op: WalkOp) =
  if p == nil: return
  var c: PCell = usrToCell(p)
  gcAssert(c != nil, "doOperation: 1")
  case op
  of waMarkGlobal: mark(gch, c)
  of waMarkPrecise:
    when defined(nimTracing):
      if c.refcount == rcWhite: mark(gch, c)
    else:
      add(gch.tempStack, c)

proc nimGCvisit(d: pointer, op: int) {.compilerRtl.} =
  doOperation(d, WalkOp(op))

proc freeCyclicCell(gch: var GcHeap, c: PCell) =
  inc gch.stat.freedObjects
  prepareDealloc(c)
  when reallyDealloc: rawDealloc(gch.region, c)
  else:
    gcAssert(c.typ != nil, "freeCyclicCell")
    zeroMem(c, sizeof(Cell))
proc sweep(gch: var GcHeap) =
  when withBitvectors:
    for c in gch.allocated.elementsExcept(gch.marked):
      gch.allocated.excl(c)
      freeCyclicCell(gch, c)
  else:
    for x in allObjects(gch.region):
      if isCell(x):
        # cast to PCell is correct here:
        var c = cast[PCell](x)
        if c.refcount == rcBlack: c.refcount = rcWhite
        else: freeCyclicCell(gch, c)
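
# Note (added commentary): the sweep walks every object in the region; black
# (reached) cells are reset to white for the next cycle, while cells still
# white are garbage and go through `freeCyclicCell`, which runs any finalizer
# via `prepareDealloc` before the memory is released.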
proc markGlobals(gch: var GcHeap) =
  if gch.gcThreadId == 0:
    when defined(nimTracing):
      if gch.tracing:
        c_fprintf(stdout, "------- globals marking phase:\n")
    for i in 0 .. globalMarkersLen-1: globalMarkers[i]()
  when defined(nimTracing):
    if gch.tracing:
      c_fprintf(stdout, "------- thread locals marking phase:\n")
  for i in 0 .. threadLocalMarkersLen-1: threadLocalMarkers[i]()
  when defined(nimTracing):
    if gch.tracing:
      c_fprintf(stdout, "------- additional roots marking phase:\n")
  let d = gch.additionalRoots.d
  for i in 0 .. gch.additionalRoots.len-1: mark(gch, d[i])
proc gcMark(gch: var GcHeap, p: pointer) {.inline.} =
  # stack addresses are not cell pointers, so turn them into cells first:
  var c = cast[int](p)
  if c >% PageSize:
    # fast check: does it look like a cell?
    var objStart = cast[PCell](interiorAllocatedPtr(gch.region, p))
    if objStart != nil:
      mark(gch, objStart)

proc markStackAndRegisters(gch: var GcHeap) {.noinline, cdecl.} =
  forEachStackSlot(gch, gcMark)
proc collectCTBody(gch: var GcHeap) =
  when not nimCoroutines:
    gch.stat.maxStackSize = max(gch.stat.maxStackSize, stackSize())
  when defined(nimTracing):
    if gch.tracing:
      c_fprintf(stdout, "------- stack marking phase:\n")
  prepareForInteriorPointerChecking(gch.region)
  markStackAndRegisters(gch)
  markGlobals(gch)
  sweep(gch)

  inc(gch.stat.collections)
  when withBitvectors:
    deinit(gch.marked)
    init(gch.marked)
  gch.cycleThreshold = max(InitialThreshold, getOccupiedMem().mulThreshold)
  gch.stat.maxThreshold = max(gch.stat.maxThreshold, gch.cycleThreshold)
  sysAssert(allocInv(gch.region), "collectCT: end")

proc collectCT(gch: var GcHeap; size: int) =
  let fmem = getFreeMem(gch.region)
  if (getOccupiedMem(gch.region) >= gch.cycleThreshold or
      size > fmem and fmem > InitialThreshold) and gch.recGcLock == 0:
    collectCTBody(gch)
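
# Note (added commentary): a collection is attempted only when the occupied
# memory reaches `cycleThreshold` (or a requested allocation no longer fits
# into the free memory) and the GC is not locked via `recGcLock`. After each
# run the threshold is recomputed with `mulThreshold`, i.e. roughly twice the
# memory that survived, but never below `InitialThreshold`:
#
#   # e.g. 6 MiB survive the sweep -> next collection near the 12 MiB mark
#   gch.cycleThreshold = max(InitialThreshold, getOccupiedMem().mulThreshold)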
when not defined(useNimRtl):
  proc GC_disable() =
    inc(gch.recGcLock)
  proc GC_enable() =
    when defined(nimDoesntTrackDefects):
      if gch.recGcLock <= 0:
        raise newException(AssertionDefect,
          "API usage error: GC_enable called but GC is already enabled")
    dec(gch.recGcLock)

  proc GC_setStrategy(strategy: GC_Strategy) = discard

  proc GC_enableMarkAndSweep() =
    gch.cycleThreshold = InitialThreshold

  proc GC_disableMarkAndSweep() =
    gch.cycleThreshold = high(typeof(gch.cycleThreshold))-1
    # set to the max value to suppress the cycle detector

  when defined(nimTracing):
    proc GC_logTrace*() =
      gch.tracing = true

  proc GC_fullCollect() =
    let oldThreshold = gch.cycleThreshold
    gch.cycleThreshold = 0 # forces cycle collection
    collectCT(gch, 0)
    gch.cycleThreshold = oldThreshold

  proc GC_getStatistics(): string =
    result = "[GC] total memory: " & $getTotalMem() & "\n" &
             "[GC] occupied memory: " & $getOccupiedMem() & "\n" &
             "[GC] collections: " & $gch.stat.collections & "\n" &
             "[GC] max threshold: " & $gch.stat.maxThreshold & "\n" &
             "[GC] freed objects: " & $gch.stat.freedObjects & "\n"
    when nimCoroutines:
      result.add "[GC] number of stacks: " & $gch.stack.len & "\n"
      for stack in items(gch.stack):
        result.add "[GC] stack " & stack.bottom.repr & "[GC] max stack size " & $stack.maxStackSize & "\n"
    else:
      result.add "[GC] max stack size: " & $gch.stat.maxStackSize & "\n"

{.pop.}
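
# Note (added commentary): a hedged sketch of how user code typically drives
# this GC through the public wrappers defined above (user code, not part of
# this module):
#
#   GC_disable()              # bump recGcLock; no collection can run
#   # ... allocation-heavy, latency-sensitive section ...
#   GC_enable()               # balances the GC_disable call
#   GC_fullCollect()          # force a complete mark & sweep now
#   echo GC_getStatistics()   # "[GC] total memory: ..." report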