#
#
#            Nim's Runtime Library
#        (c) Copyright 2012 Andreas Rumpf
#
#    See the file "copying.txt", included in this
#    distribution, for details about the copyright.
#

include seqs_v2_reimpl
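
# This module implements the runtime's generic deep assignment, object
# initialization and reset routines. They are driven by the runtime type
# information (``PNimType``/``TNimNode``) emitted by the compiler and are
# entered through the ``compilerproc``s defined below. The operators ``+%``,
# ``*%`` and ``<%`` used throughout are the system's unsigned (wrap-around)
# arithmetic and comparison helpers for address computations.
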
proc genericResetAux(dest: pointer, n: ptr TNimNode) {.benign.}

proc genericAssignAux(dest, src: pointer, mt: PNimType, shallow: bool) {.benign.}
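
# Assignment driven by a ``TNimNode`` tree: ``nkSlot`` copies a single field,
# ``nkList`` recurses over all fields, and ``nkCase`` resets the old branch if
# the discriminator selects a different one, then copies the discriminator and
# the fields of the newly active branch.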
proc genericAssignAux(dest, src: pointer, n: ptr TNimNode,
                      shallow: bool) {.benign.} =
  var
    d = cast[ByteAddress](dest)
    s = cast[ByteAddress](src)
  case n.kind
  of nkSlot:
    genericAssignAux(cast[pointer](d +% n.offset),
                     cast[pointer](s +% n.offset), n.typ, shallow)
  of nkList:
    for i in 0..n.len-1:
      genericAssignAux(dest, src, n.sons[i], shallow)
  of nkCase:
    var dd = selectBranch(dest, n)
    var m = selectBranch(src, n)
    # reset if different branches are in use; note that different branches
    # also imply that this is not a self-assignment (``x = x``)!
    if m != dd and dd != nil:
      genericResetAux(dest, dd)
    copyMem(cast[pointer](d +% n.offset), cast[pointer](s +% n.offset),
            n.typ.size)
    if m != nil:
      genericAssignAux(dest, src, m, shallow)
  of nkNone: sysAssert(false, "genericAssignAux")
  #else:
  #  echo "ugh memory corruption! ", n.kind
  #  quit 1
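
# Element-wise deep copy for the v2 seq representation (``nimSeqsV2``):
# allocates a fresh payload for ``dest`` and applies ``operation`` to every
# element slot, skipping over the aligned payload header.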
template deepSeqAssignImpl(operation, additionalArg) {.dirty.} =
  var d = cast[ptr NimSeqV2Reimpl](dest)
  var s = cast[ptr NimSeqV2Reimpl](src)
  d.len = s.len
  let elem = mt.base
  d.p = cast[ptr NimSeqPayloadReimpl](newSeqPayload(s.len, elem.size, elem.align))

  let bs = elem.size
  let ba = elem.align
  let headerSize = align(sizeof(NimSeqPayloadBase), ba)

  for i in 0..d.len-1:
    operation(d.p +! (headerSize+i*bs), s.p +! (headerSize+i*bs), mt.base, additionalArg)
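
# Assignment driven by a ``PNimType``: dispatches on the type kind and falls
# back to a raw ``copyMem`` for kinds that contain no GC'ed references.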
proc genericAssignAux(dest, src: pointer, mt: PNimType, shallow: bool) =
  var
    d = cast[ByteAddress](dest)
    s = cast[ByteAddress](src)
  sysAssert(mt != nil, "genericAssignAux 2")
  case mt.kind
  of tyString:
    when defined(nimSeqsV2):
      var x = cast[ptr NimStringV2](dest)
      var s2 = cast[ptr NimStringV2](s)[]
      nimAsgnStrV2(x[], s2)
    else:
      var x = cast[PPointer](dest)
      var s2 = cast[PPointer](s)[]
      if s2 == nil or shallow or (
          cast[PGenericSeq](s2).reserved and seqShallowFlag) != 0:
        unsureAsgnRef(x, s2)
      else:
        unsureAsgnRef(x, copyString(cast[NimString](s2)))
  of tySequence:
    when defined(nimSeqsV2):
      deepSeqAssignImpl(genericAssignAux, shallow)
    else:
      var s2 = cast[PPointer](src)[]
      var seq = cast[PGenericSeq](s2)
      var x = cast[PPointer](dest)
      if s2 == nil or shallow or (seq.reserved and seqShallowFlag) != 0:
        # this can happen! nil sequences are allowed
        unsureAsgnRef(x, s2)
        return
      sysAssert(dest != nil, "genericAssignAux 3")
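      # Fast path: the element type contains no GC'ed references, so the
      # whole payload can be copied with a single copyMem.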
      if ntfNoRefs in mt.base.flags:
        var ss = nimNewSeqOfCap(mt, seq.len)
        cast[PGenericSeq](ss).len = seq.len
        unsureAsgnRef(x, ss)
        var dst = cast[ByteAddress](cast[PPointer](dest)[])
        copyMem(cast[pointer](dst +% align(GenericSeqSize, mt.base.align)),
                cast[pointer](cast[ByteAddress](s2) +% align(GenericSeqSize, mt.base.align)),
                seq.len *% mt.base.size)
      else:
        unsureAsgnRef(x, newSeq(mt, seq.len))
        var dst = cast[ByteAddress](cast[PPointer](dest)[])
        for i in 0..seq.len-1:
          genericAssignAux(
            cast[pointer](dst +% align(GenericSeqSize, mt.base.align) +% i *% mt.base.size),
            cast[pointer](cast[ByteAddress](s2) +% align(GenericSeqSize, mt.base.align) +% i *% mt.base.size),
            mt.base, shallow)
  of tyObject:
    var it = mt.base
    # don't use recursion here on the PNimType because the subtype
    # check should only be done at the very end:
    while it != nil:
      genericAssignAux(dest, src, it.node, shallow)
      it = it.base
    genericAssignAux(dest, src, mt.node, shallow)
    # we need to copy m_type field for tyObject, as it could be empty for
    # sequence reallocations:
    when defined(nimSeqsV2):
      var pint = cast[ptr PNimTypeV2](dest)
      #chckObjAsgn(cast[ptr PNimTypeV2](src)[].typeInfoV2, mt)
      pint[] = cast[PNimTypeV2](mt.typeInfoV2)
    else:
      var pint = cast[ptr PNimType](dest)
      # We need to copy the *static* type not the dynamic type:
      #   if p of TB:
      #     var tbObj = TB(p)
      #     tbObj of TC # needs to be false!
      #c_fprintf(stdout, "%s %s\n", pint[].name, mt.name)
      let srcType = cast[ptr PNimType](src)[]
      if srcType != nil:
        # the `!= nil` check is needed because of cases where the object is not
        # initialized properly (see bug #16706); note that you can have
        # `srcType == nil` and yet `src != nil`
        chckObjAsgn(srcType, mt)
      pint[] = mt # cast[ptr PNimType](src)[]
  of tyTuple:
    genericAssignAux(dest, src, mt.node, shallow)
  of tyArray, tyArrayConstr:
    for i in 0..(mt.size div mt.base.size)-1:
      genericAssignAux(cast[pointer](d +% i *% mt.base.size),
                       cast[pointer](s +% i *% mt.base.size), mt.base, shallow)
  of tyRef:
    unsureAsgnRef(cast[PPointer](dest), cast[PPointer](s)[])
  else:
    copyMem(dest, src, mt.size) # copy raw bits
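
# Entry points called from generated code: ``genericAssign`` performs a deep
# copy, ``genericShallowAssign`` only copies top-level references.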
proc genericAssign(dest, src: pointer, mt: PNimType) {.compilerproc.} =
  genericAssignAux(dest, src, mt, false)

proc genericShallowAssign(dest, src: pointer, mt: PNimType) {.compilerproc.} =
  genericAssignAux(dest, src, mt, true)

when false:
  proc debugNimType(t: PNimType) =
    if t.isNil:
      cprintf("nil!")
      return
    var k: cstring
    case t.kind
    of tyBool: k = "bool"
    of tyChar: k = "char"
    of tyEnum: k = "enum"
    of tyArray: k = "array"
    of tyObject: k = "object"
    of tyTuple: k = "tuple"
    of tyRange: k = "range"
    of tyPtr: k = "ptr"
    of tyRef: k = "ref"
    of tyVar: k = "var"
    of tySequence: k = "seq"
    of tyProc: k = "proc"
    of tyPointer: k = "pointer"
    of tyOpenArray: k = "openarray"
    of tyString: k = "string"
    of tyCstring: k = "cstring"
    of tyInt: k = "int"
    of tyInt32: k = "int32"
    else: k = "other"
    cprintf("%s %ld\n", k, t.size)
    debugNimType(t.base)
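
# ``genericAssignAux`` expects ``src`` to point at the seq cell (for
# ``tySequence`` it reads it via ``cast[PPointer](src)[]``), so the seq value
# passed in here is first stored in an addressable local and its address is
# handed on.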
proc genericSeqAssign(dest, src: pointer, mt: PNimType) {.compilerproc.} =
  var src = src # ugly, but I like to stress the parser sometimes :-)
  genericAssign(dest, addr(src), mt)
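
# Element-wise deep copy of an open array: ``len`` elements of the element
# type ``mt.base`` are assigned one by one.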
proc genericAssignOpenArray(dest, src: pointer, len: int,
                            mt: PNimType) {.compilerproc.} =
  var
    d = cast[ByteAddress](dest)
    s = cast[ByteAddress](src)
  for i in 0..len-1:
    genericAssign(cast[pointer](d +% i *% mt.base.size),
                  cast[pointer](s +% i *% mt.base.size), mt.base)
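
# ``objectInit`` writes the type header of an object and recursively
# initializes embedded objects, tuples and arrays; for case objects only the
# branch selected by the current discriminator value is visited.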
proc objectInit(dest: pointer, typ: PNimType) {.compilerproc, benign.}
proc objectInitAux(dest: pointer, n: ptr TNimNode) {.benign.} =
  var d = cast[ByteAddress](dest)
  case n.kind
  of nkNone: sysAssert(false, "objectInitAux")
  of nkSlot: objectInit(cast[pointer](d +% n.offset), n.typ)
  of nkList:
    for i in 0..n.len-1:
      objectInitAux(dest, n.sons[i])
  of nkCase:
    var m = selectBranch(dest, n)
    if m != nil: objectInitAux(dest, m)

proc objectInit(dest: pointer, typ: PNimType) =
  # the generic init proc that takes care of initialization of complex
  # objects on the stack or heap
  var d = cast[ByteAddress](dest)
  case typ.kind
  of tyObject:
    # iterate over any structural type
    # here we have to init the type field:
    when defined(nimSeqsV2):
      var pint = cast[ptr PNimTypeV2](dest)
      pint[] = cast[PNimTypeV2](typ.typeInfoV2)
    else:
      var pint = cast[ptr PNimType](dest)
      pint[] = typ
    objectInitAux(dest, typ.node)
  of tyTuple:
    objectInitAux(dest, typ.node)
  of tyArray, tyArrayConstr:
    for i in 0..(typ.size div typ.base.size)-1:
      objectInit(cast[pointer](d +% i * typ.base.size), typ.base)
  else: discard # nothing to do

# ---------------------- assign zero -----------------------------------------
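
# ``genericReset`` clears the destination in depth: references are dropped via
# ``unsureAsgnRef`` (or v2 payloads are freed) and the memory is zeroed, again
# guided by the runtime type information.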
proc genericReset(dest: pointer, mt: PNimType) {.compilerproc, benign.}
proc genericResetAux(dest: pointer, n: ptr TNimNode) =
  var d = cast[ByteAddress](dest)
  case n.kind
  of nkNone: sysAssert(false, "genericResetAux")
  of nkSlot: genericReset(cast[pointer](d +% n.offset), n.typ)
  of nkList:
    for i in 0..n.len-1: genericResetAux(dest, n.sons[i])
  of nkCase:
    var m = selectBranch(dest, n)
    if m != nil: genericResetAux(dest, m)
    zeroMem(cast[pointer](d +% n.offset), n.typ.size)

proc genericReset(dest: pointer, mt: PNimType) =
  var d = cast[ByteAddress](dest)
  sysAssert(mt != nil, "genericReset 2")
  case mt.kind
  of tyRef:
    unsureAsgnRef(cast[PPointer](dest), nil)
  of tyString:
    when defined(nimSeqsV2):
      var s = cast[ptr NimStringV2](dest)
      frees(s[])
      zeroMem(dest, mt.size)
    else:
      unsureAsgnRef(cast[PPointer](dest), nil)
  of tySequence:
    when defined(nimSeqsV2):
      frees(cast[ptr NimSeqV2Reimpl](dest)[])
      zeroMem(dest, mt.size)
    else:
      unsureAsgnRef(cast[PPointer](dest), nil)
  of tyTuple:
    genericResetAux(dest, mt.node)
  of tyObject:
    genericResetAux(dest, mt.node)
    # also reset the type field for tyObject, for correct branch switching!
    when defined(nimSeqsV2):
      var pint = cast[ptr PNimTypeV2](dest)
      pint[] = nil
    else:
      var pint = cast[ptr PNimType](dest)
      pint[] = nil
  of tyArray, tyArrayConstr:
    for i in 0..(mt.size div mt.base.size)-1:
      genericReset(cast[pointer](d +% i *% mt.base.size), mt.base)
  else:
    zeroMem(dest, mt.size) # set raw bits to zero
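
# ``a`` holds one branch node per possible discriminator value in slots
# ``0 .. L-1``; slot ``L`` holds the ``else`` branch (or nil if there is
# none), so out-of-range discriminator values fall back to ``else``.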
proc selectBranch(discVal, L: int,
                  a: ptr array[0x7fff, ptr TNimNode]): ptr TNimNode =
  result = a[L] # a[L] contains the ``else`` part (but may be nil)
  if discVal <% L:
    let x = a[discVal]
    if x != nil: result = x
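
# Runtime check for an assignment to the discriminator of a case object:
# switching to a different branch this way raises a ``FieldDefect`` (under
# ``-d:nimOldCaseObjects`` switching away from the zero default is still
# tolerated for the transition period). Illustrative sketch only, not part of
# this module; depending on the compiler version such an assignment may be
# rejected at compile time instead:
#
#   type Node = object
#     case isLeaf: bool
#     of false: kids: seq[Node]
#     of true: value: int
#
#   var n = Node(isLeaf: false)
#   n.isLeaf = true   # the generated code calls FieldDiscriminantCheck here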
proc FieldDiscriminantCheck(oldDiscVal, newDiscVal: int,
                            a: ptr array[0x7fff, ptr TNimNode],
                            L: int) {.compilerproc.} =
  let oldBranch = selectBranch(oldDiscVal, L, a)
  let newBranch = selectBranch(newDiscVal, L, a)
  when defined(nimOldCaseObjects):
    if newBranch != oldBranch and oldDiscVal != 0:
      sysFatal(FieldDefect, "assignment to discriminant changes object branch")
  else:
    if newBranch != oldBranch:
      if oldDiscVal != 0:
        sysFatal(FieldDefect, "assignment to discriminant changes object branch")
      else:
        sysFatal(FieldDefect, "assignment to discriminant changes object branch; compile with -d:nimOldCaseObjects for a transition period")