vm-engine.c 130 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204
/* Copyright (C) 2001, 2009-2015, 2018, 2019
 *   Free Software Foundation, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 3 of
 * the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
 * 02110-1301 USA
 */

/* This file is included in vm.c multiple times.  */
  20. #define UNPACK_8_8_8(op,a,b,c) \
  21. do \
  22. { \
  23. a = (op >> 8) & 0xff; \
  24. b = (op >> 16) & 0xff; \
  25. c = op >> 24; \
  26. } \
  27. while (0)
  28. #define UNPACK_8_16(op,a,b) \
  29. do \
  30. { \
  31. a = (op >> 8) & 0xff; \
  32. b = op >> 16; \
  33. } \
  34. while (0)
  35. #define UNPACK_16_8(op,a,b) \
  36. do \
  37. { \
  38. a = (op >> 8) & 0xffff; \
  39. b = op >> 24; \
  40. } \
  41. while (0)
  42. #define UNPACK_12_12(op,a,b) \
  43. do \
  44. { \
  45. a = (op >> 8) & 0xfff; \
  46. b = op >> 20; \
  47. } \
  48. while (0)
  49. #define UNPACK_24(op,a) \
  50. do \
  51. { \
  52. a = op >> 8; \
  53. } \
  54. while (0)
  55. /* Assign some registers by hand. There used to be a bigger list here,
  56. but it was never tested, and in the case of x86-32, was a source of
  57. compilation failures. It can be revived if it's useful, but my naive
  58. hope is that simply annotating the locals with "register" will be a
  59. sufficient hint to the compiler. */
  60. #ifdef __GNUC__
  61. # if defined __x86_64__
  62. /* GCC 4.6 chooses %rbp for IP_REG and %rbx for SP_REG, which works
  63. well. Tell it to keep the jump table in a r12, which is
  64. callee-saved. */
  65. # define JT_REG asm ("r12")
  66. # endif
  67. #endif
  68. #ifndef IP_REG
  69. # define IP_REG
  70. #endif
  71. #ifndef FP_REG
  72. # define FP_REG
  73. #endif
  74. #ifndef JT_REG
  75. # define JT_REG
  76. #endif
  77. #define VM_ASSERT(condition, handler) \
  78. do { \
  79. if (SCM_UNLIKELY (!(condition))) \
  80. { \
  81. SYNC_IP(); \
  82. handler; \
  83. } \
  84. } while (0)
  85. #ifdef VM_ENABLE_ASSERTIONS
  86. # define ASSERT(condition) VM_ASSERT (condition, abort())
  87. #else
  88. # define ASSERT(condition)
  89. #endif
  90. #if VM_USE_HOOKS
  91. #define RUN_HOOK(exp) \
  92. do { \
  93. if (SCM_UNLIKELY (vp->trace_level > 0)) \
  94. { \
  95. SYNC_IP (); \
  96. exp; \
  97. CACHE_SP (); \
  98. } \
  99. } while (0)
  100. #else
  101. #define RUN_HOOK(exp)
  102. #endif
  103. #define RUN_HOOK0(h) RUN_HOOK (vm_dispatch_##h##_hook (vp))
  104. #define RUN_HOOK1(h, arg) RUN_HOOK (vm_dispatch_##h##_hook (vp, arg))
  105. #define APPLY_HOOK() \
  106. RUN_HOOK0 (apply)
  107. #define PUSH_CONTINUATION_HOOK() \
  108. RUN_HOOK0 (push_continuation)
  109. #define POP_CONTINUATION_HOOK(old_fp) \
  110. RUN_HOOK1 (pop_continuation, old_fp)
  111. #define NEXT_HOOK() \
  112. RUN_HOOK0 (next)
  113. #define ABORT_CONTINUATION_HOOK() \
  114. RUN_HOOK0 (abort)
  115. /* Virtual Machine
  116. The VM has three state bits: the instruction pointer (IP), the frame
  117. pointer (FP), and the stack pointer (SP). We cache the IP in a
  118. machine register, local to the VM, because it is used extensively by
  119. the VM. We do the same for SP. The FP is used more by code outside
  120. the VM than by the VM itself, we don't bother caching it locally.
  121. Keeping vp->ip in sync with the local IP would be a big lose, as it
  122. is updated so often. Instead of updating vp->ip all the time, we
  123. call SYNC_IP whenever we would need to know the IP of the top frame.
  124. In practice, we need to SYNC_IP whenever we call out of the VM to a
  125. function that would like to walk the stack, perhaps as the result of
  126. an exception. On the other hand, we do always keep vp->sp in sync
  127. with the local SP.
  128. One more thing. We allow the stack to move, when it expands.
  129. Therefore if you call out to a C procedure that could call Scheme
  130. code, or otherwise push anything on the stack, you will need to
  131. CACHE_SP afterwards to restore the possibly-changed stack pointer. */
  132. #define SYNC_IP() vp->ip = (ip)
  133. #define CACHE_SP() sp = vp->sp
  134. #define CACHE_REGISTER() \
  135. do { \
  136. ip = vp->ip; \
  137. CACHE_SP (); \
  138. } while (0)
  139. /* Reserve stack space for a frame. Will check that there is sufficient
  140. stack space for N locals, including the procedure. Invoke after
  141. preparing the new frame and setting the fp and ip.
  142. If there is not enough space for this frame, we try to expand the
  143. stack, possibly relocating it somewhere else in the address space.
  144. Because of the possible relocation, no pointer into the stack besides
  145. FP is valid across an ALLOC_FRAME call. Be careful! */
  146. #define ALLOC_FRAME(n) \
  147. do { \
  148. sp = vp->fp - (n); \
  149. if (sp < vp->sp_min_since_gc) \
  150. { \
  151. if (SCM_UNLIKELY (sp < vp->stack_limit)) \
  152. { \
  153. SYNC_IP (); \
  154. vm_expand_stack (vp, sp); \
  155. CACHE_SP (); \
  156. } \
  157. else \
  158. vp->sp_min_since_gc = vp->sp = sp; \
  159. } \
  160. else \
  161. vp->sp = sp; \
  162. } while (0)
  163. /* Reset the current frame to hold N locals. Used when we know that no
  164. stack expansion is needed. */
  165. #define RESET_FRAME(n) \
  166. do { \
  167. vp->sp = sp = vp->fp - (n); \
  168. if (sp < vp->sp_min_since_gc) \
  169. vp->sp_min_since_gc = sp; \
  170. } while (0)
  171. /* Compute the number of locals in the frame. At a call, this is equal
  172. to the number of actual arguments when a function is first called,
  173. plus one for the function. */
  174. #define FRAME_LOCALS_COUNT() (vp->fp - sp)
  175. #define FRAME_LOCALS_COUNT_FROM(slot) (FRAME_LOCALS_COUNT () - slot)
  176. /* Restore registers after returning from a frame. */
  177. #define RESTORE_FRAME() \
  178. do { \
  179. } while (0)
  180. #ifdef HAVE_LABELS_AS_VALUES
  181. # define BEGIN_DISPATCH_SWITCH /* */
  182. # define END_DISPATCH_SWITCH /* */
  183. # define NEXT(n) \
  184. do \
  185. { \
  186. ip += n; \
  187. NEXT_HOOK (); \
  188. op = *ip; \
  189. goto *jump_table[op & 0xff]; \
  190. } \
  191. while (0)
  192. # define VM_DEFINE_OP(opcode, tag, name, meta) \
  193. op_##tag:
  194. #else
  195. # define BEGIN_DISPATCH_SWITCH \
  196. vm_start: \
  197. NEXT_HOOK (); \
  198. op = *ip; \
  199. switch (op & 0xff) \
  200. {
  201. # define END_DISPATCH_SWITCH \
  202. }
  203. # define NEXT(n) \
  204. do \
  205. { \
  206. ip += n; \
  207. goto vm_start; \
  208. } \
  209. while (0)
  210. # define VM_DEFINE_OP(opcode, tag, name, meta) \
  211. op_##tag: \
  212. case opcode:
  213. #endif
  214. #define FP_SLOT(i) SCM_FRAME_SLOT (vp->fp, i)
  215. #define FP_REF(i) SCM_FRAME_LOCAL (vp->fp, i)
  216. #define FP_SET(i,o) SCM_FRAME_LOCAL (vp->fp, i) = o
  217. #define SP_REF_SLOT(i) (sp[i])
  218. #define SP_SET_SLOT(i,o) (sp[i] = o)
  219. #define SP_REF(i) (sp[i].as_scm)
  220. #define SP_SET(i,o) (sp[i].as_scm = o)
  221. #define SP_REF_F64(i) (sp[i].as_f64)
  222. #define SP_SET_F64(i,o) (sp[i].as_f64 = o)
  223. #define SP_REF_U64(i) (sp[i].as_u64)
  224. #define SP_SET_U64(i,o) (sp[i].as_u64 = o)
  225. #define SP_REF_S64(i) (sp[i].as_s64)
  226. #define SP_SET_S64(i,o) (sp[i].as_s64 = o)
  227. #define VARIABLE_REF(v) SCM_VARIABLE_REF (v)
  228. #define VARIABLE_SET(v,o) SCM_VARIABLE_SET (v, o)
  229. #define VARIABLE_BOUNDP(v) (!scm_is_eq (VARIABLE_REF (v), SCM_UNDEFINED))
  230. #define BR_NARGS(rel) \
  231. scm_t_uint32 expected; \
  232. UNPACK_24 (op, expected); \
  233. if (FRAME_LOCALS_COUNT() rel expected) \
  234. { \
  235. scm_t_int32 offset = ip[1]; \
  236. offset >>= 8; /* Sign-extending shift. */ \
  237. NEXT (offset); \
  238. } \
  239. NEXT (2)
  240. #define BR_UNARY(x, exp) \
  241. scm_t_uint32 test; \
  242. SCM x; \
  243. UNPACK_24 (op, test); \
  244. x = SP_REF (test); \
  245. if ((ip[1] & 0x1) ? !(exp) : (exp)) \
  246. { \
  247. scm_t_int32 offset = ip[1]; \
  248. offset >>= 8; /* Sign-extending shift. */ \
  249. NEXT (offset); \
  250. } \
  251. NEXT (2)
  252. #define BR_BINARY(x, y, exp) \
  253. scm_t_uint32 a, b; \
  254. SCM x, y; \
  255. UNPACK_24 (op, a); \
  256. UNPACK_24 (ip[1], b); \
  257. x = SP_REF (a); \
  258. y = SP_REF (b); \
  259. if ((ip[2] & 0x1) ? !(exp) : (exp)) \
  260. { \
  261. scm_t_int32 offset = ip[2]; \
  262. offset >>= 8; /* Sign-extending shift. */ \
  263. NEXT (offset); \
  264. } \
  265. NEXT (3)
  266. #define BR_ARITHMETIC(crel,srel) \
  267. { \
  268. scm_t_uint32 a, b; \
  269. SCM x, y; \
  270. UNPACK_24 (op, a); \
  271. UNPACK_24 (ip[1], b); \
  272. x = SP_REF (a); \
  273. y = SP_REF (b); \
  274. if (SCM_I_INUMP (x) && SCM_I_INUMP (y)) \
  275. { \
  276. scm_t_signed_bits x_bits = SCM_UNPACK (x); \
  277. scm_t_signed_bits y_bits = SCM_UNPACK (y); \
  278. if ((ip[2] & 0x1) ? !(x_bits crel y_bits) : (x_bits crel y_bits)) \
  279. { \
  280. scm_t_int32 offset = ip[2]; \
  281. offset >>= 8; /* Sign-extending shift. */ \
  282. NEXT (offset); \
  283. } \
  284. NEXT (3); \
  285. } \
  286. else \
  287. { \
  288. SCM res; \
  289. SYNC_IP (); \
  290. res = srel (x, y); \
  291. CACHE_SP (); \
  292. if ((ip[2] & 0x1) ? scm_is_false (res) : scm_is_true (res)) \
  293. { \
  294. scm_t_int32 offset = ip[2]; \
  295. offset >>= 8; /* Sign-extending shift. */ \
  296. NEXT (offset); \
  297. } \
  298. NEXT (3); \
  299. } \
  300. }
  301. #define BR_U64_ARITHMETIC(crel) \
  302. { \
  303. scm_t_uint32 a, b; \
  304. scm_t_uint64 x, y; \
  305. UNPACK_24 (op, a); \
  306. UNPACK_24 (ip[1], b); \
  307. x = SP_REF_U64 (a); \
  308. y = SP_REF_U64 (b); \
  309. if ((ip[2] & 0x1) ? !(x crel y) : (x crel y)) \
  310. { \
  311. scm_t_int32 offset = ip[2]; \
  312. offset >>= 8; /* Sign-extending shift. */ \
  313. NEXT (offset); \
  314. } \
  315. NEXT (3); \
  316. }
  317. #define BR_F64_ARITHMETIC(crel) \
  318. { \
  319. scm_t_uint32 a, b; \
  320. double x, y; \
  321. UNPACK_24 (op, a); \
  322. UNPACK_24 (ip[1], b); \
  323. x = SP_REF_F64 (a); \
  324. y = SP_REF_F64 (b); \
  325. if ((ip[2] & 0x1) ? !(x crel y) : (x crel y)) \
  326. { \
  327. scm_t_int32 offset = ip[2]; \
  328. offset >>= 8; /* Sign-extending shift. */ \
  329. NEXT (offset); \
  330. } \
  331. NEXT (3); \
  332. }
  333. #define ARGS1(a1) \
  334. scm_t_uint16 dst, src; \
  335. SCM a1; \
  336. UNPACK_12_12 (op, dst, src); \
  337. a1 = SP_REF (src)
  338. #define ARGS2(a1, a2) \
  339. scm_t_uint8 dst, src1, src2; \
  340. SCM a1, a2; \
  341. UNPACK_8_8_8 (op, dst, src1, src2); \
  342. a1 = SP_REF (src1); \
  343. a2 = SP_REF (src2)
  344. #define RETURN(x) \
  345. do { SP_SET (dst, x); NEXT (1); } while (0)
  346. #define RETURN_EXP(exp) \
  347. do { SCM __x; SYNC_IP (); __x = exp; CACHE_SP (); RETURN (__x); } while (0)
  348. /* The maximum/minimum tagged integers. */
  349. #define INUM_MAX \
  350. ((scm_t_signed_bits) SCM_UNPACK (SCM_I_MAKINUM (SCM_MOST_POSITIVE_FIXNUM)))
  351. #define INUM_MIN \
  352. ((scm_t_signed_bits) SCM_UNPACK (SCM_I_MAKINUM (SCM_MOST_NEGATIVE_FIXNUM)))
  353. #define INUM_STEP \
  354. ((scm_t_signed_bits) SCM_UNPACK (SCM_INUM1) \
  355. - (scm_t_signed_bits) SCM_UNPACK (SCM_INUM0))
  356. #define BINARY_INTEGER_OP(CFUNC,SFUNC) \
  357. { \
  358. ARGS2 (x, y); \
  359. if (SCM_I_INUMP (x) && SCM_I_INUMP (y)) \
  360. { \
  361. scm_t_int64 n = SCM_I_INUM (x) CFUNC SCM_I_INUM (y); \
  362. if (SCM_FIXABLE (n)) \
  363. RETURN (SCM_I_MAKINUM (n)); \
  364. } \
  365. RETURN_EXP (SFUNC (x, y)); \
  366. }
  367. #define VM_VALIDATE(x, pred, proc, what) \
  368. VM_ASSERT (pred (x), vm_error_not_a_ ## what (proc, x))
  369. #define VM_VALIDATE_ATOMIC_BOX(x, proc) \
  370. VM_VALIDATE (x, scm_is_atomic_box, proc, atomic_box)
  371. #define VM_VALIDATE_BYTEVECTOR(x, proc) \
  372. VM_VALIDATE (x, SCM_BYTEVECTOR_P, proc, bytevector)
  373. #define VM_VALIDATE_MUTABLE_BYTEVECTOR(obj, proc) \
  374. VM_VALIDATE (obj, SCM_MUTABLE_BYTEVECTOR_P, proc, mutable_bytevector)
  375. #define VM_VALIDATE_CHAR(x, proc) \
  376. VM_VALIDATE (x, SCM_CHARP, proc, char)
  377. #define VM_VALIDATE_PAIR(x, proc) \
  378. VM_VALIDATE (x, scm_is_pair, proc, pair)
  379. #define VM_VALIDATE_MUTABLE_PAIR(x, proc) \
  380. VM_VALIDATE (x, scm_is_mutable_pair, proc, mutable_pair)
  381. #define VM_VALIDATE_STRING(obj, proc) \
  382. VM_VALIDATE (obj, scm_is_string, proc, string)
  383. #define VM_VALIDATE_STRUCT(obj, proc) \
  384. VM_VALIDATE (obj, SCM_STRUCTP, proc, struct)
  385. #define VM_VALIDATE_VARIABLE(obj, proc) \
  386. VM_VALIDATE (obj, SCM_VARIABLEP, proc, variable)
  387. #define VM_VALIDATE_VECTOR(obj, proc) \
  388. VM_VALIDATE (obj, SCM_I_IS_VECTOR, proc, vector)
  389. #define VM_VALIDATE_MUTABLE_VECTOR(obj, proc) \
  390. VM_VALIDATE (obj, SCM_I_IS_MUTABLE_VECTOR, proc, mutable_vector)
  391. #define VM_VALIDATE_INDEX(u64, size, proc) \
  392. VM_ASSERT (u64 < size, vm_error_out_of_range_uint64 (proc, u64))
  393. /* Return true (non-zero) if PTR has suitable alignment for TYPE. */
  394. #define ALIGNED_P(ptr, type) \
  395. ((scm_t_uintptr) (ptr) % alignof_type (type) == 0)
  396. static SCM
  397. VM_NAME (scm_i_thread *thread, struct scm_vm *vp,
  398. scm_i_jmp_buf *registers, int resume)
  399. {
  400. /* Instruction pointer: A pointer to the opcode that is currently
  401. running. */
  402. register scm_t_uint32 *ip IP_REG;
  403. /* Stack pointer: A pointer to the hot end of the stack, off of which
  404. we index arguments and local variables. Pushed at function calls,
  405. popped on returns. */
  406. register union scm_vm_stack_element *sp FP_REG;
  407. /* Current opcode: A cache of *ip. */
  408. register scm_t_uint32 op;
  409. #ifdef HAVE_LABELS_AS_VALUES
  410. static const void *jump_table_[256] = {
  411. #define LABEL_ADDR(opcode, tag, name, meta) &&op_##tag,
  412. FOR_EACH_VM_OPERATION(LABEL_ADDR)
  413. #undef LABEL_ADDR
  414. };
  415. register const void **jump_table JT_REG;
  416. /* Attempt to keep JUMP_TABLE_POINTER in a register. This saves one
  417. load instruction at each instruction dispatch. */
  418. jump_table = jump_table_;
  419. #endif
  420. /* Load VM registers. */
  421. CACHE_REGISTER ();
  422. /* Usually a call to the VM happens on application, with the boot
  423. continuation on the next frame. Sometimes it happens after a
  424. non-local exit however; in that case the VM state is all set up,
  425. and we have but to jump to the next opcode. */
  426. if (SCM_UNLIKELY (resume))
  427. NEXT (0);
  428. if (SCM_LIKELY (SCM_PROGRAM_P (FP_REF (0))))
  429. ip = SCM_PROGRAM_CODE (FP_REF (0));
  430. else
  431. ip = (scm_t_uint32 *) vm_apply_non_program_code;
  432. APPLY_HOOK ();
  433. NEXT (0);
  434. BEGIN_DISPATCH_SWITCH;
  435. /*
  436. * Call and return
  437. */
  438. /* halt _:24
  439. *
  440. * Bring the VM to a halt, returning all the values from the stack.
  441. */
  442. VM_DEFINE_OP (0, halt, "halt", OP1 (X32))
  443. {
  444. /* Boot closure in r0, empty frame in r1/r2, proc in r3, values from r4. */
  445. scm_t_uint32 nvals = FRAME_LOCALS_COUNT_FROM (4);
  446. SCM ret;
  447. if (nvals == 1)
  448. ret = FP_REF (4);
  449. else
  450. {
  451. scm_t_uint32 n;
  452. ret = SCM_EOL;
  453. SYNC_IP ();
  454. for (n = nvals; n > 0; n--)
  455. ret = scm_inline_cons (thread, FP_REF (4 + n - 1), ret);
  456. ret = scm_values (ret);
  457. }
  458. vp->ip = SCM_FRAME_RETURN_ADDRESS (vp->fp);
  459. vp->sp = SCM_FRAME_PREVIOUS_SP (vp->fp);
  460. vp->fp = SCM_FRAME_DYNAMIC_LINK (vp->fp);
  461. return ret;
  462. }
  /* call proc:24 _:8 nlocals:24
   *
   * Call a procedure.  PROC is the local corresponding to a procedure.
   * The two values below PROC will be overwritten by the saved call
   * frame data.  The new frame will have space for NLOCALS locals: one
   * for the procedure, and the rest for the arguments which should
   * already have been pushed on.
   *
   * When the call returns, execution proceeds with the next
   * instruction.  There may be any number of values on the return
   * stack; the precise number can be had by subtracting the address of
   * PROC from the post-call SP.
   */
  VM_DEFINE_OP (1, call, "call", OP2 (X8_F24, X8_C24))
    {
      scm_t_uint32 proc, nlocals;
      union scm_vm_stack_element *old_fp, *new_fp;

      UNPACK_24 (op, proc);
      UNPACK_24 (ip[1], nlocals);

      PUSH_CONTINUATION_HOOK ();

      /* The new frame starts one slot below PROC; the dynamic link and
         the return address (the instruction after this 2-word op) are
         saved into the slots below the procedure.  */
      old_fp = vp->fp;
      new_fp = SCM_FRAME_SLOT (old_fp, proc - 1);
      SCM_FRAME_SET_DYNAMIC_LINK (new_fp, old_fp);
      SCM_FRAME_SET_RETURN_ADDRESS (new_fp, ip + 2);
      vp->fp = new_fp;

      RESET_FRAME (nlocals);

      /* Enter the callee, or the generic applier stub for applicable
         non-program objects.  */
      if (SCM_LIKELY (SCM_PROGRAM_P (FP_REF (0))))
        ip = SCM_PROGRAM_CODE (FP_REF (0));
      else
        ip = (scm_t_uint32 *) vm_apply_non_program_code;

      APPLY_HOOK ();

      NEXT (0);
    }
  /* call-label proc:24 _:8 nlocals:24 label:32
   *
   * Call a procedure in the same compilation unit.
   *
   * This instruction is just like "call", except that instead of
   * dereferencing PROC to find the call target, the call target is
   * known to be at LABEL, a signed 32-bit offset in 32-bit units from
   * the current IP.  Since PROC is not dereferenced, it may be some
   * other representation of the closure.
   */
  VM_DEFINE_OP (2, call_label, "call-label", OP3 (X8_F24, X8_C24, L32))
    {
      scm_t_uint32 proc, nlocals;
      scm_t_int32 label;
      union scm_vm_stack_element *old_fp, *new_fp;

      UNPACK_24 (op, proc);
      UNPACK_24 (ip[1], nlocals);
      label = ip[2];  /* full signed 32-bit word operand */

      PUSH_CONTINUATION_HOOK ();

      /* Same frame push as "call": dynamic link and return address
         (the word after this 3-word op) go below PROC.  */
      old_fp = vp->fp;
      new_fp = SCM_FRAME_SLOT (old_fp, proc - 1);
      SCM_FRAME_SET_DYNAMIC_LINK (new_fp, old_fp);
      SCM_FRAME_SET_RETURN_ADDRESS (new_fp, ip + 3);
      vp->fp = new_fp;

      RESET_FRAME (nlocals);

      /* Jump straight to the known code; no type dispatch needed.  */
      ip += label;

      APPLY_HOOK ();

      NEXT (0);
    }
  /* tail-call nlocals:24
   *
   * Tail-call a procedure.  Requires that the procedure and all of the
   * arguments have already been shuffled into position.  Will reset the
   * frame to NLOCALS.
   */
  VM_DEFINE_OP (3, tail_call, "tail-call", OP1 (X8_C24))
    {
      scm_t_uint32 nlocals;

      UNPACK_24 (op, nlocals);

      /* Reuse the current frame; no new frame is pushed.  */
      RESET_FRAME (nlocals);

      if (SCM_LIKELY (SCM_PROGRAM_P (FP_REF (0))))
        ip = SCM_PROGRAM_CODE (FP_REF (0));
      else
        ip = (scm_t_uint32 *) vm_apply_non_program_code;

      APPLY_HOOK ();

      NEXT (0);
    }
  /* tail-call-label nlocals:24 label:32
   *
   * Tail-call a known procedure.  As call is to call-label, tail-call
   * is to tail-call-label.
   */
  VM_DEFINE_OP (4, tail_call_label, "tail-call-label", OP2 (X8_C24, L32))
    {
      scm_t_uint32 nlocals;
      scm_t_int32 label;

      UNPACK_24 (op, nlocals);
      label = ip[1];  /* signed 32-bit offset in 32-bit units */

      RESET_FRAME (nlocals);

      /* Target is known at compile time: jump directly, skipping the
         program-type dispatch that "tail-call" does.  */
      ip += label;

      APPLY_HOOK ();

      NEXT (0);
    }
  /* tail-call/shuffle from:24
   *
   * Tail-call a procedure.  The procedure should already be set to slot
   * 0.  The rest of the args are taken from the frame, starting at
   * FROM, shuffled down to start at slot 0.  This is part of the
   * implementation of the call-with-values builtin.
   */
  VM_DEFINE_OP (5, tail_call_shuffle, "tail-call/shuffle", OP1 (X8_F24))
    {
      scm_t_uint32 n, from, nlocals;

      UNPACK_24 (op, from);

      /* FROM == 0 would shuffle the procedure onto itself.  */
      VM_ASSERT (from > 0, abort ());

      nlocals = FRAME_LOCALS_COUNT ();

      /* Move args [from, nlocals) down to [1, ...), just after the
         procedure in slot 0.  */
      for (n = 0; from + n < nlocals; n++)
        FP_SET (n + 1, FP_REF (from + n));

      RESET_FRAME (n + 1);

      if (SCM_LIKELY (SCM_PROGRAM_P (FP_REF (0))))
        ip = SCM_PROGRAM_CODE (FP_REF (0));
      else
        ip = (scm_t_uint32 *) vm_apply_non_program_code;

      APPLY_HOOK ();

      NEXT (0);
    }
  /* receive dst:12 proc:12 _:8 nlocals:24
   *
   * Receive a single return value from a call whose procedure was in
   * PROC, asserting that the call actually returned at least one
   * value.  Afterwards, resets the frame to NLOCALS locals.
   */
  VM_DEFINE_OP (6, receive, "receive", OP2 (X8_F12_F12, X8_C24) | OP_DST)
    {
      scm_t_uint16 dst, proc;
      scm_t_uint32 nlocals;

      UNPACK_12_12 (op, dst, proc);
      UNPACK_24 (ip[1], nlocals);

      /* The first returned value lives at PROC + 1; locals count must
         exceed that slot or there were no values.  */
      VM_ASSERT (FRAME_LOCALS_COUNT () > proc + 1, vm_error_no_values ());

      FP_SET (dst, FP_REF (proc + 1));
      RESET_FRAME (nlocals);

      NEXT (2);
    }
  /* receive-values proc:24 allow-extra?:1 _:7 nvalues:24
   *
   * Receive a return of multiple values from a call whose procedure was
   * in PROC.  If fewer than NVALUES values were returned, signal an
   * error.  Unless ALLOW-EXTRA? is true, require that the number of
   * return values equals NVALUES exactly.  After receive-values has
   * run, the values can be copied down via `mov'.
   */
  VM_DEFINE_OP (7, receive_values, "receive-values", OP2 (X8_F24, B1_X7_C24))
    {
      scm_t_uint32 proc, nvalues;

      UNPACK_24 (op, proc);
      UNPACK_24 (ip[1], nvalues);

      /* Low bit of the second word is the ALLOW-EXTRA? flag (the B1 of
         B1_X7_C24).  */
      if (ip[1] & 0x1)
        VM_ASSERT (FRAME_LOCALS_COUNT () > proc + nvalues,
                   vm_error_not_enough_values ());
      else
        VM_ASSERT (FRAME_LOCALS_COUNT () == proc + 1 + nvalues,
                   vm_error_wrong_number_of_values (nvalues));

      NEXT (2);
    }
  /* Opcode 8 is currently unassigned; executing it is a hard error.  */
  VM_DEFINE_OP (8, unused_8, NULL, NOP)
    {
      vm_error_bad_instruction (op);
      abort (); /* never reached */
    }
  /* return-values nlocals:24
   *
   * Return a number of values from a call frame.  This opcode
   * corresponds to an application of `values' in tail position.  As
   * with tail calls, we expect that the values have already been
   * shuffled down to a contiguous array starting at slot 1.
   * If NLOCALS is not zero, we also reset the frame to hold NLOCALS
   * values.
   */
  VM_DEFINE_OP (9, return_values, "return-values", OP1 (X8_C24))
    {
      union scm_vm_stack_element *old_fp;
      scm_t_uint32 nlocals;

      UNPACK_24 (op, nlocals);
      if (nlocals)
        RESET_FRAME (nlocals);

      /* Pop back to the caller's frame.  */
      old_fp = vp->fp;
      ip = SCM_FRAME_RETURN_ADDRESS (vp->fp);
      vp->fp = SCM_FRAME_DYNAMIC_LINK (vp->fp);

      /* Clear stack frame. */
      old_fp[0].as_scm = SCM_BOOL_F;
      old_fp[1].as_scm = SCM_BOOL_F;

      POP_CONTINUATION_HOOK (old_fp);

      NEXT (0);
    }
  /*
   * Specialized call stubs
   */

  /* subr-call _:24
   *
   * Call a subr, passing all locals in this frame as arguments.  Return
   * from the calling frame.  This instruction is part of the
   * trampolines created in gsubr.c, and is not generated by the
   * compiler.
   */
  VM_DEFINE_OP (10, subr_call, "subr-call", OP1 (X32))
    {
      SCM ret;

      /* The subr may allocate, error, or reenter the VM: sync the
         cached IP out, and re-fetch SP afterwards since the callee may
         have moved it.  */
      SYNC_IP ();
      ret = scm_apply_subr (sp, FRAME_LOCALS_COUNT ());
      CACHE_SP ();

      if (SCM_UNLIKELY (SCM_VALUESP (ret)))
        {
          /* Multiple values: unpack the values object's list onto the
             stack.  */
          SCM vals = scm_struct_ref (ret, SCM_INUM0);
          long len = scm_ilength (vals);
          ALLOC_FRAME (1 + len);
          while (len--)
            {
              SP_SET (len, SCM_CAR (vals));
              vals = SCM_CDR (vals);
            }
          NEXT (1);
        }
      else
        {
          /* Single value return.  */
          ALLOC_FRAME (2);
          SP_SET (0, ret);
          NEXT (1);
        }
    }
  /* foreign-call cif-idx:12 ptr-idx:12
   *
   * Call a foreign function.  Fetch the CIF and foreign pointer from
   * CIF-IDX and PTR-IDX, both free variables.  Return from the calling
   * frame.  Arguments are taken from the stack.  This instruction is
   * part of the trampolines created by the FFI, and is not generated by
   * the compiler.
   */
  VM_DEFINE_OP (11, foreign_call, "foreign-call", OP1 (X8_C12_C12))
    {
      scm_t_uint16 cif_idx, ptr_idx;
      int err = 0;
      SCM closure, cif, pointer, ret;

      UNPACK_12_12 (op, cif_idx, ptr_idx);

      /* The CIF and function pointer are captured as free variables of
         the trampoline closure in slot 0.  */
      closure = FP_REF (0);
      cif = SCM_PROGRAM_FREE_VARIABLE_REF (closure, cif_idx);
      pointer = SCM_PROGRAM_FREE_VARIABLE_REF (closure, ptr_idx);

      SYNC_IP ();
      ret = scm_i_foreign_call (cif, pointer, &err, sp);
      CACHE_SP ();

      /* Leave two values on the stack: the error indicator on top of
         the foreign return value.  */
      ALLOC_FRAME (3);
      SP_SET (1, ret);
      SP_SET (0, scm_from_int (err));

      NEXT (1);
    }
  /* continuation-call contregs:24
   *
   * Return to a continuation, nonlocally.  The arguments to the
   * continuation are taken from the stack.  CONTREGS is a free variable
   * containing the reified continuation.  This instruction is part of
   * the implementation of undelimited continuations, and is not
   * generated by the compiler.
   */
  VM_DEFINE_OP (12, continuation_call, "continuation-call", OP1 (X8_C24))
    {
      SCM contregs;
      scm_t_uint32 contregs_idx;

      UNPACK_24 (op, contregs_idx);

      contregs =
        SCM_PROGRAM_FREE_VARIABLE_REF (FP_REF (0), contregs_idx);

      SYNC_IP ();
      scm_i_check_continuation (contregs);
      /* Rewind the stack to the captured state, passing the locals
         above slot 0 as the continuation's arguments...  */
      vm_return_to_continuation (scm_i_contregs_vp (contregs),
                                 scm_i_contregs_vm_cont (contregs),
                                 FRAME_LOCALS_COUNT_FROM (1),
                                 sp);
      /* ... then longjmp back to the captured C context; control never
         returns here.  */
      scm_i_reinstate_continuation (contregs);

      /* no NEXT */
      abort ();
    }
  /* compose-continuation cont:24
   *
   * Compose a partial continuation with the current continuation.  The
   * arguments to the continuation are taken from the stack.  CONT is a
   * free variable containing the reified continuation.  This
   * instruction is part of the implementation of partial continuations,
   * and is not generated by the compiler.
   */
  VM_DEFINE_OP (13, compose_continuation, "compose-continuation", OP1 (X8_C24))
    {
      SCM vmcont;
      scm_t_uint32 cont_idx;

      UNPACK_24 (op, cont_idx);
      vmcont = SCM_PROGRAM_FREE_VARIABLE_REF (FP_REF (0), cont_idx);

      SYNC_IP ();
      VM_ASSERT (SCM_VM_CONT_REWINDABLE_P (vmcont),
                 vm_error_continuation_not_rewindable (vmcont));
      /* Splice the captured stack segment onto the current stack; the
         locals above slot 0 become the continuation's arguments.  */
      vm_reinstate_partial_continuation (vp, vmcont, FRAME_LOCALS_COUNT_FROM (1),
                                         &thread->dynstack, registers);

      /* The VM registers were rewritten; reload the cached copies
         before dispatching.  */
      CACHE_REGISTER ();
      NEXT (0);
    }
  /* tail-apply _:24
   *
   * Tail-apply the procedure in local slot 0 to the rest of the
   * arguments.  This instruction is part of the implementation of
   * `apply', and is not generated by the compiler.
   */
  VM_DEFINE_OP (14, tail_apply, "tail-apply", OP1 (X32))
    {
      int i, list_idx, list_len, nlocals;
      SCM list;

      nlocals = FRAME_LOCALS_COUNT ();
      /* At a minimum, there should be apply, f, and the list.  */
      VM_ASSERT (nlocals >= 3, abort ());
      list_idx = nlocals - 1;
      list = FP_REF (list_idx);
      list_len = scm_ilength (list);

      /* scm_ilength returns a negative value for improper or circular
         lists.  */
      VM_ASSERT (list_len >= 0, vm_error_apply_to_non_list (list));

      /* Drop the `apply' and the list, add the list's elements.  */
      nlocals = nlocals - 2 + list_len;
      ALLOC_FRAME (nlocals);

      /* Shift f and the leading args down one slot, over `apply'.  */
      for (i = 1; i < list_idx; i++)
        FP_SET (i - 1, FP_REF (i));

      /* Null out these slots, just in case there are less than 2 elements
         in the list. */
      FP_SET (list_idx - 1, SCM_UNDEFINED);
      FP_SET (list_idx, SCM_UNDEFINED);

      /* Splat the list's elements into the tail of the frame.  */
      for (i = 0; i < list_len; i++, list = SCM_CDR (list))
        FP_SET (list_idx - 1 + i, SCM_CAR (list));

      if (SCM_LIKELY (SCM_PROGRAM_P (FP_REF (0))))
        ip = SCM_PROGRAM_CODE (FP_REF (0));
      else
        ip = (scm_t_uint32 *) vm_apply_non_program_code;

      APPLY_HOOK ();

      NEXT (0);
    }
  /* call/cc _:24
   *
   * Capture the current continuation, and tail-apply the procedure in
   * local slot 1 to it.  This instruction is part of the implementation
   * of `call/cc', and is not generated by the compiler.
   */
  VM_DEFINE_OP (15, call_cc, "call/cc", OP1 (X32))
    {
      SCM vm_cont, cont;
      scm_t_dynstack *dynstack;
      int first;

      SYNC_IP ();
      dynstack = scm_dynstack_capture_all (&thread->dynstack);
      /* Capture the caller's portion of the stack — everything above
         the current frame — together with its resume state.  */
      vm_cont = scm_i_vm_capture_stack (vp->stack_top,
                                        SCM_FRAME_DYNAMIC_LINK (vp->fp),
                                        SCM_FRAME_PREVIOUS_SP (vp->fp),
                                        SCM_FRAME_RETURN_ADDRESS (vp->fp),
                                        dynstack,
                                        0);
      /* FIXME: Seems silly to capture the registers here, when they are
         already captured in the registers local, which here we are
         copying out to the heap; and likewise, the setjmp(&registers)
         code already has the non-local return handler.  But oh
         well! */
      cont = scm_i_make_continuation (&first, vp, vm_cont);

      if (first)
        {
          /* First (capturing) return: tail-call the procedure in slot
             1, passing the new continuation as its sole argument.  */
          RESET_FRAME (2);

          SP_SET (1, SP_REF (0));
          SP_SET (0, cont);

          if (SCM_LIKELY (SCM_PROGRAM_P (SP_REF (1))))
            ip = SCM_PROGRAM_CODE (SP_REF (1));
          else
            ip = (scm_t_uint32 *) vm_apply_non_program_code;

          APPLY_HOOK ();

          NEXT (0);
        }
      else
        {
          /* Non-local reentry through the continuation: the VM state
             was restored behind our back, so reload the cached
             registers.  */
          CACHE_REGISTER ();
          ABORT_CONTINUATION_HOOK ();
          NEXT (0);
        }
    }
  /* abort _:24
   *
   * Abort to a prompt handler.  The tag is expected in r1, and the rest
   * of the values in the frame are returned to the prompt handler.
   * This corresponds to a tail application of abort-to-prompt.
   */
  VM_DEFINE_OP (16, abort, "abort", OP1 (X32))
    {
      scm_t_uint32 nlocals = FRAME_LOCALS_COUNT ();

      /* Slot 0 holds the abort stub itself and slot 1 the tag.  */
      ASSERT (nlocals >= 2);

      /* FIXME: Really we should capture the caller's registers.  Until
         then, manually advance the IP so that when the prompt resumes,
         it continues with the next instruction.  */
      ip++;
      SYNC_IP ();
      /* Unwind to the prompt; the values in slots 2.. are delivered to
         the handler.  */
      vm_abort (vp, FP_REF (1), nlocals - 2, registers);

      /* vm_abort should not return */
      abort ();
    }
  854. /* builtin-ref dst:12 idx:12
  855. *
  856. * Load a builtin stub by index into DST.
  857. */
  858. VM_DEFINE_OP (17, builtin_ref, "builtin-ref", OP1 (X8_S12_C12) | OP_DST)
  859. {
  860. scm_t_uint16 dst, idx;
  861. UNPACK_12_12 (op, dst, idx);
  862. SP_SET (dst, scm_vm_builtin_ref (idx));
  863. NEXT (1);
  864. }
  /*
   * Function prologues
   */

  /* br-if-nargs-ne expected:24 _:8 offset:24
   * br-if-nargs-lt expected:24 _:8 offset:24
   * br-if-nargs-gt expected:24 _:8 offset:24
   *
   * If the number of actual arguments is not equal, less than, or greater
   * than EXPECTED, respectively, add OFFSET, a signed 24-bit number, to
   * the current instruction pointer.
   */
  /* BR_NARGS (defined elsewhere in this file) presumably compares
     FRAME_LOCALS_COUNT () against EXPECTED with the given operator and
     applies the branch — see the macro definition to confirm.  */
  VM_DEFINE_OP (18, br_if_nargs_ne, "br-if-nargs-ne", OP2 (X8_C24, X8_L24))
    {
      BR_NARGS (!=);
    }
  VM_DEFINE_OP (19, br_if_nargs_lt, "br-if-nargs-lt", OP2 (X8_C24, X8_L24))
    {
      BR_NARGS (<);
    }
  VM_DEFINE_OP (20, br_if_nargs_gt, "br-if-nargs-gt", OP2 (X8_C24, X8_L24))
    {
      BR_NARGS (>);
    }
  /* assert-nargs-ee expected:24
   * assert-nargs-ge expected:24
   * assert-nargs-le expected:24
   *
   * If the number of actual arguments is not ==, >=, or <= EXPECTED,
   * respectively, signal an error.
   */
  VM_DEFINE_OP (21, assert_nargs_ee, "assert-nargs-ee", OP1 (X8_C24))
    {
      scm_t_uint32 expected;
      UNPACK_24 (op, expected);
      VM_ASSERT (FRAME_LOCALS_COUNT () == expected,
                 vm_error_wrong_num_args (FP_REF (0)));
      NEXT (1);
    }
  VM_DEFINE_OP (22, assert_nargs_ge, "assert-nargs-ge", OP1 (X8_C24))
    {
      scm_t_uint32 expected;
      UNPACK_24 (op, expected);
      VM_ASSERT (FRAME_LOCALS_COUNT () >= expected,
                 vm_error_wrong_num_args (FP_REF (0)));
      NEXT (1);
    }
  VM_DEFINE_OP (23, assert_nargs_le, "assert-nargs-le", OP1 (X8_C24))
    {
      scm_t_uint32 expected;
      UNPACK_24 (op, expected);
      VM_ASSERT (FRAME_LOCALS_COUNT () <= expected,
                 vm_error_wrong_num_args (FP_REF (0)));
      NEXT (1);
    }
  /* alloc-frame nlocals:24
   *
   * Ensure that there is space on the stack for NLOCALS local variables,
   * setting them all to SCM_UNDEFINED, except those nargs values that
   * were passed as arguments and procedure.
   */
  VM_DEFINE_OP (24, alloc_frame, "alloc-frame", OP1 (X8_C24))
    {
      scm_t_uint32 nlocals, nargs;
      UNPACK_24 (op, nlocals);

      nargs = FRAME_LOCALS_COUNT ();
      ALLOC_FRAME (nlocals);
      /* Initialize only the newly-added slots [nargs, nlocals); the
         procedure and arguments keep their values.  */
      while (nlocals-- > nargs)
        FP_SET (nlocals, SCM_UNDEFINED);

      NEXT (1);
    }
  935. /* reset-frame nlocals:24
  936. *
  937. * Like alloc-frame, but doesn't check that the stack is big enough.
  938. * Used to reset the frame size to something less than the size that
  939. * was previously set via alloc-frame.
  940. */
  941. VM_DEFINE_OP (25, reset_frame, "reset-frame", OP1 (X8_C24))
  942. {
  943. scm_t_uint32 nlocals;
  944. UNPACK_24 (op, nlocals);
  945. RESET_FRAME (nlocals);
  946. NEXT (1);
  947. }
  /* push src:24
   *
   * Push SRC onto the stack.
   */
  VM_DEFINE_OP (26, push, "push", OP1 (X8_S24))
    {
      scm_t_uint32 src;
      union scm_vm_stack_element val;

      /* FIXME: The compiler currently emits "push" for SCM, F64, U64,
         and S64 variables.  However SCM values are the usual case, and
         on a 32-bit machine it might be cheaper to move a SCM than to
         move a 64-bit number. */
      UNPACK_24 (op, src);
      /* Read SRC before growing the frame: ALLOC_FRAME shifts the SP,
         renumbering the SP-relative slots.  */
      val = SP_REF_SLOT (src);
      ALLOC_FRAME (FRAME_LOCALS_COUNT () + 1);
      SP_SET_SLOT (0, val);
      NEXT (1);
    }
  /* pop dst:24
   *
   * Pop the stack, storing to DST.
   */
  VM_DEFINE_OP (27, pop, "pop", OP1 (X8_S24) | OP_DST)
    {
      scm_t_uint32 dst;
      union scm_vm_stack_element val;

      /* FIXME: The compiler currently emits "pop" for SCM, F64, U64,
         and S64 variables.  However SCM values are the usual case, and
         on a 32-bit machine it might be cheaper to move a SCM than to
         move a 64-bit number. */
      UNPACK_24 (op, dst);
      /* Read the top slot, then shrink the frame by bumping SP (the
         stack grows downward, so +1 discards the top), keeping vp->sp
         in sync with the cached sp.  */
      val = SP_REF_SLOT (0);
      vp->sp = sp = sp + 1;
      SP_SET_SLOT (dst, val);
      NEXT (1);
    }
  /* drop count:24
   *
   * Drop some number of values from the stack.
   */
  VM_DEFINE_OP (28, drop, "drop", OP1 (X8_C24))
    {
      scm_t_uint32 count;

      UNPACK_24 (op, count);
      /* Discard COUNT slots by moving SP up (stack grows downward);
         keep vp->sp and the cached sp in sync.  */
      vp->sp = sp = sp + count;
      NEXT (1);
    }
  /* assert-nargs-ee/locals expected:12 nlocals:12
   *
   * Equivalent to a sequence of assert-nargs-ee and reserve-locals.  The
   * number of locals reserved is EXPECTED + NLOCALS.
   */
  VM_DEFINE_OP (29, assert_nargs_ee_locals, "assert-nargs-ee/locals", OP1 (X8_C12_C12))
    {
      scm_t_uint16 expected, nlocals;
      UNPACK_12_12 (op, expected, nlocals);
      VM_ASSERT (FRAME_LOCALS_COUNT () == expected,
                 vm_error_wrong_num_args (FP_REF (0)));
      /* Grow the frame by NLOCALS slots and clear just the new ones,
         which after ALLOC_FRAME sit at SP offsets [0, nlocals).  */
      ALLOC_FRAME (expected + nlocals);
      while (nlocals--)
        SP_SET (nlocals, SCM_UNDEFINED);

      NEXT (1);
    }
  /* br-if-npos-gt nreq:24 _:8 npos:24 _:8 offset:24
   *
   * Find the first positional argument after NREQ.  If it is greater
   * than NPOS, jump to OFFSET.
   *
   * This instruction is only emitted for functions with multiple
   * clauses, and an earlier clause has keywords and no rest arguments.
   * See "Case-lambda" in the manual, for more on how case-lambda
   * chooses the clause to apply.
   */
  VM_DEFINE_OP (30, br_if_npos_gt, "br-if-npos-gt", OP3 (X8_C24, X8_C24, X8_L24))
    {
      scm_t_uint32 nreq, npos;

      UNPACK_24 (op, nreq);
      UNPACK_24 (ip[1], npos);

      /* We can only have too many positionals if there are more
         arguments than NPOS. */
      if (FRAME_LOCALS_COUNT() > npos)
        {
          scm_t_uint32 n;
          /* Scan the optionals region [nreq, npos) for the first
             keyword, which ends the positionals.  */
          for (n = nreq; n < npos; n++)
            if (scm_is_keyword (FP_REF (n)))
              break;
          /* If no keyword was found up through NPOS, argument NPOS
             itself is an extra positional: take the branch.  */
          if (n == npos && !scm_is_keyword (FP_REF (n)))
            {
              scm_t_int32 offset = ip[2];
              offset >>= 8; /* Sign-extending shift. */
              NEXT (offset);
            }
        }
      NEXT (3);
    }
  /* bind-kwargs nreq:24 flags:8 nreq-and-opt:24 _:8 ntotal:24 kw-offset:32
   *
   * flags := allow-other-keys:1 has-rest:1 _:6
   *
   * Find the last positional argument, and shuffle all the rest above
   * NTOTAL.  Initialize the intervening locals to SCM_UNDEFINED.  Then
   * load the constant at KW-OFFSET words from the current IP, and use it
   * to bind keyword arguments.  If HAS-REST, collect all shuffled
   * arguments into a list, and store it in NREQ-AND-OPT.  Finally, clear
   * the arguments that we shuffled up.
   *
   * A macro-mega-instruction.
   */
  VM_DEFINE_OP (31, bind_kwargs, "bind-kwargs", OP4 (X8_C24, C8_C24, X8_C24, N32))
    {
      scm_t_uint32 nreq, nreq_and_opt, ntotal, npositional, nkw, n, nargs;
      scm_t_int32 kw_offset;
      scm_t_bits kw_bits;
      SCM kw;
      char allow_other_keys, has_rest;

      UNPACK_24 (op, nreq);
      /* Flags live in the low bits of the second word, alongside
         NREQ-AND-OPT.  */
      allow_other_keys = ip[1] & 0x1;
      has_rest = ip[1] & 0x2;
      UNPACK_24 (ip[1], nreq_and_opt);
      UNPACK_24 (ip[2], ntotal);
      kw_offset = ip[3];

      /* The keyword descriptor is an in-image constant at KW-OFFSET
         words from IP; it must be 8-byte aligned to be a valid SCM.  */
      kw_bits = (scm_t_bits) (ip + kw_offset);
      VM_ASSERT (!(kw_bits & 0x7), abort());
      kw = SCM_PACK (kw_bits);

      nargs = FRAME_LOCALS_COUNT ();

      /* look in optionals for first keyword or last positional */
      /* starting after the last required positional arg */
      npositional = nreq;
      while (/* while we have args */
             npositional < nargs
             /* and we still have positionals to fill */
             && npositional < nreq_and_opt
             /* and we haven't reached a keyword yet */
             && !scm_is_keyword (FP_REF (npositional)))
        /* bind this optional arg (by leaving it in place) */
        npositional++;

      nkw = nargs - npositional;
      /* shuffle non-positional arguments above ntotal */
      ALLOC_FRAME (ntotal + nkw);
      n = nkw;
      while (n--)
        FP_SET (ntotal + n, FP_REF (npositional + n));

      /* and fill optionals & keyword args with SCM_UNDEFINED */
      n = npositional;
      while (n < ntotal)
        FP_SET (n++, SCM_UNDEFINED);

      /* Now bind keywords, in the order given.  The shuffled-up args
         alternate keyword/value; KW maps each keyword to the local
         slot it binds.  */
      for (n = 0; n < nkw; n++)
        if (scm_is_keyword (FP_REF (ntotal + n)))
          {
            SCM walk;
            for (walk = kw; scm_is_pair (walk); walk = SCM_CDR (walk))
              if (scm_is_eq (SCM_CAAR (walk), FP_REF (ntotal + n)))
                {
                  SCM si = SCM_CDAR (walk);
                  if (n + 1 < nkw)
                    {
                      /* Copy the keyword's value into its slot; the
                         slot index SI may be a fixnum or a bignum.  */
                      FP_SET (SCM_I_INUMP (si) ? SCM_I_INUM (si) : scm_to_uint32 (si),
                              FP_REF (ntotal + n + 1));
                    }
                  else
                    /* Keyword at the very end: no value follows.  */
                    vm_error_kwargs_missing_value (FP_REF (0),
                                                   FP_REF (ntotal + n));
                  break;
                }
            VM_ASSERT (scm_is_pair (walk) || allow_other_keys,
                       vm_error_kwargs_unrecognized_keyword (FP_REF (0),
                                                             FP_REF (ntotal + n)));
            /* Skip the keyword's value as well.  */
            n++;
          }
        else
          /* A non-keyword in keyword position is only OK if it can go
             to the rest list.  */
          VM_ASSERT (has_rest, vm_error_kwargs_invalid_keyword (FP_REF (0),
                                                                FP_REF (ntotal + n)));

      if (has_rest)
        {
          /* Collect all shuffled-up args into the rest list, bound at
             slot NREQ-AND-OPT.  */
          SCM rest = SCM_EOL;
          n = nkw;
          SYNC_IP ();
          while (n--)
            rest = scm_inline_cons (thread, FP_REF (ntotal + n), rest);
          FP_SET (nreq_and_opt, rest);
        }

      RESET_FRAME (ntotal);

      NEXT (4);
    }
  /* bind-rest dst:24
   *
   * Collect any arguments at or above DST into a list, and store that
   * list at DST.
   */
  VM_DEFINE_OP (32, bind_rest, "bind-rest", OP1 (X8_F24) | OP_DST)
    {
      scm_t_uint32 dst, nargs;
      SCM rest = SCM_EOL;

      UNPACK_24 (op, dst);
      nargs = FRAME_LOCALS_COUNT ();

      if (nargs <= dst)
        {
          /* No rest args: grow the frame to reach DST, clearing any
             intervening slots; the rest list will be empty.  */
          ALLOC_FRAME (dst + 1);
          while (nargs < dst)
            FP_SET (nargs++, SCM_UNDEFINED);
        }
      else
        {
          /* Cons the args [dst, nargs) into a list, last-first, and
             clear their slots as we go.  SYNC_IP first since
             scm_inline_cons can allocate/GC.  */
          SYNC_IP ();

          while (nargs-- > dst)
            {
              rest = scm_inline_cons (thread, FP_REF (nargs), rest);
              FP_SET (nargs, SCM_UNDEFINED);
            }

          RESET_FRAME (dst + 1);
        }

      FP_SET (dst, rest);

      NEXT (1);
    }
  /*
   * Branching instructions
   */

  /* br offset:24
   *
   * Add OFFSET, a signed 24-bit number, to the current instruction
   * pointer.
   */
  VM_DEFINE_OP (33, br, "br", OP1 (X8_L24))
    {
      /* The 24-bit offset occupies the high bits of the op word; shift
         the 8 opcode bits off arithmetically to recover the sign.  */
      scm_t_int32 offset = op;
      offset >>= 8; /* Sign-extending shift. */
      NEXT (offset);
    }
  /* The BR_UNARY macro (defined elsewhere in this file) presumably
     loads the TEST local into its first argument, evaluates the
     predicate, honors the invert bit, and branches by the encoded
     offset — see the macro definition to confirm.  */

  /* br-if-true test:24 invert:1 _:7 offset:24
   *
   * If the value in TEST is true for the purposes of Scheme, add
   * OFFSET, a signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (34, br_if_true, "br-if-true", OP2 (X8_S24, B1_X7_L24))
    {
      BR_UNARY (x, scm_is_true (x));
    }

  /* br-if-null test:24 invert:1 _:7 offset:24
   *
   * If the value in TEST is the end-of-list or Lisp nil, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (35, br_if_null, "br-if-null", OP2 (X8_S24, B1_X7_L24))
    {
      BR_UNARY (x, scm_is_null (x));
    }

  /* br-if-nil test:24 invert:1 _:7 offset:24
   *
   * If the value in TEST is false to Lisp, add OFFSET, a signed 24-bit
   * number, to the current instruction pointer.
   */
  VM_DEFINE_OP (36, br_if_nil, "br-if-nil", OP2 (X8_S24, B1_X7_L24))
    {
      BR_UNARY (x, scm_is_lisp_false (x));
    }

  /* br-if-pair test:24 invert:1 _:7 offset:24
   *
   * If the value in TEST is a pair, add OFFSET, a signed 24-bit number,
   * to the current instruction pointer.
   */
  VM_DEFINE_OP (37, br_if_pair, "br-if-pair", OP2 (X8_S24, B1_X7_L24))
    {
      BR_UNARY (x, scm_is_pair (x));
    }

  /* br-if-struct test:24 invert:1 _:7 offset:24
   *
   * If the value in TEST is a struct, add OFFSET, a signed 24-bit
   * number, to the current instruction pointer.
   */
  VM_DEFINE_OP (38, br_if_struct, "br-if-struct", OP2 (X8_S24, B1_X7_L24))
    {
      BR_UNARY (x, SCM_STRUCTP (x));
    }

  /* br-if-char test:24 invert:1 _:7 offset:24
   *
   * If the value in TEST is a char, add OFFSET, a signed 24-bit number,
   * to the current instruction pointer.
   */
  VM_DEFINE_OP (39, br_if_char, "br-if-char", OP2 (X8_S24, B1_X7_L24))
    {
      BR_UNARY (x, SCM_CHARP (x));
    }

  /* br-if-tc7 test:24 invert:1 tc7:7 offset:24
   *
   * If the value in TEST has the TC7 given in the second word, add
   * OFFSET, a signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (40, br_if_tc7, "br-if-tc7", OP2 (X8_S24, B1_C7_L24))
    {
      /* The tc7 sits in bits 1-7 of the second word, above the invert
         bit.  */
      BR_UNARY (x, SCM_HAS_TYP7 (x, (ip[1] >> 1) & 0x7f));
    }
  /* br-if-eq a:12 b:12 invert:1 _:7 offset:24
   *
   * If the value in A is eq? to the value in B, add OFFSET, a signed
   * 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (41, br_if_eq, "br-if-eq", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      BR_BINARY (x, y, scm_is_eq (x, y));
    }

  /* br-if-eqv a:12 b:12 invert:1 _:7 offset:24
   *
   * If the value in A is eqv? to the value in B, add OFFSET, a signed
   * 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (42, br_if_eqv, "br-if-eqv", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      /* Fast path: eq? objects are always eqv?; only call out to the
         full scm_eqv_p for two non-immediate operands.  */
      BR_BINARY (x, y,
                 scm_is_eq (x, y)
                 || (SCM_NIMP (x) && SCM_NIMP (y)
                     && scm_is_true (scm_eqv_p (x, y))));
    }

  /* Opcode 43 is currently unassigned.  */
  VM_DEFINE_OP (43, unused_43, NULL, NOP)
    {
      abort ();
    }
  /* br-if-logtest a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the exact integer in A has any bits in common with the exact
   * integer in B, add OFFSET, a signed 24-bit number, to the current
   * instruction pointer.
   */
  VM_DEFINE_OP (44, br_if_logtest, "br-if-logtest", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      /* SYNC_IP up front: the non-fixnum path calls scm_logtest, which
         may allocate or throw.  */
      SYNC_IP ();
      {
        /* Fixnum fast path: mask off the shared tag bits and AND the
           raw words; otherwise defer to scm_logtest.  */
        BR_BINARY (x, y,
                   ((SCM_I_INUMP (x) && SCM_I_INUMP (y))
                    ? (SCM_UNPACK (x) & SCM_UNPACK (y) & ~scm_tc2_int)
                    : scm_is_true (scm_logtest (x, y))));
      }
    }

  /* br-if-= a:12 b:12 invert:1 _:7 offset:24
   *
   * If the value in A is = to the value in B, add OFFSET, a signed
   * 24-bit number, to the current instruction pointer.
   */
  /* BR_ARITHMETIC presumably tries the C operator on fixnums and falls
     back to the given Scheme comparison — see the macro definition.  */
  VM_DEFINE_OP (45, br_if_ee, "br-if-=", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      BR_ARITHMETIC (==, scm_num_eq_p);
    }

  /* br-if-< a:12 b:12 invert:1 _:7 offset:24
   *
   * If the value in A is < to the value in B, add OFFSET, a signed
   * 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (46, br_if_lt, "br-if-<", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      BR_ARITHMETIC (<, scm_less_p);
    }

  /* br-if-<= a:12 b:12 invert:1 _:7 offset:24
   *
   * If the value in A is <= to the value in B, add OFFSET, a signed
   * 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (47, br_if_le, "br-if-<=", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      BR_ARITHMETIC (<=, scm_leq_p);
    }
  /*
   * Lexical binding instructions
   */

  /* mov dst:12 src:12
   *
   * Copy a value from one local slot to another.
   */
  VM_DEFINE_OP (48, mov, "mov", OP1 (X8_S12_S12) | OP_DST)
    {
      scm_t_uint16 dst;
      scm_t_uint16 src;

      UNPACK_12_12 (op, dst, src);
      /* FIXME: The compiler currently emits "mov" for SCM, F64, U64,
         and S64 variables.  However SCM values are the usual case, and
         on a 32-bit machine it might be cheaper to move a SCM than to
         move a 64-bit number. */
      /* Copy the whole stack slot, whatever its representation.  */
      SP_SET_SLOT (dst, SP_REF_SLOT (src));

      NEXT (1);
    }
  /* long-mov dst:24 _:8 src:24
   *
   * Copy a value from one local slot to another.  Like "mov" but with
   * 24-bit slot indexes for large frames.
   */
  VM_DEFINE_OP (49, long_mov, "long-mov", OP2 (X8_S24, X8_S24) | OP_DST)
    {
      scm_t_uint32 dst;
      scm_t_uint32 src;

      UNPACK_24 (op, dst);
      UNPACK_24 (ip[1], src);
      /* FIXME: The compiler currently emits "long-mov" for SCM, F64,
         U64, and S64 variables.  However SCM values are the usual case,
         and on a 32-bit machine it might be cheaper to move a SCM than
         to move a 64-bit number. */
      SP_SET_SLOT (dst, SP_REF_SLOT (src));

      NEXT (2);
    }
  /* long-fmov dst:24 _:8 src:24
   *
   * Copy a value from one local slot to another.  Slot indexes are
   * relative to the FP.
   */
  VM_DEFINE_OP (50, long_fmov, "long-fmov", OP2 (X8_F24, X8_F24) | OP_DST)
    {
      scm_t_uint32 dst;
      scm_t_uint32 src;

      UNPACK_24 (op, dst);
      UNPACK_24 (ip[1], src);
      /* FP-relative addressing, unlike mov/long-mov which are
         SP-relative.  */
      FP_SET (dst, FP_REF (src));

      NEXT (2);
    }
/* box dst:12 src:12
 *
 * Create a new variable holding SRC, and place it in DST.
 */
VM_DEFINE_OP (51, box, "box", OP1 (X8_S12_S12) | OP_DST)
{
  scm_t_uint16 dst, src;
  UNPACK_12_12 (op, dst, src);
  /* scm_inline_cell may allocate, so sync the IP first so GC can walk
     this frame.  */
  SYNC_IP ();
  /* A variable is a one-cell heap object tagged scm_tc7_variable
     whose payload word is the boxed value.  */
  SP_SET (dst, scm_inline_cell (thread, scm_tc7_variable,
                                SCM_UNPACK (SP_REF (src))));
  NEXT (1);
}

/* box-ref dst:12 src:12
 *
 * Unpack the variable at SRC into DST, asserting that the variable is
 * actually bound.
 */
VM_DEFINE_OP (52, box_ref, "box-ref", OP1 (X8_S12_S12) | OP_DST)
{
  scm_t_uint16 dst, src;
  SCM var;
  UNPACK_12_12 (op, dst, src);
  var = SP_REF (src);
  VM_VALIDATE_VARIABLE (var, "variable-ref");
  VM_ASSERT (VARIABLE_BOUNDP (var), vm_error_unbound (var));
  SP_SET (dst, VARIABLE_REF (var));
  NEXT (1);
}

/* box-set! dst:12 src:12
 *
 * Set the contents of the variable at DST to the value in SRC.
 */
VM_DEFINE_OP (53, box_set, "box-set!", OP1 (X8_S12_S12))
{
  scm_t_uint16 dst, src;
  SCM var;
  UNPACK_12_12 (op, dst, src);
  var = SP_REF (dst);
  VM_VALIDATE_VARIABLE (var, "variable-set!");
  VARIABLE_SET (var, SP_REF (src));
  NEXT (1);
}
/* make-closure dst:24 offset:32 _:8 nfree:24
 *
 * Make a new closure, and write it to DST.  The code for the closure
 * will be found at OFFSET words from the current IP.  OFFSET is a
 * signed 32-bit integer.  Space for NFREE free variables will be
 * allocated.
 */
VM_DEFINE_OP (54, make_closure, "make-closure", OP3 (X8_S24, L32, X8_C24) | OP_DST)
{
  scm_t_uint32 dst, nfree, n;
  scm_t_int32 offset;
  SCM closure;

  UNPACK_24 (op, dst);
  offset = ip[1];
  UNPACK_24 (ip[2], nfree);

  // FIXME: Assert range of nfree?
  SYNC_IP ();
  /* Layout: header word (tc7_program with NFREE in the upper bits),
     code pointer, then NFREE free-variable slots.  */
  closure = scm_inline_words (thread, scm_tc7_program | (nfree << 16),
                              nfree + 2);
  SCM_SET_CELL_WORD_1 (closure, ip + offset);
  // FIXME: Elide these initializations?
  for (n = 0; n < nfree; n++)
    SCM_PROGRAM_FREE_VARIABLE_SET (closure, n, SCM_BOOL_F);
  SP_SET (dst, closure);
  NEXT (3);
}

/* free-ref dst:12 src:12 _:8 idx:24
 *
 * Load free variable IDX from the closure SRC into local slot DST.
 */
VM_DEFINE_OP (55, free_ref, "free-ref", OP2 (X8_S12_S12, X8_C24) | OP_DST)
{
  scm_t_uint16 dst, src;
  scm_t_uint32 idx;
  UNPACK_12_12 (op, dst, src);
  UNPACK_24 (ip[1], idx);
  /* CHECK_FREE_VARIABLE (src); */
  SP_SET (dst, SCM_PROGRAM_FREE_VARIABLE_REF (SP_REF (src), idx));
  NEXT (2);
}

/* free-set! dst:12 src:12 _:8 idx:24
 *
 * Set free variable IDX of the closure DST to the value in SRC.
 */
VM_DEFINE_OP (56, free_set, "free-set!", OP2 (X8_S12_S12, X8_C24))
{
  scm_t_uint16 dst, src;
  scm_t_uint32 idx;
  UNPACK_12_12 (op, dst, src);
  UNPACK_24 (ip[1], idx);
  /* CHECK_FREE_VARIABLE (src); */
  SCM_PROGRAM_FREE_VARIABLE_SET (SP_REF (dst), idx, SP_REF (src));
  NEXT (2);
}
/*
 * Immediates and statically allocated non-immediates
 */

/* make-short-immediate dst:8 low-bits:16
 *
 * Make an immediate whose low bits are LOW-BITS, and whose top bits are
 * 0.
 */
VM_DEFINE_OP (57, make_short_immediate, "make-short-immediate", OP1 (X8_S8_I16) | OP_DST)
{
  scm_t_uint8 dst;
  scm_t_bits val;

  UNPACK_8_16 (op, dst, val);
  SP_SET (dst, SCM_PACK (val));
  NEXT (1);
}

/* make-long-immediate dst:24 low-bits:32
 *
 * Make an immediate whose low bits are LOW-BITS, and whose top bits are
 * 0.
 */
VM_DEFINE_OP (58, make_long_immediate, "make-long-immediate", OP2 (X8_S24, I32) | OP_DST)
{
  scm_t_uint32 dst;
  scm_t_bits val;

  UNPACK_24 (op, dst);
  val = ip[1];
  SP_SET (dst, SCM_PACK (val));
  NEXT (2);
}

/* make-long-long-immediate dst:24 high-bits:32 low-bits:32
 *
 * Make an immediate with HIGH-BITS and LOW-BITS.
 */
VM_DEFINE_OP (59, make_long_long_immediate, "make-long-long-immediate", OP3 (X8_S24, A32, B32) | OP_DST)
{
  scm_t_uint32 dst;
  scm_t_bits val;

  UNPACK_24 (op, dst);
#if SIZEOF_SCM_T_BITS > 4
  /* 64-bit targets: combine the two 32-bit halves.  */
  val = ip[1];
  val <<= 32;
  val |= ip[2];
#else
  /* 32-bit targets: the high word must be zero or the value would not
     fit in an SCM.  */
  ASSERT (ip[1] == 0);
  val = ip[2];
#endif
  SP_SET (dst, SCM_PACK (val));
  NEXT (3);
}
/* make-non-immediate dst:24 offset:32
 *
 * Load a pointer to statically allocated memory into DST.  The
 * object's memory is will be found OFFSET 32-bit words away from the
 * current instruction pointer.  OFFSET is a signed value.  The
 * intention here is that the compiler would produce an object file
 * containing the words of a non-immediate object, and this
 * instruction creates a pointer to that memory, effectively
 * resurrecting that object.
 *
 * Whether the object is mutable or immutable depends on where it was
 * allocated by the compiler, and loaded by the loader.
 */
VM_DEFINE_OP (60, make_non_immediate, "make-non-immediate", OP2 (X8_S24, N32) | OP_DST)
{
  scm_t_uint32 dst;
  scm_t_int32 offset;
  scm_t_uint32* loc;
  scm_t_bits unpacked;

  UNPACK_24 (op, dst);
  offset = ip[1];
  loc = ip + offset;
  unpacked = (scm_t_bits) loc;

  /* A heap object pointer must be 8-byte aligned, or its tag bits
     would be ambiguous.  */
  VM_ASSERT (!(unpacked & 0x7), abort());

  SP_SET (dst, SCM_PACK (unpacked));

  NEXT (2);
}

/* static-ref dst:24 offset:32
 *
 * Load a SCM value into DST.  The SCM value will be fetched from
 * memory, OFFSET 32-bit words away from the current instruction
 * pointer.  OFFSET is a signed value.
 *
 * The intention is for this instruction to be used to load constants
 * that the compiler is unable to statically allocate, like symbols.
 * These values would be initialized when the object file loads.
 */
VM_DEFINE_OP (61, static_ref, "static-ref", OP2 (X8_S24, R32) | OP_DST)
{
  scm_t_uint32 dst;
  scm_t_int32 offset;
  scm_t_uint32* loc;
  scm_t_uintptr loc_bits;

  UNPACK_24 (op, dst);
  offset = ip[1];
  loc = ip + offset;
  loc_bits = (scm_t_uintptr) loc;
  VM_ASSERT (ALIGNED_P (loc, SCM), abort());

  SP_SET (dst, *((SCM *) loc_bits));

  NEXT (2);
}

/* static-set! src:24 offset:32
 *
 * Store a SCM value into memory, OFFSET 32-bit words away from the
 * current instruction pointer.  OFFSET is a signed value.
 */
VM_DEFINE_OP (62, static_set, "static-set!", OP2 (X8_S24, LO32))
{
  scm_t_uint32 src;
  scm_t_int32 offset;
  scm_t_uint32* loc;

  UNPACK_24 (op, src);
  offset = ip[1];
  loc = ip + offset;
  VM_ASSERT (ALIGNED_P (loc, SCM), abort());

  *((SCM *) loc) = SP_REF (src);

  NEXT (2);
}

/* static-patch! _:24 dst-offset:32 src-offset:32
 *
 * Patch a pointer at DST-OFFSET to point to SRC-OFFSET.  Both offsets
 * are signed 32-bit values, indicating a memory address as a number
 * of 32-bit words away from the current instruction pointer.
 */
VM_DEFINE_OP (63, static_patch, "static-patch!", OP3 (X32, LO32, L32))
{
  scm_t_int32 dst_offset, src_offset;
  void *src;
  void** dst_loc;

  dst_offset = ip[1];
  src_offset = ip[2];

  dst_loc = (void **) (ip + dst_offset);
  src = ip + src_offset;
  VM_ASSERT (ALIGNED_P (dst_loc, void*), abort());

  *dst_loc = src;

  NEXT (3);
}
/*
 * Mutable top-level bindings
 */

/* There are three slightly different ways to resolve toplevel
   variables.

   1. A toplevel reference outside of a function.  These need to be
      looked up when the expression is evaluated -- no later, and no
      before.  They are looked up relative to the module that is
      current when the expression is evaluated.  For example:

        (if (foo) a b)

      The "resolve" instruction resolves the variable (box), and then
      access is via box-ref or box-set!.

   2. A toplevel reference inside a function.  These are looked up
      relative to the module that was current when the function was
      defined.  Unlike code at the toplevel, which is usually run only
      once, these bindings benefit from memoized lookup, in which the
      variable resulting from the lookup is cached in the function.

        (lambda () (if (foo) a b))

      The toplevel-box instruction is equivalent to "resolve", but
      caches the resulting variable in statically allocated memory.

   3. A reference to an identifier with respect to a particular
      module.  This can happen for primitive references, and
      references residualized by macro expansions.  These can always
      be cached.  Use module-box for these.
 */

/* current-module dst:24
 *
 * Store the current module in DST.
 */
VM_DEFINE_OP (64, current_module, "current-module", OP1 (X8_S24) | OP_DST)
{
  scm_t_uint32 dst;

  UNPACK_24 (op, dst);

  /* scm_current_module can run arbitrary Scheme-visible code; sync IP
     first.  */
  SYNC_IP ();
  SP_SET (dst, scm_current_module ());

  NEXT (1);
}

/* resolve dst:24 bound?:1 _:7 sym:24
 *
 * Resolve SYM in the current module, and place the resulting variable
 * in DST.  If the bound? bit is set, signal an error if the variable
 * is unbound.
 */
VM_DEFINE_OP (65, resolve, "resolve", OP2 (X8_S24, B1_X7_S24) | OP_DST)
{
  scm_t_uint32 dst;
  scm_t_uint32 sym;
  SCM var;

  UNPACK_24 (op, dst);
  UNPACK_24 (ip[1], sym);

  SYNC_IP ();
  var = scm_lookup (SP_REF (sym));
  /* The lookup may have grown or moved the stack; re-fetch SP.  */
  CACHE_SP ();
  /* Low bit of the second word is the bound? flag.  */
  if (ip[1] & 0x1)
    VM_ASSERT (VARIABLE_BOUNDP (var), vm_error_unbound (SP_REF (sym)));
  SP_SET (dst, var);

  NEXT (2);
}

/* define! dst:12 sym:12
 *
 * Look up a binding for SYM in the current module, creating it if
 * necessary.  Place the resulting variable in DST.
 */
VM_DEFINE_OP (66, define, "define!", OP1 (X8_S12_S12) | OP_DST)
{
  scm_t_uint16 dst, sym;
  SCM var;
  UNPACK_12_12 (op, dst, sym);
  SYNC_IP ();
  var = scm_module_ensure_local_variable (scm_current_module (),
                                          SP_REF (sym));
  CACHE_SP ();
  SP_SET (dst, var);
  NEXT (1);
}
/* toplevel-box dst:24 var-offset:32 mod-offset:32 sym-offset:32 bound?:1 _:31
 *
 * Load a SCM value.  The SCM value will be fetched from memory,
 * VAR-OFFSET 32-bit words away from the current instruction pointer.
 * VAR-OFFSET is a signed value.  Up to here, toplevel-box is like
 * static-ref.
 *
 * Then, if the loaded value is a variable, it is placed in DST, and control
 * flow continues.
 *
 * Otherwise, we have to resolve the variable.  In that case we load
 * the module from MOD-OFFSET, just as we loaded the variable.
 * Usually the module gets set when the closure is created.  The name
 * is an offset to a symbol.
 *
 * We use the module and the symbol to resolve the variable, placing it in
 * DST, and caching the resolved variable so that we will hit the cache next
 * time.
 */
VM_DEFINE_OP (67, toplevel_box, "toplevel-box", OP5 (X8_S24, R32, R32, N32, B1_X31) | OP_DST)
{
  scm_t_uint32 dst;
  scm_t_int32 var_offset;
  scm_t_uint32* var_loc_u32;
  SCM *var_loc;
  SCM var;

  UNPACK_24 (op, dst);
  var_offset = ip[1];
  var_loc_u32 = ip + var_offset;
  VM_ASSERT (ALIGNED_P (var_loc_u32, SCM), abort());
  var_loc = (SCM *) var_loc_u32;
  var = *var_loc;

  /* Slow path: cache miss -- the static cell does not hold a variable
     yet, so resolve SYM in MOD and memoize the result in the cell.  */
  if (SCM_UNLIKELY (!SCM_VARIABLEP (var)))
    {
      SCM mod, sym;
      scm_t_int32 mod_offset = ip[2]; /* signed */
      scm_t_int32 sym_offset = ip[3]; /* signed */
      scm_t_uint32 *mod_loc = ip + mod_offset;
      scm_t_uint32 *sym_loc = ip + sym_offset;

      SYNC_IP ();

      VM_ASSERT (ALIGNED_P (mod_loc, SCM), abort());
      VM_ASSERT (ALIGNED_P (sym_loc, SCM), abort());

      mod = *((SCM *) mod_loc);
      sym = *((SCM *) sym_loc);

      /* If the toplevel scope was captured before modules were
         booted, use the root module.  */
      if (scm_is_false (mod))
        mod = scm_the_root_module ();

      var = scm_module_lookup (mod, sym);
      CACHE_SP ();
      /* Low bit of word 4 is the bound? flag.  */
      if (ip[4] & 0x1)
        VM_ASSERT (VARIABLE_BOUNDP (var), vm_error_unbound (sym));

      *var_loc = var;
    }

  SP_SET (dst, var);
  NEXT (5);
}
  1723. /* module-box dst:24 var-offset:32 mod-offset:32 sym-offset:32 bound?:1 _:31
  1724. *
  1725. * Like toplevel-box, except MOD-OFFSET points at the name of a module
  1726. * instead of the module itself.
  1727. */
  1728. VM_DEFINE_OP (68, module_box, "module-box", OP5 (X8_S24, R32, N32, N32, B1_X31) | OP_DST)
  1729. {
  1730. scm_t_uint32 dst;
  1731. scm_t_int32 var_offset;
  1732. scm_t_uint32* var_loc_u32;
  1733. SCM *var_loc;
  1734. SCM var;
  1735. UNPACK_24 (op, dst);
  1736. var_offset = ip[1];
  1737. var_loc_u32 = ip + var_offset;
  1738. VM_ASSERT (ALIGNED_P (var_loc_u32, SCM), abort());
  1739. var_loc = (SCM *) var_loc_u32;
  1740. var = *var_loc;
  1741. if (SCM_UNLIKELY (!SCM_VARIABLEP (var)))
  1742. {
  1743. SCM modname, sym;
  1744. scm_t_int32 modname_offset = ip[2]; /* signed */
  1745. scm_t_int32 sym_offset = ip[3]; /* signed */
  1746. scm_t_uint32 *modname_words = ip + modname_offset;
  1747. scm_t_uint32 *sym_loc = ip + sym_offset;
  1748. SYNC_IP ();
  1749. VM_ASSERT (!(((scm_t_uintptr) modname_words) & 0x7), abort());
  1750. VM_ASSERT (ALIGNED_P (sym_loc, SCM), abort());
  1751. modname = SCM_PACK ((scm_t_bits) modname_words);
  1752. sym = *((SCM *) sym_loc);
  1753. if (!scm_module_system_booted_p)
  1754. {
  1755. ASSERT (scm_is_true
  1756. scm_equal_p (modname,
  1757. scm_list_2
  1758. (SCM_BOOL_T,
  1759. scm_from_utf8_symbol ("guile"))));
  1760. var = scm_lookup (sym);
  1761. }
  1762. else if (scm_is_true (SCM_CAR (modname)))
  1763. var = scm_public_lookup (SCM_CDR (modname), sym);
  1764. else
  1765. var = scm_private_lookup (SCM_CDR (modname), sym);
  1766. CACHE_SP ();
  1767. if (ip[4] & 0x1)
  1768. VM_ASSERT (VARIABLE_BOUNDP (var), vm_error_unbound (sym));
  1769. *var_loc = var;
  1770. }
  1771. SP_SET (dst, var);
  1772. NEXT (5);
  1773. }
/*
 * The dynamic environment
 */

/* prompt tag:24 escape-only?:1 _:7 proc-slot:24 _:8 handler-offset:24
 *
 * Push a new prompt on the dynamic stack, with a tag from TAG and a
 * handler at HANDLER-OFFSET words from the current IP.  The handler
 * will expect a multiple-value return as if from a call with the
 * procedure at PROC-SLOT.
 */
VM_DEFINE_OP (69, prompt, "prompt", OP3 (X8_S24, B1_X7_F24, X8_L24))
{
  scm_t_uint32 tag, proc_slot;
  scm_t_int32 offset;
  scm_t_uint8 escape_only_p;
  scm_t_dynstack_prompt_flags flags;

  UNPACK_24 (op, tag);
  escape_only_p = ip[1] & 0x1;
  UNPACK_24 (ip[1], proc_slot);
  offset = ip[2];
  offset >>= 8; /* Sign extension */

  /* Push the prompt onto the dynamic stack. */
  flags = escape_only_p ? SCM_F_DYNSTACK_PROMPT_ESCAPE_ONLY : 0;
  SYNC_IP ();
  /* FP and the proc slot are recorded as distances from the stack
     top, so they remain valid if the stack is relocated.  */
  scm_dynstack_push_prompt (&thread->dynstack, flags,
                            SP_REF (tag),
                            vp->stack_top - vp->fp,
                            vp->stack_top - FP_SLOT (proc_slot),
                            ip + offset,
                            registers);
  NEXT (3);
}

/* wind winder:12 unwinder:12
 *
 * Push wind and unwind procedures onto the dynamic stack.  Note that
 * neither are actually called; the compiler should emit calls to wind
 * and unwind for the normal dynamic-wind control flow.  Also note that
 * the compiler should have inserted checks that they wind and unwind
 * procs are thunks, if it could not prove that to be the case.
 */
VM_DEFINE_OP (70, wind, "wind", OP1 (X8_S12_S12))
{
  scm_t_uint16 winder, unwinder;
  UNPACK_12_12 (op, winder, unwinder);
  SYNC_IP ();
  scm_dynstack_push_dynwind (&thread->dynstack,
                             SP_REF (winder), SP_REF (unwinder));
  NEXT (1);
}

/* unwind _:24
 *
 * A normal exit from the dynamic extent of an expression.  Pop the top
 * entry off of the dynamic stack.
 */
VM_DEFINE_OP (71, unwind, "unwind", OP1 (X32))
{
  scm_dynstack_pop (&thread->dynstack);
  NEXT (1);
}
/* push-fluid fluid:12 value:12
 *
 * Dynamically bind VALUE to FLUID.
 */
VM_DEFINE_OP (72, push_fluid, "push-fluid", OP1 (X8_S12_S12))
{
  scm_t_uint32 fluid, value;

  UNPACK_12_12 (op, fluid, value);

  SYNC_IP ();
  scm_dynstack_push_fluid (&thread->dynstack,
                           SP_REF (fluid), SP_REF (value),
                           thread->dynamic_state);
  NEXT (1);
}

/* pop-fluid _:24
 *
 * Leave the dynamic extent of a with-fluid* expression, restoring the
 * fluid to its previous value.
 */
VM_DEFINE_OP (73, pop_fluid, "pop-fluid", OP1 (X32))
{
  SYNC_IP ();
  scm_dynstack_unwind_fluid (&thread->dynstack,
                             thread->dynamic_state);
  NEXT (1);
}

/* fluid-ref dst:12 src:12
 *
 * Reference the fluid in SRC, and place the value in DST.
 */
VM_DEFINE_OP (74, fluid_ref, "fluid-ref", OP1 (X8_S12_S12) | OP_DST)
{
  scm_t_uint16 dst, src;
  SCM fluid;
  struct scm_cache_entry *entry;

  UNPACK_12_12 (op, dst, src);
  fluid = SP_REF (src);

  /* If we find FLUID in the cache, then it is indeed a fluid.  */
  entry = scm_cache_lookup (&thread->dynamic_state->cache, fluid);
  if (SCM_LIKELY (scm_is_eq (SCM_PACK (entry->key), fluid)
                  && !SCM_UNBNDP (SCM_PACK (entry->value))))
    {
      /* Cache hit with a bound value: no type check or allocation
         needed.  */
      SP_SET (dst, SCM_PACK (entry->value));
      NEXT (1);
    }
  else
    {
      /* Slow path: scm_fluid_ref validates FLUID and handles unbound
         fluids.  */
      SYNC_IP ();
      SP_SET (dst, scm_fluid_ref (fluid));
      NEXT (1);
    }
}

/* fluid-set fluid:12 val:12
 *
 * Set the value of the fluid in DST to the value in SRC.
 */
VM_DEFINE_OP (75, fluid_set, "fluid-set!", OP1 (X8_S12_S12))
{
  scm_t_uint16 a, b;
  SCM fluid, value;
  struct scm_cache_entry *entry;

  UNPACK_12_12 (op, a, b);
  fluid = SP_REF (a);
  value = SP_REF (b);

  /* If we find FLUID in the cache, then it is indeed a fluid.  */
  entry = scm_cache_lookup (&thread->dynamic_state->cache, fluid);
  if (SCM_LIKELY (scm_is_eq (SCM_PACK (entry->key), fluid)))
    {
      /* Cache hit: update in place.  */
      entry->value = SCM_UNPACK (value);
      NEXT (1);
    }
  else
    {
      /* Slow path: scm_fluid_set_x validates FLUID.  */
      SYNC_IP ();
      scm_fluid_set_x (fluid, value);
      NEXT (1);
    }
}
/*
 * Strings, symbols, and keywords
 */

/* string-length dst:12 src:12
 *
 * Store the length of the string in SRC in DST, as an unboxed u64.
 */
VM_DEFINE_OP (76, string_length, "string-length", OP1 (X8_S12_S12) | OP_DST)
{
  ARGS1 (str);
  VM_VALIDATE_STRING (str, "string-length");
  SP_SET_U64 (dst, scm_i_string_length (str));
  NEXT (1);
}

/* string-ref dst:8 src:8 idx:8
 *
 * Fetch the character at position IDX in the string in SRC, and store
 * it in DST.  IDX is an unboxed u64 slot.
 */
VM_DEFINE_OP (77, string_ref, "string-ref", OP1 (X8_S8_S8_S8) | OP_DST)
{
  scm_t_uint8 dst, src, idx;
  SCM str;
  scm_t_uint64 c_idx;

  UNPACK_8_8_8 (op, dst, src, idx);
  str = SP_REF (src);
  c_idx = SP_REF_U64 (idx);

  VM_VALIDATE_STRING (str, "string-ref");
  VM_VALIDATE_INDEX (c_idx, scm_i_string_length (str), "string-ref");

  RETURN (scm_i_make_char (scm_i_string_ref (str, c_idx)));
}

/* string-set! instruction is currently number 192.  Probably need to
   reorder before releasing.  */

/* string->number dst:12 src:12
 *
 * Parse a string in SRC to a number, and store in DST.  Produces #f if
 * the string does not name a number (scm_string_to_number semantics).
 */
VM_DEFINE_OP (78, string_to_number, "string->number", OP1 (X8_S12_S12) | OP_DST)
{
  scm_t_uint16 dst, src;

  UNPACK_12_12 (op, dst, src);
  SYNC_IP ();
  SP_SET (dst,
          scm_string_to_number (SP_REF (src),
                                SCM_UNDEFINED /* radix = 10 */));
  NEXT (1);
}

/* string->symbol dst:12 src:12
 *
 * Parse a string in SRC to a symbol, and store in DST.
 */
VM_DEFINE_OP (79, string_to_symbol, "string->symbol", OP1 (X8_S12_S12) | OP_DST)
{
  scm_t_uint16 dst, src;

  UNPACK_12_12 (op, dst, src);
  SYNC_IP ();
  SP_SET (dst, scm_string_to_symbol (SP_REF (src)));
  NEXT (1);
}

/* symbol->keyword dst:12 src:12
 *
 * Make a keyword from the symbol in SRC, and store it in DST.
 */
VM_DEFINE_OP (80, symbol_to_keyword, "symbol->keyword", OP1 (X8_S12_S12) | OP_DST)
{
  scm_t_uint16 dst, src;
  UNPACK_12_12 (op, dst, src);
  SYNC_IP ();
  SP_SET (dst, scm_symbol_to_keyword (SP_REF (src)));
  NEXT (1);
}
/*
 * Pairs
 */

/* cons dst:8 car:8 cdr:8
 *
 * Cons CAR and CDR, and store the result in DST.
 */
VM_DEFINE_OP (81, cons, "cons", OP1 (X8_S8_S8_S8) | OP_DST)
{
  ARGS2 (x, y);
  /* Allocates; sync IP for GC.  */
  SYNC_IP ();
  RETURN (scm_inline_cons (thread, x, y));
}

/* car dst:12 src:12
 *
 * Place the car of SRC in DST.
 */
VM_DEFINE_OP (82, car, "car", OP1 (X8_S12_S12) | OP_DST)
{
  ARGS1 (x);
  VM_VALIDATE_PAIR (x, "car");
  RETURN (SCM_CAR (x));
}

/* cdr dst:12 src:12
 *
 * Place the cdr of SRC in DST.
 */
VM_DEFINE_OP (83, cdr, "cdr", OP1 (X8_S12_S12) | OP_DST)
{
  ARGS1 (x);
  VM_VALIDATE_PAIR (x, "cdr");
  RETURN (SCM_CDR (x));
}

/* set-car! pair:12 car:12
 *
 * Set the car of the pair in the first operand to the value in the
 * second.
 */
VM_DEFINE_OP (84, set_car, "set-car!", OP1 (X8_S12_S12))
{
  scm_t_uint16 a, b;
  SCM x, y;
  UNPACK_12_12 (op, a, b);
  x = SP_REF (a);
  y = SP_REF (b);
  /* Mutable-pair check rejects pairs in read-only memory.  */
  VM_VALIDATE_MUTABLE_PAIR (x, "set-car!");
  SCM_SETCAR (x, y);
  NEXT (1);
}

/* set-cdr! pair:12 cdr:12
 *
 * Set the cdr of the pair in the first operand to the value in the
 * second.
 */
VM_DEFINE_OP (85, set_cdr, "set-cdr!", OP1 (X8_S12_S12))
{
  scm_t_uint16 a, b;
  SCM x, y;
  UNPACK_12_12 (op, a, b);
  x = SP_REF (a);
  y = SP_REF (b);
  VM_VALIDATE_MUTABLE_PAIR (x, "set-cdr!");
  SCM_SETCDR (x, y);
  NEXT (1);
}
/*
 * Numeric operations
 */

/* add dst:8 a:8 b:8
 *
 * Add A to B, and place the result in DST.
 */
VM_DEFINE_OP (86, add, "add", OP1 (X8_S8_S8_S8) | OP_DST)
{
  /* Fixnum fast path via `+'; generic fallback through scm_sum.  */
  BINARY_INTEGER_OP (+, scm_sum);
}

/* add/immediate dst:8 src:8 imm:8
 *
 * Add the unsigned 8-bit value IMM to the value from SRC, and place
 * the result in DST.
 */
VM_DEFINE_OP (87, add_immediate, "add/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
{
  scm_t_uint8 dst, src, imm;
  SCM x;

  UNPACK_8_8_8 (op, dst, src, imm);
  x = SP_REF (src);
  if (SCM_LIKELY (SCM_I_INUMP (x)))
    {
      /* IMM is at most 255, so the sum of two fixnum-ranged values
         cannot overflow scm_t_signed_bits; only the fixnum-range
         check is needed.  */
      scm_t_signed_bits sum = SCM_I_INUM (x) + (scm_t_signed_bits) imm;

      if (SCM_LIKELY (SCM_POSFIXABLE (sum)))
        RETURN (SCM_I_MAKINUM (sum));
    }
  RETURN_EXP (scm_sum (x, SCM_I_MAKINUM (imm)));
}

/* sub dst:8 a:8 b:8
 *
 * Subtract B from A, and place the result in DST.
 */
VM_DEFINE_OP (88, sub, "sub", OP1 (X8_S8_S8_S8) | OP_DST)
{
  BINARY_INTEGER_OP (-, scm_difference);
}

/* sub/immediate dst:8 src:8 imm:8
 *
 * Subtract the unsigned 8-bit value IMM from the value in SRC, and
 * place the result in DST.
 */
VM_DEFINE_OP (89, sub_immediate, "sub/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
{
  scm_t_uint8 dst, src, imm;
  SCM x;

  UNPACK_8_8_8 (op, dst, src, imm);
  x = SP_REF (src);
  if (SCM_LIKELY (SCM_I_INUMP (x)))
    {
      scm_t_signed_bits diff = SCM_I_INUM (x) - (scm_t_signed_bits) imm;

      if (SCM_LIKELY (SCM_NEGFIXABLE (diff)))
        RETURN (SCM_I_MAKINUM (diff));
    }
  RETURN_EXP (scm_difference (x, SCM_I_MAKINUM (imm)));
}

/* mul dst:8 a:8 b:8
 *
 * Multiply A and B, and place the result in DST.
 */
VM_DEFINE_OP (90, mul, "mul", OP1 (X8_S8_S8_S8) | OP_DST)
{
  ARGS2 (x, y);
  RETURN_EXP (scm_product (x, y));
}

/* div dst:8 a:8 b:8
 *
 * Divide A by B, and place the result in DST.
 */
VM_DEFINE_OP (91, div, "div", OP1 (X8_S8_S8_S8) | OP_DST)
{
  ARGS2 (x, y);
  RETURN_EXP (scm_divide (x, y));
}

/* quo dst:8 a:8 b:8
 *
 * Divide A by B, and place the quotient in DST.
 */
VM_DEFINE_OP (92, quo, "quo", OP1 (X8_S8_S8_S8) | OP_DST)
{
  ARGS2 (x, y);
  RETURN_EXP (scm_quotient (x, y));
}

/* rem dst:8 a:8 b:8
 *
 * Divide A by B, and place the remainder in DST.
 */
VM_DEFINE_OP (93, rem, "rem", OP1 (X8_S8_S8_S8) | OP_DST)
{
  ARGS2 (x, y);
  RETURN_EXP (scm_remainder (x, y));
}

/* mod dst:8 a:8 b:8
 *
 * Place the modulo of A by B in DST.
 */
VM_DEFINE_OP (94, mod, "mod", OP1 (X8_S8_S8_S8) | OP_DST)
{
  ARGS2 (x, y);
  RETURN_EXP (scm_modulo (x, y));
}
/* ash dst:8 a:8 b:8
 *
 * Shift A arithmetically by B bits, and place the result in DST.
 * Negative B shifts right; positive B shifts left.
 */
VM_DEFINE_OP (95, ash, "ash", OP1 (X8_S8_S8_S8) | OP_DST)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    {
      if (SCM_I_INUM (y) < 0)
        /* Right shift, will be a fixnum.  The shift count is clamped
           to SCM_I_FIXNUM_BIT-1 to avoid UB from over-wide shifts.  */
        RETURN (SCM_I_MAKINUM
                (SCM_SRS (SCM_I_INUM (x),
                          (-SCM_I_INUM (y) <= SCM_I_FIXNUM_BIT-1)
                          ? -SCM_I_INUM (y) : SCM_I_FIXNUM_BIT-1)));
      else
        /* Left shift. See comments in scm_ash. */
        {
          scm_t_signed_bits nn, bits_to_shift;

          nn = SCM_I_INUM (x);
          bits_to_shift = SCM_I_INUM (y);

          /* Only take the fast path when the result provably stays in
             fixnum range (all shifted-out bits equal the sign bit).  */
          if (bits_to_shift < SCM_I_FIXNUM_BIT-1
              && ((scm_t_bits)
                  (SCM_SRS (nn, (SCM_I_FIXNUM_BIT-1 - bits_to_shift)) + 1)
                  <= 1))
            RETURN (SCM_I_MAKINUM (nn < 0
                                   ? -(-nn << bits_to_shift)
                                   : (nn << bits_to_shift)));
          /* fall through */
        }
      /* fall through */
    }
  RETURN_EXP (scm_ash (x, y));
}

/* logand dst:8 a:8 b:8
 *
 * Place the bitwise AND of A and B into DST.
 */
VM_DEFINE_OP (96, logand, "logand", OP1 (X8_S8_S8_S8) | OP_DST)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    /* Compute bitwise AND without untagging.  Valid because the
       fixnum tag bits of both operands are identical and preserved
       by AND.  */
    RETURN (SCM_PACK (SCM_UNPACK (x) & SCM_UNPACK (y)));
  RETURN_EXP (scm_logand (x, y));
}

/* logior dst:8 a:8 b:8
 *
 * Place the bitwise inclusive OR of A with B in DST.
 */
VM_DEFINE_OP (97, logior, "logior", OP1 (X8_S8_S8_S8) | OP_DST)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    /* Compute bitwise OR without untagging.  */
    RETURN (SCM_PACK (SCM_UNPACK (x) | SCM_UNPACK (y)));
  RETURN_EXP (scm_logior (x, y));
}

/* logxor dst:8 a:8 b:8
 *
 * Place the bitwise exclusive OR of A with B in DST.
 */
VM_DEFINE_OP (98, logxor, "logxor", OP1 (X8_S8_S8_S8) | OP_DST)
{
  ARGS2 (x, y);
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    /* XOR would destroy the tag, so untag, XOR, and retag.  */
    RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) ^ SCM_I_INUM (y)));
  RETURN_EXP (scm_logxor (x, y));
}
  2216. /* make-vector dst:8 length:8 init:8
  2217. *
  2218. * Make a vector and write it to DST. The vector will have space for
  2219. * LENGTH slots. They will be filled with the value in slot INIT.
  2220. */
  2221. VM_DEFINE_OP (99, make_vector, "make-vector", OP1 (X8_S8_S8_S8) | OP_DST)
  2222. {
  2223. scm_t_uint8 dst, length, init;
  2224. scm_t_uint64 length_val;
  2225. UNPACK_8_8_8 (op, dst, length, init);
  2226. length_val = SP_REF_U64 (length);
  2227. VM_VALIDATE_INDEX (length_val, (size_t) -1, "make-vector");
  2228. /* TODO: Inline this allocation. */
  2229. SYNC_IP ();
  2230. SP_SET (dst, scm_c_make_vector (length_val, SP_REF (init)));
  2231. NEXT (1);
  2232. }
  2233. /* make-vector/immediate dst:8 length:8 init:8
  2234. *
  2235. * Make a short vector of known size and write it to DST. The vector
  2236. * will have space for LENGTH slots, an immediate value. They will be
  2237. * filled with the value in slot INIT.
  2238. */
  2239. VM_DEFINE_OP (100, make_vector_immediate, "make-vector/immediate", OP1 (X8_S8_C8_S8) | OP_DST)
  2240. {
  2241. scm_t_uint8 dst, init;
  2242. scm_t_int32 length, n;
  2243. SCM val, vector;
  2244. UNPACK_8_8_8 (op, dst, length, init);
  2245. val = SP_REF (init);
  2246. SYNC_IP ();
  2247. vector = scm_inline_words (thread, scm_tc7_vector | (length << 8),
  2248. length + 1);
  2249. for (n = 0; n < length; n++)
  2250. SCM_SIMPLE_VECTOR_SET (vector, n, val);
  2251. SP_SET (dst, vector);
  2252. NEXT (1);
  2253. }
/* vector-length dst:12 src:12
 *
 * Store the length of the vector in SRC in DST.
 */
VM_DEFINE_OP (101, vector_length, "vector-length", OP1 (X8_S12_S12) | OP_DST)
{
  ARGS1 (vect);
  VM_VALIDATE_VECTOR (vect, "vector-length");
  /* The length is written as an untagged u64, not a fixnum.  */
  SP_SET_U64 (dst, SCM_I_VECTOR_LENGTH (vect));
  NEXT (1);
}

/* vector-ref dst:8 src:8 idx:8
 *
 * Fetch the item at position IDX in the vector in SRC, and store it
 * in DST.  IDX names a slot holding an untagged u64 index.
 */
VM_DEFINE_OP (102, vector_ref, "vector-ref", OP1 (X8_S8_S8_S8) | OP_DST)
{
  scm_t_uint8 dst, src, idx;
  SCM vect;
  scm_t_uint64 c_idx;

  UNPACK_8_8_8 (op, dst, src, idx);
  vect = SP_REF (src);
  c_idx = SP_REF_U64 (idx);
  VM_VALIDATE_VECTOR (vect, "vector-ref");
  VM_VALIDATE_INDEX (c_idx, SCM_I_VECTOR_LENGTH (vect), "vector-ref");
  RETURN (SCM_I_VECTOR_ELTS (vect)[c_idx]);
}

/* vector-ref/immediate dst:8 src:8 idx:8
 *
 * Fill DST with the item IDX elements into the vector at SRC.  Useful
 * for building data types using vectors.  Here IDX is an immediate
 * 8-bit operand, not a slot.
 */
VM_DEFINE_OP (103, vector_ref_immediate, "vector-ref/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
{
  scm_t_uint8 dst, src, idx;
  SCM vect;

  UNPACK_8_8_8 (op, dst, src, idx);
  vect = SP_REF (src);
  /* NOTE(review): errors here are reported under the name
     "vector-ref" rather than "vector-ref/immediate" — confirm this
     is intentional.  */
  VM_VALIDATE_VECTOR (vect, "vector-ref");
  VM_VALIDATE_INDEX (idx, SCM_I_VECTOR_LENGTH (vect), "vector-ref");
  SP_SET (dst, SCM_I_VECTOR_ELTS (vect)[idx]);
  NEXT (1);
}
/* vector-set! dst:8 idx:8 src:8
 *
 * Store SRC into the vector DST at index IDX.  IDX names a slot
 * holding an untagged u64 index.
 */
VM_DEFINE_OP (104, vector_set, "vector-set!", OP1 (X8_S8_S8_S8))
{
  scm_t_uint8 dst, idx, src;
  SCM vect, val;
  scm_t_uint64 c_idx;

  UNPACK_8_8_8 (op, dst, idx, src);
  vect = SP_REF (dst);
  c_idx = SP_REF_U64 (idx);
  val = SP_REF (src);
  /* Mutation requires a mutable (non-literal) vector.  */
  VM_VALIDATE_MUTABLE_VECTOR (vect, "vector-set!");
  VM_VALIDATE_INDEX (c_idx, SCM_I_VECTOR_LENGTH (vect), "vector-set!");
  SCM_I_VECTOR_WELTS (vect)[c_idx] = val;
  NEXT (1);
}

/* vector-set!/immediate dst:8 idx:8 src:8
 *
 * Store SRC into the vector DST at index IDX.  Here IDX is an
 * immediate value.
 */
VM_DEFINE_OP (105, vector_set_immediate, "vector-set!/immediate", OP1 (X8_S8_C8_S8))
{
  scm_t_uint8 dst, idx, src;
  SCM vect, val;

  UNPACK_8_8_8 (op, dst, idx, src);
  vect = SP_REF (dst);
  val = SP_REF (src);
  VM_VALIDATE_MUTABLE_VECTOR (vect, "vector-set!");
  VM_VALIDATE_INDEX (idx, SCM_I_VECTOR_LENGTH (vect), "vector-set!");
  SCM_I_VECTOR_WELTS (vect)[idx] = val;
  NEXT (1);
}
  2333. /*
  2334. * Structs and GOOPS
  2335. */
  2336. /* struct-vtable dst:12 src:12
  2337. *
  2338. * Store the vtable of SRC into DST.
  2339. */
  2340. VM_DEFINE_OP (106, struct_vtable, "struct-vtable", OP1 (X8_S12_S12) | OP_DST)
  2341. {
  2342. ARGS1 (obj);
  2343. VM_VALIDATE_STRUCT (obj, "struct_vtable");
  2344. RETURN (SCM_STRUCT_VTABLE (obj));
  2345. }
/* allocate-struct dst:8 vtable:8 nfields:8
 *
 * Allocate a new struct with VTABLE, and place it in DST.  The struct
 * will be constructed with space for NFIELDS fields, which should
 * correspond to the field count of the VTABLE.
 */
VM_DEFINE_OP (107, allocate_struct, "allocate-struct", OP1 (X8_S8_S8_S8) | OP_DST)
{
  scm_t_uint8 dst, vtable, nfields;
  SCM ret;

  UNPACK_8_8_8 (op, dst, vtable, nfields);

  /* TODO: Specify nfields as untagged value when calling
     allocate-struct.  */
  SYNC_IP ();
  /* NFIELDS is an untagged u64 slot; box it for the C helper.  */
  ret = scm_allocate_struct (SP_REF (vtable),
                             scm_from_uint64 (SP_REF_U64 (nfields)));
  SP_SET (dst, ret);
  NEXT (1);
}

/* struct-ref dst:8 src:8 idx:8
 *
 * Fetch the item at slot IDX in the struct in SRC, and store it
 * in DST.  IDX names a slot holding an untagged u64 index.
 */
VM_DEFINE_OP (108, struct_ref, "struct-ref", OP1 (X8_S8_S8_S8) | OP_DST)
{
  scm_t_uint8 dst, src, idx;
  SCM obj;
  scm_t_uint64 index;

  UNPACK_8_8_8 (op, dst, src, idx);
  obj = SP_REF (src);
  index = SP_REF_U64 (idx);

  /* Fast path: a "simple" struct with an in-range index can be read
     directly; anything else falls back to the generic C accessor.  */
  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && index < (SCM_STRUCT_DATA_REF (SCM_STRUCT_VTABLE (obj),
                                                   scm_vtable_index_size))))
    RETURN (SCM_STRUCT_SLOT_REF (obj, index));

  SYNC_IP ();
  RETURN (scm_struct_ref (obj, scm_from_uint64 (index)));
}

/* struct-set! dst:8 idx:8 src:8
 *
 * Store SRC into the struct DST at slot IDX.
 */
VM_DEFINE_OP (109, struct_set, "struct-set!", OP1 (X8_S8_S8_S8))
{
  scm_t_uint8 dst, idx, src;
  SCM obj, val;
  scm_t_uint64 index;

  UNPACK_8_8_8 (op, dst, idx, src);
  obj = SP_REF (dst);
  val = SP_REF (src);
  index = SP_REF_U64 (idx);

  /* Fast path additionally requires the vtable's SIMPLE_RW flag, as
     simple structs may still be read-only.  */
  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE_RW)
                  && index < (SCM_STRUCT_DATA_REF (SCM_STRUCT_VTABLE (obj),
                                                   scm_vtable_index_size))))
    {
      SCM_STRUCT_SLOT_SET (obj, index, val);
      NEXT (1);
    }

  SYNC_IP ();
  scm_struct_set_x (obj, scm_from_uint64 (index), val);
  NEXT (1);
}
/* allocate-struct/immediate dst:8 vtable:8 nfields:8
 *
 * Allocate a new struct with VTABLE, and place it in DST.  The struct
 * will be constructed with space for NFIELDS fields, which should
 * correspond to the field count of the VTABLE.  NFIELDS is an
 * immediate 8-bit operand.
 */
VM_DEFINE_OP (110, allocate_struct_immediate, "allocate-struct/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
{
  scm_t_uint8 dst, vtable, nfields;
  SCM ret;

  UNPACK_8_8_8 (op, dst, vtable, nfields);

  SYNC_IP ();
  /* The immediate field count becomes a fixnum for the C helper.  */
  ret = scm_allocate_struct (SP_REF (vtable), SCM_I_MAKINUM (nfields));
  SP_SET (dst, ret);
  NEXT (1);
}

/* struct-ref/immediate dst:8 src:8 idx:8
 *
 * Fetch the item at slot IDX in the struct in SRC, and store it
 * in DST.  IDX is an immediate unsigned 8-bit value.
 */
VM_DEFINE_OP (111, struct_ref_immediate, "struct-ref/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
{
  scm_t_uint8 dst, src, idx;
  SCM obj;

  UNPACK_8_8_8 (op, dst, src, idx);
  obj = SP_REF (src);

  /* Fast path for "simple" structs with an in-range slot index;
     otherwise fall back to the generic accessor.  */
  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && idx < SCM_STRUCT_DATA_REF (SCM_STRUCT_VTABLE (obj),
                                                scm_vtable_index_size)))
    RETURN (SCM_STRUCT_SLOT_REF (obj, idx));

  SYNC_IP ();
  RETURN (scm_struct_ref (obj, SCM_I_MAKINUM (idx)));
}

/* struct-set!/immediate dst:8 idx:8 src:8
 *
 * Store SRC into the struct DST at slot IDX.  IDX is an immediate
 * unsigned 8-bit value.
 */
VM_DEFINE_OP (112, struct_set_immediate, "struct-set!/immediate", OP1 (X8_S8_C8_S8))
{
  scm_t_uint8 dst, idx, src;
  SCM obj, val;

  UNPACK_8_8_8 (op, dst, idx, src);
  obj = SP_REF (dst);
  val = SP_REF (src);

  /* Writing also requires the SIMPLE_RW vtable flag.  */
  if (SCM_LIKELY (SCM_STRUCTP (obj)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE)
                  && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
                                                    SCM_VTABLE_FLAG_SIMPLE_RW)
                  && idx < SCM_STRUCT_DATA_REF (SCM_STRUCT_VTABLE (obj),
                                                scm_vtable_index_size)))
    {
      SCM_STRUCT_SLOT_SET (obj, idx, val);
      NEXT (1);
    }

  SYNC_IP ();
  scm_struct_set_x (obj, SCM_I_MAKINUM (idx), val);
  NEXT (1);
}
/* class-of dst:12 type:12
 *
 * Store the class of SRC into DST.  (The original comment said
 * "vtable", a copy-paste from struct-vtable; for GOOPS instances the
 * class is read directly, otherwise scm_class_of computes it.)
 */
VM_DEFINE_OP (113, class_of, "class-of", OP1 (X8_S12_S12) | OP_DST)
{
  ARGS1 (obj);
  if (SCM_INSTANCEP (obj))
    RETURN (SCM_CLASS_OF (obj));
  RETURN_EXP (scm_class_of (obj));
}
/*
 * Arrays, packed uniform arrays, and bytevectors.
 */

/* load-typed-array dst:24 _:8 type:24 _:8 shape:24 offset:32 len:32
 *
 * Load the contiguous typed array located at OFFSET 32-bit words away
 * from the instruction pointer, and store into DST.  LEN is a byte
 * length.  OFFSET is signed.
 */
VM_DEFINE_OP (114, load_typed_array, "load-typed-array", OP5 (X8_S24, X8_S24, X8_S24, N32, C32) | OP_DST)
{
  scm_t_uint32 dst, type, shape;
  scm_t_int32 offset;
  scm_t_uint32 len;

  UNPACK_24 (op, dst);
  UNPACK_24 (ip[1], type);
  UNPACK_24 (ip[2], shape);
  /* ip[3] is read into a signed type: the offset may be negative.  */
  offset = ip[3];
  len = ip[4];
  SYNC_IP ();
  /* The array's backing store is embedded in the instruction stream
     at ip + offset.  */
  SP_SET (dst, scm_from_contiguous_typed_array (SP_REF (type),
                                                SP_REF (shape),
                                                ip + offset, len));
  NEXT (5);
}

/* make-array dst:24 _:8 type:24 _:8 fill:24 _:8 bounds:24
 *
 * Make a new array with TYPE, FILL, and BOUNDS, storing it in DST.
 */
VM_DEFINE_OP (115, make_array, "make-array", OP4 (X8_S24, X8_S24, X8_S24, X8_S24) | OP_DST)
{
  scm_t_uint32 dst, type, fill, bounds;

  UNPACK_24 (op, dst);
  UNPACK_24 (ip[1], type);
  UNPACK_24 (ip[2], fill);
  UNPACK_24 (ip[3], bounds);
  SYNC_IP ();
  SP_SET (dst, scm_make_typed_array (SP_REF (type), SP_REF (fill),
                                     SP_REF (bounds)));
  NEXT (4);
}
/* bv-u8-ref dst:8 src:8 idx:8
 * bv-s8-ref dst:8 src:8 idx:8
 * bv-u16-ref dst:8 src:8 idx:8
 * bv-s16-ref dst:8 src:8 idx:8
 * bv-u32-ref dst:8 src:8 idx:8
 * bv-s32-ref dst:8 src:8 idx:8
 * bv-u64-ref dst:8 src:8 idx:8
 * bv-s64-ref dst:8 src:8 idx:8
 * bv-f32-ref dst:8 src:8 idx:8
 * bv-f64-ref dst:8 src:8 idx:8
 *
 * Fetch the item at byte offset IDX in the bytevector SRC, and store
 * it in DST.  All accesses use native endianness.
 */
/* Generic bytevector load.  Reads a SIZE-byte value of TYPE at byte
   offset C_IDX (an untagged u64 taken from slot IDX) and stores it in
   DST through the SP_SET_<slot> accessor.  The bound check is split
   as (len >= size && len - size >= c_idx) so that c_idx + size cannot
   overflow; memcpy is used so unaligned offsets are well-defined.  */
#define BV_REF(stem, type, size, slot)                                  \
  do {                                                                  \
    type result;                                                        \
    scm_t_uint8 dst, src, idx;                                          \
    SCM bv;                                                             \
    scm_t_uint64 c_idx;                                                 \
    UNPACK_8_8_8 (op, dst, src, idx);                                   \
    bv = SP_REF (src);                                                  \
    c_idx = SP_REF_U64 (idx);                                           \
                                                                        \
    VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref");                    \
                                                                        \
    VM_ASSERT (SCM_BYTEVECTOR_LENGTH (bv) >= size                       \
               && SCM_BYTEVECTOR_LENGTH (bv) - size >= c_idx,           \
               vm_error_out_of_range_uint64 ("bv-" #stem "-ref", c_idx)); \
                                                                        \
    memcpy (&result, SCM_BYTEVECTOR_CONTENTS (bv) + c_idx, size);       \
    SP_SET_ ## slot (dst, result);                                      \
    NEXT (1);                                                           \
  } while (0)
/* Instantiate one load op per element type; the narrow integer
   results widen into the u64 or s64 slot, floats into the f64 slot.  */
VM_DEFINE_OP (116, bv_u8_ref, "bv-u8-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  BV_REF (u8, scm_t_uint8, 1, U64);

VM_DEFINE_OP (117, bv_s8_ref, "bv-s8-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  BV_REF (s8, scm_t_int8, 1, S64);

VM_DEFINE_OP (118, bv_u16_ref, "bv-u16-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  BV_REF (u16, scm_t_uint16, 2, U64);

VM_DEFINE_OP (119, bv_s16_ref, "bv-s16-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  BV_REF (s16, scm_t_int16, 2, S64);

VM_DEFINE_OP (120, bv_u32_ref, "bv-u32-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  BV_REF (u32, scm_t_uint32, 4, U64);

VM_DEFINE_OP (121, bv_s32_ref, "bv-s32-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  BV_REF (s32, scm_t_int32, 4, S64);

VM_DEFINE_OP (122, bv_u64_ref, "bv-u64-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  BV_REF (u64, scm_t_uint64, 8, U64);

VM_DEFINE_OP (123, bv_s64_ref, "bv-s64-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  BV_REF (s64, scm_t_int64, 8, S64);

VM_DEFINE_OP (124, bv_f32_ref, "bv-f32-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  BV_REF (f32, float, 4, F64);

VM_DEFINE_OP (125, bv_f64_ref, "bv-f64-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  BV_REF (f64, double, 8, F64);
/* bv-u8-set! dst:8 idx:8 src:8
 * bv-s8-set! dst:8 idx:8 src:8
 * bv-u16-set! dst:8 idx:8 src:8
 * bv-s16-set! dst:8 idx:8 src:8
 * bv-u32-set! dst:8 idx:8 src:8
 * bv-s32-set! dst:8 idx:8 src:8
 * bv-u64-set! dst:8 idx:8 src:8
 * bv-s64-set! dst:8 idx:8 src:8
 * bv-f32-set! dst:8 idx:8 src:8
 * bv-f64-set! dst:8 idx:8 src:8
 *
 * Store SRC into the bytevector DST at byte offset IDX.  Multibyte
 * values are written using native endianness.
 */
/* Bytevector store for element types narrower than the 64-bit slot:
   the slot value is range-checked against [MIN, MAX] before being
   narrowed to TYPE and written.  The length check mirrors BV_REF's
   overflow-safe form.  */
#define BV_BOUNDED_SET(stem, type, min, max, size, slot_type, slot)     \
  do {                                                                  \
    scm_t_ ## slot_type slot_val;                                       \
    type val;                                                           \
    scm_t_uint8 dst, idx, src;                                          \
    SCM bv;                                                             \
    scm_t_uint64 c_idx;                                                 \
    UNPACK_8_8_8 (op, dst, idx, src);                                   \
    bv = SP_REF (dst);                                                  \
    c_idx = SP_REF_U64 (idx);                                           \
    slot_val = SP_REF_ ## slot (src);                                   \
                                                                        \
    VM_VALIDATE_MUTABLE_BYTEVECTOR (bv, "bv-" #stem "-set!");           \
                                                                        \
    VM_ASSERT (SCM_BYTEVECTOR_LENGTH (bv) >= size                       \
               && SCM_BYTEVECTOR_LENGTH (bv) - size >= c_idx,           \
               vm_error_out_of_range_uint64 ("bv-" #stem "-set!", c_idx)); \
                                                                        \
    VM_ASSERT (slot_val >= min && slot_val <= max,                      \
               vm_error_out_of_range_ ## slot_type ("bv-" #stem "-set!", \
                                                    slot_val));         \
                                                                        \
    val = slot_val;                                                     \
    memcpy (SCM_BYTEVECTOR_CONTENTS (bv) + c_idx, &val, size);          \
    NEXT (1);                                                           \
  } while (0)
/* Bytevector store for element types that fill the whole 64-bit (or
   f64) slot: no range check is needed, the slot value is written
   directly.  memcpy keeps unaligned writes well-defined.  */
#define BV_SET(stem, type, size, slot)                                  \
  do {                                                                  \
    type val;                                                           \
    scm_t_uint8 dst, idx, src;                                          \
    SCM bv;                                                             \
    scm_t_uint64 c_idx;                                                 \
    UNPACK_8_8_8 (op, dst, idx, src);                                   \
    bv = SP_REF (dst);                                                  \
    c_idx = SP_REF_U64 (idx);                                           \
    val = SP_REF_ ## slot (src);                                        \
                                                                        \
    VM_VALIDATE_MUTABLE_BYTEVECTOR (bv, "bv-" #stem "-set!");           \
                                                                        \
    VM_ASSERT (SCM_BYTEVECTOR_LENGTH (bv) >= size                       \
               && SCM_BYTEVECTOR_LENGTH (bv) - size >= c_idx,           \
               vm_error_out_of_range_uint64 ("bv-" #stem "-set!", c_idx)); \
                                                                        \
    memcpy (SCM_BYTEVECTOR_CONTENTS (bv) + c_idx, &val, size);          \
    NEXT (1);                                                           \
  } while (0)
/* Narrow integer stores use BV_BOUNDED_SET (range-checked); full-width
   and float stores use BV_SET.  */
VM_DEFINE_OP (126, bv_u8_set, "bv-u8-set!", OP1 (X8_S8_S8_S8))
  BV_BOUNDED_SET (u8, scm_t_uint8,
                  0, SCM_T_UINT8_MAX, 1, uint64, U64);

VM_DEFINE_OP (127, bv_s8_set, "bv-s8-set!", OP1 (X8_S8_S8_S8))
  BV_BOUNDED_SET (s8, scm_t_int8,
                  SCM_T_INT8_MIN, SCM_T_INT8_MAX, 1, int64, S64);

VM_DEFINE_OP (128, bv_u16_set, "bv-u16-set!", OP1 (X8_S8_S8_S8))
  BV_BOUNDED_SET (u16, scm_t_uint16,
                  0, SCM_T_UINT16_MAX, 2, uint64, U64);

VM_DEFINE_OP (129, bv_s16_set, "bv-s16-set!", OP1 (X8_S8_S8_S8))
  BV_BOUNDED_SET (s16, scm_t_int16,
                  SCM_T_INT16_MIN, SCM_T_INT16_MAX, 2, int64, S64);

VM_DEFINE_OP (130, bv_u32_set, "bv-u32-set!", OP1 (X8_S8_S8_S8))
  BV_BOUNDED_SET (u32, scm_t_uint32,
                  0, SCM_T_UINT32_MAX, 4, uint64, U64);

VM_DEFINE_OP (131, bv_s32_set, "bv-s32-set!", OP1 (X8_S8_S8_S8))
  BV_BOUNDED_SET (s32, scm_t_int32,
                  SCM_T_INT32_MIN, SCM_T_INT32_MAX, 4, int64, S64);

VM_DEFINE_OP (132, bv_u64_set, "bv-u64-set!", OP1 (X8_S8_S8_S8))
  BV_SET (u64, scm_t_uint64, 8, U64);

VM_DEFINE_OP (133, bv_s64_set, "bv-s64-set!", OP1 (X8_S8_S8_S8))
  BV_SET (s64, scm_t_int64, 8, S64);

VM_DEFINE_OP (134, bv_f32_set, "bv-f32-set!", OP1 (X8_S8_S8_S8))
  BV_SET (f32, float, 4, F64);
  2668. VM_DEFINE_OP (135, bv_f64_set, "bv-f64-set!", OP1 (X8_S8_S8_S8))
  2669. BV_SET (f6, double, 8, F64);
/* scm->f64 dst:12 src:12
 *
 * Unpack a raw double-precision floating-point value from SRC and
 * place it in DST.  Note that SRC can be any value on which
 * scm_to_double can operate.
 */
VM_DEFINE_OP (136, scm_to_f64, "scm->f64", OP1 (X8_S12_S12) | OP_DST)
{
  scm_t_uint16 dst, src;
  UNPACK_12_12 (op, dst, src);
  /* scm_to_double may throw; sync the IP for proper backtraces.  */
  SYNC_IP ();
  SP_SET_F64 (dst, scm_to_double (SP_REF (src)));
  NEXT (1);
}

/* f64->scm dst:12 src:12
 *
 * Pack a raw double-precision floating point value into an inexact
 * number allocated on the heap.
 */
VM_DEFINE_OP (137, f64_to_scm, "f64->scm", OP1 (X8_S12_S12) | OP_DST)
{
  scm_t_uint16 dst, src;
  UNPACK_12_12 (op, dst, src);
  /* scm_from_double allocates; sync the IP first.  */
  SYNC_IP ();
  SP_SET (dst, scm_from_double (SP_REF_F64 (src)));
  NEXT (1);
}
  2697. /* fadd dst:8 a:8 b:8
  2698. *
  2699. * Add A to B, and place the result in DST. The operands and the
  2700. * result are unboxed double-precision floating-point numbers.
  2701. */
  2702. VM_DEFINE_OP (138, fadd, "fadd", OP1 (X8_S8_S8_S8) | OP_DST)
  2703. {
  2704. scm_t_uint8 dst, a, b;
  2705. UNPACK_8_8_8 (op, dst, a, b);
  2706. SP_SET_F64 (dst, SP_REF_F64 (a) + SP_REF_F64 (b));
  2707. NEXT (1);
  2708. }
  2709. /* fsub dst:8 a:8 b:8
  2710. *
  2711. * Subtract B from A, and place the result in DST. The operands and
  2712. * the result are unboxed double-precision floating-point numbers.
  2713. */
  2714. VM_DEFINE_OP (139, fsub, "fsub", OP1 (X8_S8_S8_S8) | OP_DST)
  2715. {
  2716. scm_t_uint8 dst, a, b;
  2717. UNPACK_8_8_8 (op, dst, a, b);
  2718. SP_SET_F64 (dst, SP_REF_F64 (a) - SP_REF_F64 (b));
  2719. NEXT (1);
  2720. }
  2721. /* fmul dst:8 a:8 b:8
  2722. *
  2723. * Multiply A and B, and place the result in DST. The operands and
  2724. * the result are unboxed double-precision floating-point numbers.
  2725. */
  2726. VM_DEFINE_OP (140, fmul, "fmul", OP1 (X8_S8_S8_S8) | OP_DST)
  2727. {
  2728. scm_t_uint8 dst, a, b;
  2729. UNPACK_8_8_8 (op, dst, a, b);
  2730. SP_SET_F64 (dst, SP_REF_F64 (a) * SP_REF_F64 (b));
  2731. NEXT (1);
  2732. }
  2733. /* fdiv dst:8 a:8 b:8
  2734. *
  2735. * Divide A by B, and place the result in DST. The operands and the
  2736. * result are unboxed double-precision floating-point numbers.
  2737. */
  2738. VM_DEFINE_OP (141, fdiv, "fdiv", OP1 (X8_S8_S8_S8) | OP_DST)
  2739. {
  2740. scm_t_uint8 dst, a, b;
  2741. UNPACK_8_8_8 (op, dst, a, b);
  2742. SP_SET_F64 (dst, SP_REF_F64 (a) / SP_REF_F64 (b));
  2743. NEXT (1);
  2744. }
/* apply-non-program _:24
 *
 * Used by the VM as a trampoline to apply non-programs.  Loops,
 * unwrapping PROC in slot 0 until it is a bytecode program:
 * applicable structs are replaced by their procedure field;
 * applicable smobs shuffle the arguments up one slot and install the
 * smob's apply trampoline as the procedure; anything else signals a
 * wrong-type-to-apply error.
 */
VM_DEFINE_OP (142, apply_non_program, "apply-non-program", OP1 (X32))
{
  SCM proc = FP_REF (0);

  while (!SCM_PROGRAM_P (proc))
    {
      if (SCM_STRUCTP (proc) && SCM_STRUCT_APPLICABLE_P (proc))
        {
          /* Unwrap one layer of applicable struct and retry.  */
          proc = SCM_STRUCT_PROCEDURE (proc);
          FP_SET (0, proc);
          continue;
        }
      if (SCM_HAS_TYP7 (proc, scm_tc7_smob) && SCM_SMOB_APPLICABLE_P (proc))
        {
          scm_t_uint32 n = FRAME_LOCALS_COUNT();

          /* Shuffle args up.  (FIXME: no real need to shuffle; just set
             IP and go. )  */
          ALLOC_FRAME (n + 1);
          while (n--)
            FP_SET (n + 1, FP_REF (n));

          proc = SCM_SMOB_DESCRIPTOR (proc).apply_trampoline;
          FP_SET (0, proc);
          continue;
        }

      SYNC_IP();
      vm_error_wrong_type_apply (proc);
    }

  /* Jump straight into the program's code; NEXT (0) re-dispatches at
     the new IP without advancing.  */
  ip = SCM_PROGRAM_CODE (proc);
  NEXT (0);
}
/* scm->u64 dst:12 src:12
 *
 * Unpack an unsigned 64-bit integer from SRC and place it in DST.
 */
VM_DEFINE_OP (143, scm_to_u64, "scm->u64", OP1 (X8_S12_S12) | OP_DST)
{
  scm_t_uint16 dst, src;
  UNPACK_12_12 (op, dst, src);
  /* scm_to_uint64 may throw on out-of-range values.  */
  SYNC_IP ();
  SP_SET_U64 (dst, scm_to_uint64 (SP_REF (src)));
  NEXT (1);
}

/* u64->scm dst:12 src:12
 *
 * Pack an unsigned 64-bit integer into a SCM value.
 */
VM_DEFINE_OP (144, u64_to_scm, "u64->scm", OP1 (X8_S12_S12) | OP_DST)
{
  scm_t_uint16 dst, src;
  UNPACK_12_12 (op, dst, src);
  /* Large values allocate a bignum; sync the IP first.  */
  SYNC_IP ();
  SP_SET (dst, scm_from_uint64 (SP_REF_U64 (src)));
  NEXT (1);
}

/* bv-length dst:12 src:12
 *
 * Store the length of the bytevector in SRC in DST, as an untagged
 * 64-bit integer.
 */
VM_DEFINE_OP (145, bv_length, "bv-length", OP1 (X8_S12_S12) | OP_DST)
{
  ARGS1 (bv);
  VM_VALIDATE_BYTEVECTOR (bv, "bytevector-length");
  SP_SET_U64 (dst, SCM_BYTEVECTOR_LENGTH (bv));
  NEXT (1);
}
/* br-if-u64-= a:24 _:8 b:24 invert:1 _:7 offset:24
 *
 * If the u64 value in A is = to the u64 value in B, add OFFSET, a
 * signed 24-bit number, to the current instruction pointer.  (The
 * original comment said "br-if-="; these three ops compare untagged
 * u64 slots.)
 */
VM_DEFINE_OP (146, br_if_u64_ee, "br-if-u64-=", OP3 (X8_S24, X8_S24, B1_X7_L24))
{
  BR_U64_ARITHMETIC (==);
}

/* br-if-u64-< a:24 _:8 b:24 invert:1 _:7 offset:24
 *
 * If the u64 value in A is < the u64 value in B, add OFFSET, a
 * signed 24-bit number, to the current instruction pointer.
 */
VM_DEFINE_OP (147, br_if_u64_lt, "br-if-u64-<", OP3 (X8_S24, X8_S24, B1_X7_L24))
{
  BR_U64_ARITHMETIC (<);
}

/* br-if-u64-<= a:24 _:8 b:24 invert:1 _:7 offset:24
 *
 * If the u64 value in A is <= the u64 value in B, add OFFSET, a
 * signed 24-bit number, to the current instruction pointer.
 */
VM_DEFINE_OP (148, br_if_u64_le, "br-if-u64-<=", OP3 (X8_S24, X8_S24, B1_X7_L24))
{
  BR_U64_ARITHMETIC (<=);
}
  2836. /* uadd dst:8 a:8 b:8
  2837. *
  2838. * Add A to B, and place the result in DST. The operands and the
  2839. * result are unboxed unsigned 64-bit integers. Overflow will wrap
  2840. * around.
  2841. */
  2842. VM_DEFINE_OP (149, uadd, "uadd", OP1 (X8_S8_S8_S8) | OP_DST)
  2843. {
  2844. scm_t_uint8 dst, a, b;
  2845. UNPACK_8_8_8 (op, dst, a, b);
  2846. SP_SET_U64 (dst, SP_REF_U64 (a) + SP_REF_U64 (b));
  2847. NEXT (1);
  2848. }
  2849. /* usub dst:8 a:8 b:8
  2850. *
  2851. * Subtract B from A, and place the result in DST. The operands and
  2852. * the result are unboxed unsigned 64-bit integers. Overflow will
  2853. * wrap around.
  2854. */
  2855. VM_DEFINE_OP (150, usub, "usub", OP1 (X8_S8_S8_S8) | OP_DST)
  2856. {
  2857. scm_t_uint8 dst, a, b;
  2858. UNPACK_8_8_8 (op, dst, a, b);
  2859. SP_SET_U64 (dst, SP_REF_U64 (a) - SP_REF_U64 (b));
  2860. NEXT (1);
  2861. }
  2862. /* umul dst:8 a:8 b:8
  2863. *
  2864. * Multiply A and B, and place the result in DST. The operands and
  2865. * the result are unboxed unsigned 64-bit integers. Overflow will
  2866. * wrap around.
  2867. */
  2868. VM_DEFINE_OP (151, umul, "umul", OP1 (X8_S8_S8_S8) | OP_DST)
  2869. {
  2870. scm_t_uint8 dst, a, b;
  2871. UNPACK_8_8_8 (op, dst, a, b);
  2872. SP_SET_U64 (dst, SP_REF_U64 (a) * SP_REF_U64 (b));
  2873. NEXT (1);
  2874. }
  2875. /* uadd/immediate dst:8 src:8 imm:8
  2876. *
  2877. * Add the unsigned 64-bit value from SRC with the unsigned 8-bit
  2878. * value IMM and place the raw unsigned 64-bit result in DST.
  2879. * Overflow will wrap around.
  2880. */
  2881. VM_DEFINE_OP (152, uadd_immediate, "uadd/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
  2882. {
  2883. scm_t_uint8 dst, src, imm;
  2884. scm_t_uint64 x;
  2885. UNPACK_8_8_8 (op, dst, src, imm);
  2886. x = SP_REF_U64 (src);
  2887. SP_SET_U64 (dst, x + (scm_t_uint64) imm);
  2888. NEXT (1);
  2889. }
  2890. /* usub/immediate dst:8 src:8 imm:8
  2891. *
  2892. * Subtract the unsigned 8-bit value IMM from the unsigned 64-bit
  2893. * value in SRC and place the raw unsigned 64-bit result in DST.
  2894. * Overflow will wrap around.
  2895. */
  2896. VM_DEFINE_OP (153, usub_immediate, "usub/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
  2897. {
  2898. scm_t_uint8 dst, src, imm;
  2899. scm_t_uint64 x;
  2900. UNPACK_8_8_8 (op, dst, src, imm);
  2901. x = SP_REF_U64 (src);
  2902. SP_SET_U64 (dst, x - (scm_t_uint64) imm);
  2903. NEXT (1);
  2904. }
  2905. /* umul/immediate dst:8 src:8 imm:8
  2906. *
  2907. * Multiply the unsigned 64-bit value from SRC by the unsigned 8-bit
  2908. * value IMM and place the raw unsigned 64-bit result in DST.
  2909. * Overflow will wrap around.
  2910. */
  2911. VM_DEFINE_OP (154, umul_immediate, "umul/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
  2912. {
  2913. scm_t_uint8 dst, src, imm;
  2914. scm_t_uint64 x;
  2915. UNPACK_8_8_8 (op, dst, src, imm);
  2916. x = SP_REF_U64 (src);
  2917. SP_SET_U64 (dst, x * (scm_t_uint64) imm);
  2918. NEXT (1);
  2919. }
/* load-f64 dst:24 high-bits:32 low-bits:32
 *
 * Make a double-precision floating-point value with HIGH-BITS and
 * LOW-BITS.  The raw 64-bit pattern is stored via SP_SET_U64; later
 * f64 reads of the slot reinterpret those bits as a double.
 */
VM_DEFINE_OP (155, load_f64, "load-f64", OP3 (X8_S24, AF32, BF32) | OP_DST)
{
  scm_t_uint32 dst;
  scm_t_uint64 val;
  UNPACK_24 (op, dst);
  /* Assemble the 64-bit payload from the two 32-bit words.  */
  val = ip[1];
  val <<= 32;
  val |= ip[2];
  SP_SET_U64 (dst, val);
  NEXT (3);
}

/* load-u64 dst:24 high-bits:32 low-bits:32
 *
 * Make an unsigned 64-bit integer with HIGH-BITS and LOW-BITS.
 */
VM_DEFINE_OP (156, load_u64, "load-u64", OP3 (X8_S24, AU32, BU32) | OP_DST)
{
  scm_t_uint32 dst;
  scm_t_uint64 val;
  UNPACK_24 (op, dst);
  val = ip[1];
  val <<= 32;
  val |= ip[2];
  SP_SET_U64 (dst, val);
  NEXT (3);
}
/* scm->s64 dst:12 src:12
 *
 * Unpack a signed 64-bit integer from SRC and place it in DST.
 */
VM_DEFINE_OP (157, scm_to_s64, "scm->s64", OP1 (X8_S12_S12) | OP_DST)
{
  scm_t_uint16 dst, src;
  UNPACK_12_12 (op, dst, src);
  /* scm_to_int64 may throw on out-of-range values.  */
  SYNC_IP ();
  SP_SET_S64 (dst, scm_to_int64 (SP_REF (src)));
  NEXT (1);
}

/* s64->scm dst:12 src:12
 *
 * Pack a signed 64-bit integer into a SCM value.
 */
VM_DEFINE_OP (158, s64_to_scm, "s64->scm", OP1 (X8_S12_S12) | OP_DST)
{
  scm_t_uint16 dst, src;
  UNPACK_12_12 (op, dst, src);
  /* Large values allocate a bignum; sync the IP first.  */
  SYNC_IP ();
  SP_SET (dst, scm_from_int64 (SP_REF_S64 (src)));
  NEXT (1);
}

/* load-s64 dst:24 high-bits:32 low-bits:32
 *
 * Make a signed 64-bit integer with HIGH-BITS and LOW-BITS.  (The
 * original comment said "unsigned" — a copy-paste from load-u64.)
 * The raw bit pattern is stored via SP_SET_U64 and reinterpreted by
 * later s64 reads of the slot.
 */
VM_DEFINE_OP (159, load_s64, "load-s64", OP3 (X8_S24, AS32, BS32) | OP_DST)
{
  scm_t_uint32 dst;
  scm_t_uint64 val;
  UNPACK_24 (op, dst);
  val = ip[1];
  val <<= 32;
  val |= ip[2];
  SP_SET_U64 (dst, val);
  NEXT (3);
}
/* current-thread dst:24
 *
 * Write the current thread into DST.
 */
VM_DEFINE_OP (160, current_thread, "current-thread", OP1 (X8_S24) | OP_DST)
{
  scm_t_uint32 dst;
  UNPACK_24 (op, dst);
  SP_SET (dst, thread->handle);
  NEXT (1);
}

/* logsub dst:8 a:8 b:8
 *
 * Place the bitwise AND of A and the bitwise NOT of B into DST.
 */
VM_DEFINE_OP (161, logsub, "logsub", OP1 (X8_S8_S8_S8) | OP_DST)
{
  ARGS2 (x, y);

  /* Fast path for two fixnums: operate directly on the untagged
     values; (a & ~b) of non-negative-width fixnums stays a fixnum.  */
  if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
    {
      scm_t_signed_bits a, b;

      a = SCM_I_INUM (x);
      b = SCM_I_INUM (y);

      RETURN (SCM_I_MAKINUM (a & ~b));
    }

  /* Slow path: defer to the generic bignum-capable primitives.  */
  RETURN_EXP (scm_logand (x, scm_lognot (y)));
}
  3017. /* ulogand dst:8 a:8 b:8
  3018. *
  3019. * Place the bitwise AND of the u64 values in A and B into DST.
  3020. */
  3021. VM_DEFINE_OP (162, ulogand, "ulogand", OP1 (X8_S8_S8_S8) | OP_DST)
  3022. {
  3023. scm_t_uint8 dst, a, b;
  3024. UNPACK_8_8_8 (op, dst, a, b);
  3025. SP_SET_U64 (dst, SP_REF_U64 (a) & SP_REF_U64 (b));
  3026. NEXT (1);
  3027. }
  3028. /* ulogior dst:8 a:8 b:8
  3029. *
  3030. * Place the bitwise inclusive OR of the u64 values in A and B into
  3031. * DST.
  3032. */
  3033. VM_DEFINE_OP (163, ulogior, "ulogior", OP1 (X8_S8_S8_S8) | OP_DST)
  3034. {
  3035. scm_t_uint8 dst, a, b;
  3036. UNPACK_8_8_8 (op, dst, a, b);
  3037. SP_SET_U64 (dst, SP_REF_U64 (a) | SP_REF_U64 (b));
  3038. NEXT (1);
  3039. }
  3040. /* ulogsub dst:8 a:8 b:8
  3041. *
  3042. * Place the (A & ~B) of the u64 values A and B into DST.
  3043. */
  3044. VM_DEFINE_OP (164, ulogsub, "ulogsub", OP1 (X8_S8_S8_S8) | OP_DST)
  3045. {
  3046. scm_t_uint8 dst, a, b;
  3047. UNPACK_8_8_8 (op, dst, a, b);
  3048. SP_SET_U64 (dst, SP_REF_U64 (a) & ~SP_REF_U64 (b));
  3049. NEXT (1);
  3050. }
  3051. /* ursh dst:8 a:8 b:8
  3052. *
  3053. * Shift the u64 value in A right by B bits, and place the result in
  3054. * DST. Only the lower 6 bits of B are used.
  3055. */
  3056. VM_DEFINE_OP (165, ursh, "ursh", OP1 (X8_S8_S8_S8) | OP_DST)
  3057. {
  3058. scm_t_uint8 dst, a, b;
  3059. UNPACK_8_8_8 (op, dst, a, b);
  3060. SP_SET_U64 (dst, SP_REF_U64 (a) >> (SP_REF_U64 (b) & 63));
  3061. NEXT (1);
  3062. }
  3063. /* ulsh dst:8 a:8 b:8
  3064. *
  3065. * Shift the u64 value in A left by B bits, and place the result in
  3066. * DST. Only the lower 6 bits of B are used.
  3067. */
  3068. VM_DEFINE_OP (166, ulsh, "ulsh", OP1 (X8_S8_S8_S8) | OP_DST)
  3069. {
  3070. scm_t_uint8 dst, a, b;
  3071. UNPACK_8_8_8 (op, dst, a, b);
  3072. SP_SET_U64 (dst, SP_REF_U64 (a) << (SP_REF_U64 (b) & 63));
  3073. NEXT (1);
  3074. }
/* scm->u64/truncate dst:12 src:12
 *
 * Unpack an exact integer from SRC and place it in the unsigned
 * 64-bit register DST, truncating any high bits.  If the number in
 * SRC is negative, all the high bits will be set.
 */
VM_DEFINE_OP (167, scm_to_u64_truncate, "scm->u64/truncate", OP1 (X8_S12_S12) | OP_DST)
{
  scm_t_uint16 dst, src;
  SCM x;

  UNPACK_12_12 (op, dst, src);
  x = SP_REF (src);

  if (SCM_I_INUMP (x))
    /* Fixnum fast path: the cast performs the two's-complement
       truncation (negative fixnums become all-high-bits-set).  */
    SP_SET_U64 (dst, (scm_t_uint64) SCM_I_INUM (x));
  else
    {
      SYNC_IP ();
      /* Bignum path: mask with 2^64-1 first so scm_to_uint64 cannot
         range-error, then unpack.  */
      SP_SET_U64 (dst,
                  scm_to_uint64
                  (scm_logand (x, scm_from_uint64 ((scm_t_uint64) -1))));
    }

  NEXT (1);
}
  3098. /* ursh/immediate dst:8 a:8 b:8
  3099. *
  3100. * Shift the u64 value in A right by the immediate B bits, and place
  3101. * the result in DST. Only the lower 6 bits of B are used.
  3102. */
  3103. VM_DEFINE_OP (168, ursh_immediate, "ursh/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
  3104. {
  3105. scm_t_uint8 dst, a, b;
  3106. UNPACK_8_8_8 (op, dst, a, b);
  3107. SP_SET_U64 (dst, SP_REF_U64 (a) >> (b & 63));
  3108. NEXT (1);
  3109. }
  3110. /* ulsh/immediate dst:8 a:8 b:8
  3111. *
  3112. * Shift the u64 value in A left by the immediate B bits, and place
  3113. * the result in DST. Only the lower 6 bits of B are used.
  3114. */
  3115. VM_DEFINE_OP (169, ulsh_immediate, "ulsh/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
  3116. {
  3117. scm_t_uint8 dst, a, b;
  3118. UNPACK_8_8_8 (op, dst, a, b);
  3119. SP_SET_U64 (dst, SP_REF_U64 (a) << (b & 63));
  3120. NEXT (1);
  3121. }
/* Compare the untagged u64 in slot A against the boxed SCM in slot B
   and branch accordingly.  UNBOXED is the fixnum fast-path predicate
   over `x' (the u64) and `y' (the fixnum's signed value); BOXED is
   the generic comparison primitive used otherwise.  Bit 0 of ip[2]
   inverts the sense of the test; the branch offset occupies the top
   24 bits of ip[2], recovered by a sign-extending shift.  */
#define BR_U64_SCM_COMPARISON(x, y, unboxed, boxed)                     \
  do {                                                                  \
    scm_t_uint32 a, b;                                                  \
    scm_t_uint64 x;                                                     \
    SCM y_scm;                                                          \
                                                                        \
    UNPACK_24 (op, a);                                                  \
    UNPACK_24 (ip[1], b);                                               \
    x = SP_REF_U64 (a);                                                 \
    y_scm = SP_REF (b);                                                 \
                                                                        \
    if (SCM_I_INUMP (y_scm))                                            \
      {                                                                 \
        scm_t_signed_bits y = SCM_I_INUM (y_scm);                       \
                                                                        \
        if ((ip[2] & 0x1) ? !(unboxed) : (unboxed))                     \
          {                                                             \
            scm_t_int32 offset = ip[2];                                 \
            offset >>= 8; /* Sign-extending shift. */                   \
            NEXT (offset);                                              \
          }                                                             \
        NEXT (3);                                                       \
      }                                                                 \
    else                                                                \
      {                                                                 \
        SCM res;                                                        \
        SYNC_IP ();                                                     \
        res = boxed (scm_from_uint64 (x), y_scm);                       \
        CACHE_SP ();                                                    \
        if ((ip[2] & 0x1) ? scm_is_false (res) : scm_is_true (res))     \
          {                                                             \
            scm_t_int32 offset = ip[2];                                 \
            offset >>= 8; /* Sign-extending shift. */                   \
            NEXT (offset);                                              \
          }                                                             \
        NEXT (3);                                                       \
      }                                                                 \
  } while (0)
  3160. /* br-if-u64-=-scm a:24 _:8 b:24 invert:1 _:7 offset:24
  3161. *
  3162. * If the U64 value in A is = to the SCM value in B, add OFFSET, a
  3163. * signed 24-bit number, to the current instruction pointer.
  3164. */
  3165. VM_DEFINE_OP (170, br_if_u64_ee_scm, "br-if-u64-=-scm", OP3 (X8_S24, X8_S24, B1_X7_L24))
  3166. {
  3167. BR_U64_SCM_COMPARISON(x, y, y >= 0 && (scm_t_uint64) y == x, scm_num_eq_p);
  3168. }
  /* br-if-u64-<-scm a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the U64 value in A is < than the SCM value in B, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (171, br_if_u64_lt_scm, "br-if-u64-<-scm", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      /* x (unsigned) can only be less than a fixnum y if y is
         strictly positive.  */
      BR_U64_SCM_COMPARISON(x, y, y > 0 && (scm_t_uint64) y > x, scm_less_p);
    }
  /* br-if-u64-<=-scm a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the U64 value in A is <= than the SCM value in B, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (172, br_if_u64_le_scm, "br-if-u64-<=-scm", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      /* x <= y requires y non-negative, since x is unsigned.  */
      BR_U64_SCM_COMPARISON(x, y, y >= 0 && (scm_t_uint64) y >= x, scm_leq_p);
    }
  /* br-if-u64->-scm a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the U64 value in A is > than the SCM value in B, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (173, br_if_u64_gt_scm, "br-if-u64->-scm", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      /* Any unsigned x exceeds a negative fixnum y; otherwise compare
         as u64.  */
      BR_U64_SCM_COMPARISON(x, y, y < 0 || (scm_t_uint64) y < x, scm_gr_p);
    }
  /* br-if-u64->=-scm a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the U64 value in A is >= than the SCM value in B, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (174, br_if_u64_ge_scm, "br-if-u64->=-scm", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      /* x >= y holds trivially for y <= 0 since x is unsigned.  */
      BR_U64_SCM_COMPARISON(x, y, y <= 0 || (scm_t_uint64) y <= x, scm_geq_p);
    }
  /* integer->char a:12 b:12
   *
   * Convert the U64 value in B to a Scheme character, and return it in
   * A.
   */
  VM_DEFINE_OP (175, integer_to_char, "integer->char", OP1 (X8_S12_S12) | OP_DST)
    {
      scm_t_uint16 dst, src;
      scm_t_uint64 x;

      UNPACK_12_12 (op, dst, src);
      x = SP_REF_U64 (src);
      /* Reject values outside the valid codepoint range before
         tagging.  */
      VM_ASSERT (x <= (scm_t_uint64) SCM_CODEPOINT_MAX,
                 vm_error_out_of_range_uint64 ("integer->char", x));
      /* Tag the codepoint as an immediate character.  */
      SP_SET (dst, SCM_MAKE_ITAG8 ((scm_t_bits) (scm_t_wchar) x, scm_tc8_char));
      NEXT (1);
    }
  3221. /* char->integer a:12 b:12
  3222. *
  3223. * Untag the character in B to U64, and return it in A.
  3224. */
  3225. VM_DEFINE_OP (176, char_to_integer, "char->integer", OP1 (X8_S12_S12) | OP_DST)
  3226. {
  3227. scm_t_uint16 dst, src;
  3228. SCM x;
  3229. UNPACK_12_12 (op, dst, src);
  3230. x = SP_REF (src);
  3231. VM_VALIDATE_CHAR (x, "char->integer");
  3232. SP_SET_U64 (dst, SCM_CHAR (x));
  3233. NEXT (1);
  3234. }
  3235. /* ulogxor dst:8 a:8 b:8
  3236. *
  3237. * Place the bitwise exclusive OR of the u64 values in A and B into
  3238. * DST.
  3239. */
  3240. VM_DEFINE_OP (177, ulogxor, "ulogxor", OP1 (X8_S8_S8_S8) | OP_DST)
  3241. {
  3242. scm_t_uint8 dst, a, b;
  3243. UNPACK_8_8_8 (op, dst, a, b);
  3244. SP_SET_U64 (dst, SP_REF_U64 (a) ^ SP_REF_U64 (b));
  3245. NEXT (1);
  3246. }
  /* make-atomic-box dst:12 src:12
   *
   * Create a new atomic box initialized to SRC, and place it in DST.
   */
  VM_DEFINE_OP (178, make_atomic_box, "make-atomic-box", OP1 (X8_S12_S12) | OP_DST)
    {
      SCM box;
      scm_t_uint16 dst, src;

      UNPACK_12_12 (op, dst, src);
      /* Sync the IP before the allocating call.  */
      SYNC_IP ();
      box = scm_inline_cell (thread, scm_tc7_atomic_box,
                             SCM_UNPACK (SCM_UNSPECIFIED));
      /* Store the initial value with an atomic store, replacing the
         unspecified placeholder used at allocation time.  */
      scm_atomic_set_scm (scm_atomic_box_loc (box), SP_REF (src));
      SP_SET (dst, box);
      NEXT (1);
    }
  3263. /* atomic-box-ref dst:12 src:12
  3264. *
  3265. * Fetch the value of the atomic box at SRC into DST.
  3266. */
  3267. VM_DEFINE_OP (179, atomic_box_ref, "atomic-box-ref", OP1 (X8_S12_S12) | OP_DST)
  3268. {
  3269. scm_t_uint16 dst, src;
  3270. SCM box;
  3271. UNPACK_12_12 (op, dst, src);
  3272. box = SP_REF (src);
  3273. VM_VALIDATE_ATOMIC_BOX (box, "atomic-box-ref");
  3274. SP_SET (dst, scm_atomic_ref_scm (scm_atomic_box_loc (box)));
  3275. NEXT (1);
  3276. }
  /* atomic-box-set! dst:12 src:12
   *
   * Set the contents of the atomic box at DST to SRC.
   */
  VM_DEFINE_OP (180, atomic_box_set, "atomic-box-set!", OP1 (X8_S12_S12))
    {
      scm_t_uint16 dst, src;
      SCM box;

      UNPACK_12_12 (op, dst, src);
      /* Note: DST here names the box being mutated, not an output
         slot; this op has no OP_DST flag.  */
      box = SP_REF (dst);
      VM_VALIDATE_ATOMIC_BOX (box, "atomic-box-set!");
      scm_atomic_set_scm (scm_atomic_box_loc (box), SP_REF (src));
      NEXT (1);
    }
  /* atomic-box-swap! dst:12 box:12 _:8 val:24
   *
   * Replace the contents of the atomic box at BOX to VAL and store the
   * previous value at DST.
   */
  VM_DEFINE_OP (181, atomic_box_swap, "atomic-box-swap!", OP2 (X8_S12_S12, X8_S24) | OP_DST)
    {
      scm_t_uint16 dst, box;
      scm_t_uint32 val;
      SCM scm_box;

      UNPACK_12_12 (op, dst, box);
      UNPACK_24 (ip[1], val);
      scm_box = SP_REF (box);
      VM_VALIDATE_ATOMIC_BOX (scm_box, "atomic-box-swap!");
      /* Atomically exchange the contents, returning the old value.  */
      SP_SET (dst,
              scm_atomic_swap_scm (scm_atomic_box_loc (scm_box), SP_REF (val)));
      NEXT (2);
    }
  /* atomic-box-compare-and-swap! dst:12 box:12 _:8 expected:24 _:8 desired:24
   *
   * If the contents of the atomic box at BOX are eq? to EXPECTED,
   * replace them with DESIRED.  Either way, store the value observed
   * in the box at DST.
   */
  VM_DEFINE_OP (182, atomic_box_compare_and_swap, "atomic-box-compare-and-swap!", OP3 (X8_S12_S12, X8_S24, X8_S24) | OP_DST)
    {
      scm_t_uint16 dst, box;
      scm_t_uint32 expected, desired;
      SCM scm_box, scm_expected, scm_result;

      UNPACK_12_12 (op, dst, box);
      UNPACK_24 (ip[1], expected);
      UNPACK_24 (ip[2], desired);
      scm_box = SP_REF (box);
      VM_VALIDATE_ATOMIC_BOX (scm_box, "atomic-box-compare-and-swap!");
      /* scm_result is updated in place by the CAS to the observed
         value on failure.  */
      scm_result = scm_expected = SP_REF (expected);
      while (!scm_atomic_compare_and_swap_scm (scm_atomic_box_loc (scm_box),
                                               &scm_result, SP_REF (desired))
             && scm_is_eq (scm_result, scm_expected))
        {
          /* 'scm_atomic_compare_and_swap_scm' has spuriously failed,
             i.e. it has returned 0 to indicate failure, although the
             observed value is 'eq?' to EXPECTED. In this case, we *must*
             try again, because the API of 'atomic-box-compare-and-swap!'
             provides no way to indicate to the caller that the exchange
             failed when the observed value is 'eq?' to EXPECTED. */
        }
      SP_SET (dst, scm_result);
      NEXT (3);
    }
  /* handle-interrupts _:24
   *
   * Handle pending interrupts.  If any asyncs are pending and not
   * blocked, pop one and tail-call into the interrupt-handling
   * trampoline with a fresh frame that returns back to this very
   * opcode, so remaining asyncs get handled too.
   */
  VM_DEFINE_OP (183, handle_interrupts, "handle-interrupts", OP1 (X32))
    {
      /* Fast path: nothing pending.  */
      if (SCM_LIKELY (scm_is_null
                      (scm_atomic_ref_scm (&thread->pending_asyncs))))
        NEXT (1);

      /* Asyncs are currently blocked for this thread; skip.  */
      if (thread->block_asyncs > 0)
        NEXT (1);

      {
        union scm_vm_stack_element *old_fp, *new_fp;
        size_t old_frame_size = FRAME_LOCALS_COUNT ();
        SCM proc = scm_i_async_pop (thread);

        /* No PUSH_CONTINUATION_HOOK, as we can't usefully
           POP_CONTINUATION_HOOK because there are no return values. */

        /* Three slots: two for RA and dynamic link, one for proc. */
        ALLOC_FRAME (old_frame_size + 3);

        /* Set up a frame that will return right back to this
           handle-interrupts opcode to handle any additional
           interrupts. */
        old_fp = vp->fp;
        new_fp = SCM_FRAME_SLOT (old_fp, old_frame_size + 1);
        SCM_FRAME_SET_DYNAMIC_LINK (new_fp, old_fp);
        /* Return address is IP itself: re-execute handle-interrupts.  */
        SCM_FRAME_SET_RETURN_ADDRESS (new_fp, ip);
        vp->fp = new_fp;

        SP_SET (0, proc);
        /* Jump into the interrupt trampoline code.  */
        ip = (scm_t_uint32 *) vm_handle_interrupt_code;

        APPLY_HOOK ();

        NEXT (0);
      }
    }
  /* return-from-interrupt _:24
   *
   * Return from handling an interrupt, discarding any return values and
   * stripping away the interrupt frame.
   */
  VM_DEFINE_OP (184, return_from_interrupt, "return-from-interrupt", OP1 (X32))
    {
      /* Pop the frame: restore sp, then read the return address and
         dynamic link from the frame before unlinking it.  */
      vp->sp = sp = SCM_FRAME_PREVIOUS_SP (vp->fp);
      ip = SCM_FRAME_RETURN_ADDRESS (vp->fp);
      vp->fp = SCM_FRAME_DYNAMIC_LINK (vp->fp);

      NEXT (0);
    }
  /* push-dynamic-state state:24
   *
   * Save the current fluid bindings on the dynamic stack, and use STATE
   * instead.
   */
  VM_DEFINE_OP (185, push_dynamic_state, "push-dynamic-state", OP1 (X8_S24))
    {
      scm_t_uint32 state;

      UNPACK_24 (op, state);
      /* Sync the IP before calling out of the VM.  */
      SYNC_IP ();
      scm_dynstack_push_dynamic_state (&thread->dynstack, SP_REF (state),
                                       thread->dynamic_state);
      NEXT (1);
    }
  /* pop-dynamic-state _:24
   *
   * Restore the saved fluid bindings from the dynamic stack.
   */
  VM_DEFINE_OP (186, pop_dynamic_state, "pop-dynamic-state", OP1 (X32))
    {
      /* Sync the IP before calling out of the VM.  */
      SYNC_IP ();
      scm_dynstack_unwind_dynamic_state (&thread->dynstack,
                                         thread->dynamic_state);
      NEXT (1);
    }
  /* br-if-f64-= a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the F64 value in A is = to the F64 value in B, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (187, br_if_f64_ee, "br-if-f64-=", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      BR_F64_ARITHMETIC (==);
    }
  /* br-if-f64-< a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the F64 value in A is < to the F64 value in B, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (188, br_if_f64_lt, "br-if-f64-<", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      BR_F64_ARITHMETIC (<);
    }
  /* br-if-f64-<= a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the F64 value in A is <= than the F64 value in B, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (189, br_if_f64_le, "br-if-f64-<=", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      BR_F64_ARITHMETIC (<=);
    }
  /* br-if-f64-> a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the F64 value in A is > than the F64 value in B, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (190, br_if_f64_gt, "br-if-f64->", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      BR_F64_ARITHMETIC (>);
    }
  /* br-if-f64->= a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the F64 value in A is >= than the F64 value in B, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (191, br_if_f64_ge, "br-if-f64->=", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      BR_F64_ARITHMETIC (>=);
    }
  3453. /* string-set! dst:8 idx:8 src:8
  3454. *
  3455. * Store the character SRC into the string DST at index IDX.
  3456. */
  3457. VM_DEFINE_OP (192, string_set, "string-set!", OP1 (X8_S8_S8_S8))
  3458. {
  3459. scm_t_uint8 dst, idx, src;
  3460. SCM str, chr;
  3461. scm_t_uint64 c_idx;
  3462. UNPACK_8_8_8 (op, dst, idx, src);
  3463. str = SP_REF (dst);
  3464. c_idx = SP_REF_U64 (idx);
  3465. chr = SP_REF (src);
  3466. VM_VALIDATE_STRING (str, "string-ref");
  3467. VM_VALIDATE_INDEX (c_idx, scm_i_string_length (str), "string-ref");
  3468. /* If needed we can speed this up and only SYNC_IP +
  3469. scm_i_string_writing if the string isn't already a non-shared
  3470. stringbuf. */
  3471. SYNC_IP ();
  3472. scm_i_string_start_writing (str);
  3473. scm_i_string_set_x (str, c_idx, SCM_CHAR (chr));
  3474. scm_i_string_stop_writing ();
  3475. NEXT (1);
  3476. }
  /* Opcodes 193 through 255 are unallocated.  They carry NULL names and
     NOP bodies, so executing any of them falls through to the
     bad-instruction handler below.  */
  VM_DEFINE_OP (193, unused_193, NULL, NOP)
  VM_DEFINE_OP (194, unused_194, NULL, NOP)
  VM_DEFINE_OP (195, unused_195, NULL, NOP)
  VM_DEFINE_OP (196, unused_196, NULL, NOP)
  VM_DEFINE_OP (197, unused_197, NULL, NOP)
  VM_DEFINE_OP (198, unused_198, NULL, NOP)
  VM_DEFINE_OP (199, unused_199, NULL, NOP)
  VM_DEFINE_OP (200, unused_200, NULL, NOP)
  VM_DEFINE_OP (201, unused_201, NULL, NOP)
  VM_DEFINE_OP (202, unused_202, NULL, NOP)
  VM_DEFINE_OP (203, unused_203, NULL, NOP)
  VM_DEFINE_OP (204, unused_204, NULL, NOP)
  VM_DEFINE_OP (205, unused_205, NULL, NOP)
  VM_DEFINE_OP (206, unused_206, NULL, NOP)
  VM_DEFINE_OP (207, unused_207, NULL, NOP)
  VM_DEFINE_OP (208, unused_208, NULL, NOP)
  VM_DEFINE_OP (209, unused_209, NULL, NOP)
  VM_DEFINE_OP (210, unused_210, NULL, NOP)
  VM_DEFINE_OP (211, unused_211, NULL, NOP)
  VM_DEFINE_OP (212, unused_212, NULL, NOP)
  VM_DEFINE_OP (213, unused_213, NULL, NOP)
  VM_DEFINE_OP (214, unused_214, NULL, NOP)
  VM_DEFINE_OP (215, unused_215, NULL, NOP)
  VM_DEFINE_OP (216, unused_216, NULL, NOP)
  VM_DEFINE_OP (217, unused_217, NULL, NOP)
  VM_DEFINE_OP (218, unused_218, NULL, NOP)
  VM_DEFINE_OP (219, unused_219, NULL, NOP)
  VM_DEFINE_OP (220, unused_220, NULL, NOP)
  VM_DEFINE_OP (221, unused_221, NULL, NOP)
  VM_DEFINE_OP (222, unused_222, NULL, NOP)
  VM_DEFINE_OP (223, unused_223, NULL, NOP)
  VM_DEFINE_OP (224, unused_224, NULL, NOP)
  VM_DEFINE_OP (225, unused_225, NULL, NOP)
  VM_DEFINE_OP (226, unused_226, NULL, NOP)
  VM_DEFINE_OP (227, unused_227, NULL, NOP)
  VM_DEFINE_OP (228, unused_228, NULL, NOP)
  VM_DEFINE_OP (229, unused_229, NULL, NOP)
  VM_DEFINE_OP (230, unused_230, NULL, NOP)
  VM_DEFINE_OP (231, unused_231, NULL, NOP)
  VM_DEFINE_OP (232, unused_232, NULL, NOP)
  VM_DEFINE_OP (233, unused_233, NULL, NOP)
  VM_DEFINE_OP (234, unused_234, NULL, NOP)
  VM_DEFINE_OP (235, unused_235, NULL, NOP)
  VM_DEFINE_OP (236, unused_236, NULL, NOP)
  VM_DEFINE_OP (237, unused_237, NULL, NOP)
  VM_DEFINE_OP (238, unused_238, NULL, NOP)
  VM_DEFINE_OP (239, unused_239, NULL, NOP)
  VM_DEFINE_OP (240, unused_240, NULL, NOP)
  VM_DEFINE_OP (241, unused_241, NULL, NOP)
  VM_DEFINE_OP (242, unused_242, NULL, NOP)
  VM_DEFINE_OP (243, unused_243, NULL, NOP)
  VM_DEFINE_OP (244, unused_244, NULL, NOP)
  VM_DEFINE_OP (245, unused_245, NULL, NOP)
  VM_DEFINE_OP (246, unused_246, NULL, NOP)
  VM_DEFINE_OP (247, unused_247, NULL, NOP)
  VM_DEFINE_OP (248, unused_248, NULL, NOP)
  VM_DEFINE_OP (249, unused_249, NULL, NOP)
  VM_DEFINE_OP (250, unused_250, NULL, NOP)
  VM_DEFINE_OP (251, unused_251, NULL, NOP)
  VM_DEFINE_OP (252, unused_252, NULL, NOP)
  VM_DEFINE_OP (253, unused_253, NULL, NOP)
  VM_DEFINE_OP (254, unused_254, NULL, NOP)
  VM_DEFINE_OP (255, unused_255, NULL, NOP)
    {
      /* Unallocated or corrupt opcode: signal a fatal error.  */
      vm_error_bad_instruction (op);
      abort (); /* never reached */
    }
  3544. END_DISPATCH_SWITCH;
  3545. }
  3546. #undef ABORT_CONTINUATION_HOOK
  3547. #undef ALIGNED_P
  3548. #undef APPLY_HOOK
  3549. #undef ARGS1
  3550. #undef ARGS2
  3551. #undef BEGIN_DISPATCH_SWITCH
  3552. #undef BINARY_INTEGER_OP
  3553. #undef BR_ARITHMETIC
  3554. #undef BR_BINARY
  3555. #undef BR_NARGS
  3556. #undef BR_UNARY
  3557. #undef BV_FIXABLE_INT_REF
  3558. #undef BV_FIXABLE_INT_SET
  3559. #undef BV_FLOAT_REF
  3560. #undef BV_FLOAT_SET
  3561. #undef BV_INT_REF
  3562. #undef BV_INT_SET
  3563. #undef CACHE_REGISTER
  3564. #undef END_DISPATCH_SWITCH
  3565. #undef FREE_VARIABLE_REF
  3566. #undef INIT
  3567. #undef INUM_MAX
  3568. #undef INUM_MIN
  3569. #undef FP_REF
  3570. #undef FP_SET
  3571. #undef FP_SLOT
  3572. #undef SP_REF
  3573. #undef SP_SET
  3574. #undef NEXT
  3575. #undef NEXT_HOOK
  3576. #undef NEXT_JUMP
  3577. #undef POP_CONTINUATION_HOOK
  3578. #undef PUSH_CONTINUATION_HOOK
  3579. #undef RETURN
  3580. #undef RUN_HOOK
  3581. #undef RUN_HOOK0
  3582. #undef RUN_HOOK1
  3583. #undef SYNC_IP
  3584. #undef UNPACK_8_8_8
  3585. #undef UNPACK_8_16
  3586. #undef UNPACK_16_8
  3587. #undef UNPACK_12_12
  3588. #undef UNPACK_24
  3589. #undef VARIABLE_BOUNDP
  3590. #undef VARIABLE_REF
  3591. #undef VARIABLE_SET
  3592. #undef VM_CHECK_FREE_VARIABLE
  3593. #undef VM_CHECK_OBJECT
  3594. #undef VM_CHECK_UNDERFLOW
  3595. #undef VM_DEFINE_OP
  3596. #undef VM_INSTRUCTION_TO_LABEL
  3597. #undef VM_USE_HOOKS
  3598. #undef VM_VALIDATE_ATOMIC_BOX
  3599. #undef VM_VALIDATE_BYTEVECTOR
  3600. #undef VM_VALIDATE_PAIR
  3601. #undef VM_VALIDATE_STRUCT
  3602. /*
  3603. (defun renumber-ops ()
  3604. "start from top of buffer and renumber 'VM_DEFINE_FOO (\n' sequences"
  3605. (interactive "")
  3606. (save-excursion
  3607. (let ((counter -1)) (goto-char (point-min))
  3608. (while (re-search-forward "^ *VM_DEFINE_[^ ]+ (\\([^,]+\\)," (point-max) t)
  3609. (replace-match
  3610. (number-to-string (setq counter (1+ counter)))
  3611. t t nil 1)))))
  3612. (renumber-ops)
  3613. */
  3614. /*
  3615. Local Variables:
  3616. c-file-style: "gnu"
  3617. End:
  3618. */