vm-engine.c 100 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
732783279328032813282328332843285328632873288328932903291329232933294329532963297329832993300330133023303330433053306330733083309331033113312331333143315331633173318331933203321332233233324332533263327332833293330333133323333333433353336333733383339334033413342334333443345334633473348334933503351335233533354335533563357335833593360336133623363336433653366336733683369337033713372337333743375337633773378337933803381338233833384338533863387338833893390339133923393339433953396339733983399340034013402340334043405340634073408340934103411341234133414341534163417341834193420342134223423342434253426342734283429343034313432343334343435343634373438343934403441344234433444344534463447344834493450345134523453345434553456345734583459346034613462346334643465346634673468346934703471347234733474347534763477347834793480348134823483348434853486348734883489349034913492349334943495349634973498349935003501350235033504350535063507350835093510351135123513351435153516351735183519352035213522352335243525352635273528352935303531353235333534353535363537353835393540354135423543354435453546354735483549355035513552355335543555355635573558355935603561356235633564356535663567356835693570357135723573357435753576357735783579358035813582358335843585358635873588358935903591359235933594359535963597359835993600360136023603360436053606360736083609361036113612361336143615361636173618361936203621362236233624362536263627
/* Copyright 2001,2009-2015,2017-2021,2023
     Free Software Foundation, Inc.

   This file is part of Guile.

   Guile is free software: you can redistribute it and/or modify it
   under the terms of the GNU Lesser General Public License as published
   by the Free Software Foundation, either version 3 of the License, or
   (at your option) any later version.

   Guile is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public
   License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with Guile.  If not, see
   <https://www.gnu.org/licenses/>.  */

/* This file is included in vm.c multiple times.  */
  16. #define UNPACK_8_8_8(op,a,b,c) \
  17. do \
  18. { \
  19. a = (op >> 8) & 0xff; \
  20. b = (op >> 16) & 0xff; \
  21. c = op >> 24; \
  22. } \
  23. while (0)
  24. #define UNPACK_8_16(op,a,b) \
  25. do \
  26. { \
  27. a = (op >> 8) & 0xff; \
  28. b = op >> 16; \
  29. } \
  30. while (0)
  31. #define UNPACK_12_12(op,a,b) \
  32. do \
  33. { \
  34. a = (op >> 8) & 0xfff; \
  35. b = op >> 20; \
  36. } \
  37. while (0)
  38. #define UNPACK_24(op,a) \
  39. do \
  40. { \
  41. a = op >> 8; \
  42. } \
  43. while (0)
  44. #define UNPACK_8_24(op,a,b) \
  45. do \
  46. { \
  47. a = op & 0xff; \
  48. b = op >> 8; \
  49. } \
  50. while (0)
  51. #define UNPACK_16_16(op,a,b) \
  52. do \
  53. { \
  54. a = op & 0xffff; \
  55. b = op >> 16; \
  56. } \
  57. while (0)
  58. /* Assign some registers by hand. There used to be a bigger list here,
  59. but it was never tested, and in the case of x86-32, was a source of
  60. compilation failures. It can be revived if it's useful, but my naive
  61. hope is that simply annotating the locals with "register" will be a
  62. sufficient hint to the compiler. */
  63. #if defined(__GNUC__) && ! defined(__clang__)
  64. # if defined __x86_64__
  65. /* GCC 4.6 chooses %rbp for IP_REG and %rbx for SP_REG, which works
  66. well. Tell it to keep the jump table in a r12, which is
  67. callee-saved. */
  68. # define JT_REG asm ("r12")
  69. # endif
  70. #endif
  71. #ifndef IP_REG
  72. # define IP_REG
  73. #endif
  74. #ifndef FP_REG
  75. # define FP_REG
  76. #endif
  77. #ifndef JT_REG
  78. # define JT_REG
  79. #endif
  80. #define VP (&thread->vm)
  81. #define VM_ASSERT(condition, handler) \
  82. do { \
  83. if (SCM_UNLIKELY (!(condition))) \
  84. { \
  85. SYNC_IP(); \
  86. handler; \
  87. } \
  88. } while (0)
  89. #ifdef VM_ENABLE_ASSERTIONS
  90. # define ASSERT(condition) VM_ASSERT (condition, abort())
  91. #else
  92. # define ASSERT(condition)
  93. #endif
  94. #if VM_USE_HOOKS
  95. #define RUN_HOOK(h) \
  96. do { \
  97. if (SCM_UNLIKELY (VP->h##_hook_enabled)) \
  98. { \
  99. SYNC_IP (); \
  100. invoke_##h##_hook (thread); \
  101. CACHE_SP (); \
  102. } \
  103. } while (0)
  104. #else
  105. #define RUN_HOOK(h)
  106. #endif
  107. #define APPLY_HOOK() RUN_HOOK (apply)
  108. #define RETURN_HOOK() RUN_HOOK (return)
  109. #define NEXT_HOOK() RUN_HOOK (next)
  110. #define ABORT_HOOK() RUN_HOOK (abort)
  111. /* Virtual Machine
  112. The VM has three state bits: the instruction pointer (IP), the frame
  113. pointer (FP), and the stack pointer (SP). We cache the IP in a
  114. machine register, local to the VM, because it is used extensively by
  115. the VM. We do the same for SP. The FP is used more by code outside
  116. the VM than by the VM itself, we don't bother caching it locally.
  117. Keeping VP->ip in sync with the local IP would be a big lose, as it
  118. is updated so often. Instead of updating VP->ip all the time, we
  119. call SYNC_IP whenever we would need to know the IP of the top frame.
  120. In practice, we need to SYNC_IP whenever we call out of the VM to a
  121. function that would like to walk the stack, perhaps as the result of
  122. an exception. On the other hand, we do always keep VP->sp in sync
  123. with the local SP.
  124. One more thing. We allow the stack to move, when it expands.
  125. Therefore if you call out to a C procedure that could call Scheme
  126. code, or otherwise push anything on the stack, you will need to
  127. CACHE_SP afterwards to restore the possibly-changed stack pointer. */
  128. #define SYNC_IP() VP->ip = (ip)
  129. #define CACHE_SP() sp = VP->sp
  130. #define CACHE_REGISTER() \
  131. do { \
  132. ip = VP->ip; \
  133. CACHE_SP (); \
  134. } while (0)
  135. #define CALL_INTRINSIC(x, args) \
  136. (((struct scm_vm_intrinsics *) (void*) intrinsics)->x args)
  137. /* Reserve stack space for a frame. Will check that there is sufficient
  138. stack space for N locals, including the procedure. Invoke after
  139. preparing the new frame and setting the fp and ip.
  140. If there is not enough space for this frame, we try to expand the
  141. stack, possibly relocating it somewhere else in the address space.
  142. Because of the possible relocation, no pointer into the stack besides
  143. FP is valid across an ALLOC_FRAME call. Be careful! */
  144. #define ALLOC_FRAME(n) \
  145. do { \
  146. sp = VP->fp - (n); \
  147. if (SCM_UNLIKELY (sp < VP->stack_limit)) \
  148. { \
  149. SYNC_IP (); \
  150. CALL_INTRINSIC (expand_stack, (thread, sp)); \
  151. CACHE_SP (); \
  152. } \
  153. else \
  154. VP->sp = sp; \
  155. } while (0)
  156. /* Reset the current frame to hold N locals. Used when we know that no
  157. stack expansion is needed. Note that in some cases this may lower
  158. SP, e.g. after a return but where there are more locals below, but we
  159. know it was preceded by an alloc-frame in that case, so no stack need
  160. be allocated. */
  161. #define RESET_FRAME(n) \
  162. do { \
  163. VP->sp = sp = VP->fp - (n); \
  164. } while (0)
  165. /* Compute the number of locals in the frame. At a call, this is equal
  166. to the number of actual arguments when a function is first called,
  167. plus one for the function. */
  168. #define FRAME_LOCALS_COUNT() (VP->fp - sp)
  169. #define FRAME_LOCALS_COUNT_FROM(slot) (FRAME_LOCALS_COUNT () - slot)
  170. #ifdef HAVE_LABELS_AS_VALUES
  171. # define BEGIN_DISPATCH_SWITCH /* */
  172. # define END_DISPATCH_SWITCH /* */
  173. # define NEXT(n) \
  174. do \
  175. { \
  176. ip += n; \
  177. NEXT_HOOK (); \
  178. op = *ip; \
  179. goto *jump_table[op & 0xff]; \
  180. } \
  181. while (0)
  182. # define VM_DEFINE_OP(opcode, tag, name, meta) \
  183. op_##tag:
  184. #else
  185. # define BEGIN_DISPATCH_SWITCH \
  186. vm_start: \
  187. NEXT_HOOK (); \
  188. op = *ip; \
  189. switch (op & 0xff) \
  190. {
  191. # define END_DISPATCH_SWITCH \
  192. }
  193. # define NEXT(n) \
  194. do \
  195. { \
  196. ip += n; \
  197. goto vm_start; \
  198. } \
  199. while (0)
  200. # define VM_DEFINE_OP(opcode, tag, name, meta) \
  201. op_##tag: \
  202. case opcode:
  203. #endif
  204. #define FP_SLOT(i) SCM_FRAME_SLOT (VP->fp, i)
  205. #define FP_REF(i) SCM_FRAME_LOCAL (VP->fp, i)
  206. #define FP_SET(i,o) SCM_FRAME_LOCAL (VP->fp, i) = o
  207. #define SP_REF_SLOT(i) (sp[i])
  208. #define SP_SET_SLOT(i,o) (sp[i] = o)
  209. #define SP_REF(i) (sp[i].as_scm)
  210. #define SP_SET(i,o) (sp[i].as_scm = o)
  211. #define SP_REF_F64(i) (sp[i].as_f64)
  212. #define SP_SET_F64(i,o) (sp[i].as_f64 = o)
  213. #define SP_REF_U64(i) (sp[i].as_u64)
  214. #define SP_SET_U64(i,o) (sp[i].as_u64 = o)
  215. #define SP_REF_S64(i) (sp[i].as_s64)
  216. #define SP_SET_S64(i,o) (sp[i].as_s64 = o)
  217. #define SP_REF_PTR(i) (sp[i].as_ptr)
  218. #define SP_SET_PTR(i,o) (sp[i].as_ptr = o)
  219. /* Return true (non-zero) if PTR has suitable alignment for TYPE. */
  220. #define ALIGNED_P(ptr, type) \
  221. ((uintptr_t) (ptr) % alignof_type (type) == 0)
  222. static SCM
  223. VM_NAME (scm_thread *thread)
  224. {
  225. /* Instruction pointer: A pointer to the opcode that is currently
  226. running. */
  227. register uint32_t *ip IP_REG;
  228. /* Stack pointer: A pointer to the hot end of the stack, off of which
  229. we index arguments and local variables. Pushed at function calls,
  230. popped on returns. */
  231. register union scm_vm_stack_element *sp FP_REG;
  232. /* Current opcode: A cache of *ip. */
  233. register uint32_t op;
  234. void **intrinsics = (void**) &scm_vm_intrinsics;
  235. #ifdef HAVE_LABELS_AS_VALUES
  236. static const void *jump_table_[256] = {
  237. #define LABEL_ADDR(opcode, tag, name, meta) &&op_##tag,
  238. FOR_EACH_VM_OPERATION(LABEL_ADDR)
  239. #undef LABEL_ADDR
  240. };
  241. register const void **jump_table JT_REG;
  242. /* Attempt to keep JUMP_TABLE_POINTER in a register. This saves one
  243. load instruction at each instruction dispatch. */
  244. jump_table = jump_table_;
  245. #endif
  246. /* Load VM registers. */
  247. CACHE_REGISTER ();
  248. /* Start processing! */
  249. NEXT (0);
  250. BEGIN_DISPATCH_SWITCH;
  251. /* halt _:24
  252. *
  253. * Bring the VM to a halt, returning all the values from the stack.
  254. */
  255. VM_DEFINE_OP (0, halt, "halt", OP1 (X32))
  256. {
  257. size_t frame_size = 3;
  258. /* Empty frame, then values. */
  259. size_t first_value = frame_size;
  260. uint32_t nvals = FRAME_LOCALS_COUNT_FROM (first_value);
  261. union scm_vm_stack_element *fp;
  262. SCM ret;
  263. if (nvals == 1)
  264. ret = FP_REF (first_value);
  265. else
  266. {
  267. uint32_t n;
  268. SYNC_IP ();
  269. VM_ASSERT (nvals <= (UINTPTR_MAX >> 8), abort ());
  270. ret = scm_words ((nvals << 8) | scm_tc7_values, nvals + 1);
  271. for (n = 0; n < nvals; n++)
  272. SCM_SET_CELL_OBJECT (ret, n+1, FP_REF (first_value + n));
  273. }
  274. fp = VP->fp;
  275. VP->fp = SCM_FRAME_DYNAMIC_LINK (fp);
  276. VP->ip = SCM_FRAME_VIRTUAL_RETURN_ADDRESS (fp);
  277. VP->sp = SCM_FRAME_PREVIOUS_SP (fp);
  278. return ret;
  279. }
  280. /* instrument-entry _:24 data:32
  281. *
  282. * Increase execution counter for this function and potentially tier
  283. * up to the next JIT level. DATA is an offset to a structure
  284. * recording execution counts and the next-level JIT code
  285. * corresponding to this function. Also run the apply hook.
  286. */
  287. VM_DEFINE_OP (1, instrument_entry, "instrument-entry", OP2 (X32, N32))
  288. {
  289. #if ENABLE_JIT
  290. if (!VP->disable_mcode)
  291. {
  292. struct scm_jit_function_data *data;
  293. int32_t data_offset = ip[1];
  294. data = (struct scm_jit_function_data *) (ip + data_offset);
  295. if (data->mcode)
  296. {
  297. SYNC_IP ();
  298. scm_jit_enter_mcode (thread, data->mcode);
  299. CACHE_REGISTER ();
  300. NEXT (0);
  301. }
  302. if (data->counter >= scm_jit_counter_threshold)
  303. {
  304. const uint8_t *mcode;
  305. SYNC_IP ();
  306. mcode = scm_jit_compute_mcode (thread, data);
  307. if (mcode)
  308. {
  309. scm_jit_enter_mcode (thread, mcode);
  310. CACHE_REGISTER ();
  311. NEXT (0);
  312. }
  313. }
  314. else
  315. data->counter += SCM_JIT_COUNTER_ENTRY_INCREMENT;
  316. }
  317. #endif
  318. APPLY_HOOK ();
  319. NEXT (2);
  320. }
  321. /* instrument-loop _:24 data:32
  322. *
  323. * Increase execution counter for this function and potentially tier
  324. * up to the next JIT level. DATA is an offset to a structure
  325. * recording execution counts and the next-level JIT code
  326. * corresponding to this function.
  327. */
  328. VM_DEFINE_OP (2, instrument_loop, "instrument-loop", OP2 (X32, N32))
  329. {
  330. #if ENABLE_JIT
  331. if (!VP->disable_mcode)
  332. {
  333. int32_t data_offset = ip[1];
  334. struct scm_jit_function_data *data;
  335. data = (struct scm_jit_function_data *) (ip + data_offset);
  336. if (data->counter >= scm_jit_counter_threshold)
  337. {
  338. const uint8_t *mcode;
  339. SYNC_IP ();
  340. mcode = scm_jit_compute_mcode (thread, data);
  341. if (mcode)
  342. {
  343. scm_jit_enter_mcode (thread, mcode);
  344. CACHE_REGISTER ();
  345. NEXT (0);
  346. }
  347. }
  348. else
  349. data->counter += SCM_JIT_COUNTER_LOOP_INCREMENT;
  350. }
  351. #endif
  352. NEXT (2);
  353. }
  354. /* call proc:24 _:8 nlocals:24
  355. *
  356. * Call a procedure. PROC is the local corresponding to a procedure.
  357. * The three values below PROC will be overwritten by the saved call
  358. * frame data. The new frame will have space for NLOCALS locals: one
  359. * for the procedure, and the rest for the arguments which should
  360. * already have been pushed on.
  361. *
  362. * When the call returns, execution proceeds with the next
  363. * instruction. There may be any number of values on the return
  364. * stack; the precise number can be had by subtracting the address of
  365. * slot PROC-1 from the post-call SP.
  366. */
  367. VM_DEFINE_OP (3, call, "call", OP2 (X8_F24, X8_C24))
  368. {
  369. uint32_t proc, nlocals;
  370. union scm_vm_stack_element *old_fp, *new_fp;
  371. UNPACK_24 (op, proc);
  372. UNPACK_24 (ip[1], nlocals);
  373. old_fp = VP->fp;
  374. new_fp = SCM_FRAME_SLOT (old_fp, proc - 1);
  375. SCM_FRAME_SET_DYNAMIC_LINK (new_fp, old_fp);
  376. SCM_FRAME_SET_VIRTUAL_RETURN_ADDRESS (new_fp, ip + 2);
  377. SCM_FRAME_SET_MACHINE_RETURN_ADDRESS (new_fp, 0);
  378. VP->fp = new_fp;
  379. RESET_FRAME (nlocals);
  380. ip = CALL_INTRINSIC (get_callee_vcode, (thread));
  381. CACHE_SP ();
  382. NEXT (0);
  383. }
  384. /* call-label proc:24 _:8 nlocals:24 label:32
  385. *
  386. * Call a procedure in the same compilation unit.
  387. *
  388. * This instruction is just like "call", except that instead of
  389. * dereferencing PROC to find the call target, the call target is
  390. * known to be at LABEL, a signed 32-bit offset in 32-bit units from
  391. * the current IP. Since PROC is not used to compute the callee code,
  392. * it may be some other representation of the closure.
  393. */
  394. VM_DEFINE_OP (4, call_label, "call-label", OP3 (X8_F24, X8_C24, L32))
  395. {
  396. uint32_t proc, nlocals;
  397. int32_t label;
  398. union scm_vm_stack_element *old_fp, *new_fp;
  399. UNPACK_24 (op, proc);
  400. UNPACK_24 (ip[1], nlocals);
  401. label = ip[2];
  402. old_fp = VP->fp;
  403. new_fp = SCM_FRAME_SLOT (old_fp, proc - 1);
  404. SCM_FRAME_SET_DYNAMIC_LINK (new_fp, old_fp);
  405. SCM_FRAME_SET_VIRTUAL_RETURN_ADDRESS (new_fp, ip + 3);
  406. SCM_FRAME_SET_MACHINE_RETURN_ADDRESS (new_fp, 0);
  407. VP->fp = new_fp;
  408. RESET_FRAME (nlocals);
  409. ip += label;
  410. NEXT (0);
  411. }
  412. /* tail-call _:24
  413. *
  414. * Tail-call the procedure in slot 0 with the arguments in the current
  415. * stack frame. Requires that the procedure and all of the arguments
  416. * have already been shuffled into position.
  417. */
  418. VM_DEFINE_OP (5, tail_call, "tail-call", OP1 (X32))
  419. {
  420. ip = CALL_INTRINSIC (get_callee_vcode, (thread));
  421. CACHE_SP ();
  422. NEXT (0);
  423. }
  424. /* tail-call-label _:24 label:32
  425. *
  426. * Tail-call a known procedure. As call is to call-label, tail-call
  427. * is to tail-call-label.
  428. */
  429. VM_DEFINE_OP (6, tail_call_label, "tail-call-label", OP2 (X32, L32))
  430. {
  431. int32_t label;
  432. label = ip[1];
  433. ip += label;
  434. NEXT (0);
  435. }
  436. /* return-values _:24
  437. *
  438. * Return all values from a call frame.
  439. */
  440. VM_DEFINE_OP (7, return_values, "return-values", OP1 (X32))
  441. {
  442. union scm_vm_stack_element *old_fp;
  443. uint8_t *mcode;
  444. RETURN_HOOK ();
  445. old_fp = VP->fp;
  446. VP->fp = SCM_FRAME_DYNAMIC_LINK (old_fp);
  447. #if ENABLE_JIT
  448. if (!VP->disable_mcode)
  449. {
  450. mcode = SCM_FRAME_MACHINE_RETURN_ADDRESS (old_fp);
  451. if (mcode && mcode != scm_jit_return_to_interpreter_trampoline)
  452. {
  453. scm_jit_enter_mcode (thread, mcode);
  454. CACHE_REGISTER ();
  455. NEXT (0);
  456. }
  457. }
  458. #endif
  459. ip = SCM_FRAME_VIRTUAL_RETURN_ADDRESS (old_fp);
  460. NEXT (0);
  461. }
  462. /* receive dst:12 proc:12 _:8 nlocals:24
  463. *
  464. * Receive a single return value from a call whose procedure was in
  465. * PROC, asserting that the call actually returned at least one
  466. * value. Afterwards, resets the frame to NLOCALS locals.
  467. */
  468. VM_DEFINE_OP (8, receive, "receive", DOP2 (X8_F12_F12, X8_C24))
  469. {
  470. uint16_t dst, proc;
  471. uint32_t nlocals;
  472. UNPACK_12_12 (op, dst, proc);
  473. UNPACK_24 (ip[1], nlocals);
  474. VM_ASSERT (FRAME_LOCALS_COUNT () > proc,
  475. CALL_INTRINSIC (error_no_values, ()));
  476. FP_SET (dst, FP_REF (proc));
  477. RESET_FRAME (nlocals);
  478. NEXT (2);
  479. }
  480. /* receive-values proc:24 allow-extra?:1 _:7 nvalues:24
  481. *
  482. * Receive a return of multiple values from a call whose procedure was
  483. * in PROC. If fewer than NVALUES values were returned, signal an
  484. * error. Unless ALLOW-EXTRA? is true, require that the number of
  485. * return values equals NVALUES exactly. After receive-values has
  486. * run, the values can be copied down via `mov'.
  487. */
  488. VM_DEFINE_OP (9, receive_values, "receive-values", OP2 (X8_F24, B1_X7_C24))
  489. {
  490. uint32_t proc, nvalues;
  491. UNPACK_24 (op, proc);
  492. UNPACK_24 (ip[1], nvalues);
  493. if (ip[1] & 0x1)
  494. VM_ASSERT (FRAME_LOCALS_COUNT () >= proc + nvalues,
  495. CALL_INTRINSIC (error_not_enough_values, ()));
  496. else
  497. VM_ASSERT (FRAME_LOCALS_COUNT () == proc + nvalues,
  498. CALL_INTRINSIC (error_wrong_number_of_values, (nvalues)));
  499. NEXT (2);
  500. }
  501. /* assert-nargs-ee expected:24
  502. * assert-nargs-ge expected:24
  503. * assert-nargs-le expected:24
  504. *
  505. * If the number of actual arguments is not ==, >=, or <= EXPECTED,
  506. * respectively, signal an error.
  507. */
  508. VM_DEFINE_OP (10, assert_nargs_ee, "assert-nargs-ee", OP1 (X8_C24))
  509. {
  510. uint32_t expected;
  511. UNPACK_24 (op, expected);
  512. VM_ASSERT (FRAME_LOCALS_COUNT () == expected,
  513. CALL_INTRINSIC (error_wrong_num_args, (thread)));
  514. NEXT (1);
  515. }
  516. VM_DEFINE_OP (11, assert_nargs_ge, "assert-nargs-ge", OP1 (X8_C24))
  517. {
  518. uint32_t expected;
  519. UNPACK_24 (op, expected);
  520. VM_ASSERT (FRAME_LOCALS_COUNT () >= expected,
  521. CALL_INTRINSIC (error_wrong_num_args, (thread)));
  522. NEXT (1);
  523. }
  524. VM_DEFINE_OP (12, assert_nargs_le, "assert-nargs-le", OP1 (X8_C24))
  525. {
  526. uint32_t expected;
  527. UNPACK_24 (op, expected);
  528. VM_ASSERT (FRAME_LOCALS_COUNT () <= expected,
  529. CALL_INTRINSIC (error_wrong_num_args, (thread)));
  530. NEXT (1);
  531. }
  532. /* assert-nargs-ee/locals expected:12 nlocals:12
  533. *
  534. * Equivalent to a sequence of assert-nargs-ee and reserve-locals. The
  535. * number of locals reserved is EXPECTED + NLOCALS.
  536. */
  537. VM_DEFINE_OP (13, assert_nargs_ee_locals, "assert-nargs-ee/locals", OP1 (X8_C12_C12))
  538. {
  539. uint16_t expected, nlocals;
  540. UNPACK_12_12 (op, expected, nlocals);
  541. VM_ASSERT (FRAME_LOCALS_COUNT () == expected,
  542. CALL_INTRINSIC (error_wrong_num_args, (thread)));
  543. ALLOC_FRAME (expected + nlocals);
  544. NEXT (1);
  545. }
  546. /* arguments<=? expected:24
  547. *
  548. * Set the LESS_THAN, EQUAL, or NONE comparison result values if the
  549. * number of arguments is respectively less than, equal to, or greater
  550. * than EXPECTED.
  551. */
  552. VM_DEFINE_OP (14, check_arguments, "arguments<=?", OP1 (X8_C24))
  553. {
  554. uint8_t compare_result;
  555. uint32_t expected;
  556. ptrdiff_t nargs;
  557. UNPACK_24 (op, expected);
  558. nargs = FRAME_LOCALS_COUNT ();
  559. if (nargs < (ptrdiff_t) expected)
  560. compare_result = SCM_F_COMPARE_LESS_THAN;
  561. else if (nargs == (ptrdiff_t) expected)
  562. compare_result = SCM_F_COMPARE_EQUAL;
  563. else
  564. compare_result = SCM_F_COMPARE_NONE;
  565. VP->compare_result = compare_result;
  566. NEXT (1);
  567. }
/* positional-arguments<=? nreq:24 _:8 expected:24
 *
 * Set the LESS_THAN, EQUAL, or NONE comparison result values if the
 * number of positional arguments is less than, equal to, or greater
 * than EXPECTED.  The first NREQ arguments are positional arguments,
 * as are the subsequent arguments that are not keywords.
 */
VM_DEFINE_OP (15, check_positional_arguments, "positional-arguments<=?", OP2 (X8_C24, X8_C24))
  {
    uint8_t compare_result;
    uint32_t nreq, expected;
    ptrdiff_t nargs, npos;
    UNPACK_24 (op, nreq);
    UNPACK_24 (ip[1], expected);
    nargs = FRAME_LOCALS_COUNT ();
    /* Precondition: at least NREQ arguments.  Count positionals
       upwards from NREQ, stopping at the first keyword argument.  The
       `npos <= expected' bound lets NPOS reach EXPECTED + 1, which is
       enough to distinguish the EQUAL and NONE (greater-than) cases
       below without scanning the whole frame.  */
    for (npos = nreq; npos < nargs && npos <= expected; npos++)
      if (scm_is_keyword (FP_REF (npos)))
        break;
    if (npos < (ptrdiff_t) expected)
      compare_result = SCM_F_COMPARE_LESS_THAN;
    else if (npos == (ptrdiff_t) expected)
      compare_result = SCM_F_COMPARE_EQUAL;
    else
      compare_result = SCM_F_COMPARE_NONE;
    VP->compare_result = compare_result;
    NEXT (2);
  }
/* bind-kwargs nreq:24 flags:8 nreq-and-opt:24 _:8 ntotal:24 kw-offset:32
 *
 * flags := allow-other-keys:1 has-rest:1 _:6
 *
 * Find the last positional argument, and shuffle all the rest above
 * NTOTAL.  Initialize the intervening locals to SCM_UNDEFINED.  Then
 * load the constant at KW-OFFSET words from the current IP, and use it
 * to bind keyword arguments.  If HAS-REST, collect all shuffled
 * arguments into a list, and store it in NREQ-AND-OPT.  Finally, clear
 * the arguments that we shuffled up.
 *
 * A macro-mega-instruction.
 */
VM_DEFINE_OP (16, bind_kwargs, "bind-kwargs", OP4 (X8_C24, C8_C24, X8_C24, N32))
  {
    uint32_t nreq, nreq_and_opt, ntotal, npositional;
    int32_t kw_offset;
    scm_t_bits kw_bits;
    SCM kw;
    uint8_t allow_other_keys, has_rest;
    UNPACK_24 (op, nreq);
    /* The two flag bits live in the low byte of the second word.  */
    allow_other_keys = ip[1] & 0x1;
    has_rest = ip[1] & 0x2;
    UNPACK_24 (ip[1], nreq_and_opt);
    UNPACK_24 (ip[2], ntotal);
    kw_offset = ip[3];
    /* The keyword descriptor is a tagged SCM embedded in the
       instruction stream; it must be 8-byte aligned for SCM_PACK.  */
    kw_bits = (scm_t_bits) (ip + kw_offset);
    VM_ASSERT (!(kw_bits & 0x7), abort());
    kw = SCM_PACK (kw_bits);
    /* Note that if nopt == 0 then npositional = nreq.  */
    npositional = CALL_INTRINSIC (compute_kwargs_npositional,
                                  (thread, nreq, nreq_and_opt - nreq));
    SYNC_IP ();
    CALL_INTRINSIC (bind_kwargs,
                    (thread, npositional, ntotal, kw, !has_rest,
                     allow_other_keys));
    /* bind_kwargs may have expanded (and thus relocated) the stack.  */
    CACHE_SP ();
    if (has_rest)
      FP_SET (nreq_and_opt, CALL_INTRINSIC (cons_rest, (thread, ntotal)));
    RESET_FRAME (ntotal);
    NEXT (4);
  }
/* bind-rest dst:24
 *
 * Collect any arguments at or above DST into a list, and store that
 * list at DST.
 */
VM_DEFINE_OP (17, bind_rest, "bind-rest", DOP1 (X8_F24))
  {
    uint32_t dst, nargs;
    UNPACK_24 (op, dst);
    nargs = FRAME_LOCALS_COUNT ();
    if (nargs <= dst)
      {
        /* No rest arguments: the frame must end exactly at DST, and
           the rest list is empty.  */
        VM_ASSERT (nargs == dst, abort ());
        ALLOC_FRAME (dst + 1);
        SP_SET (0, SCM_EOL);
      }
    else
      {
        /* cons_rest allocates, so expose ip to the GC first.  */
        SYNC_IP ();
        SCM rest = CALL_INTRINSIC (cons_rest, (thread, dst));
        RESET_FRAME (dst + 1);
        SP_SET (0, rest);
      }
    NEXT (1);
  }
  663. /* alloc-frame nlocals:24
  664. *
  665. * Ensure that there is space on the stack for NLOCALS local variables.
  666. */
  667. VM_DEFINE_OP (18, alloc_frame, "alloc-frame", OP1 (X8_C24))
  668. {
  669. uint32_t nlocals;
  670. UNPACK_24 (op, nlocals);
  671. ALLOC_FRAME (nlocals);
  672. NEXT (1);
  673. }
  674. /* reset-frame nlocals:24
  675. *
  676. * Like alloc-frame, but doesn't check that the stack is big enough.
  677. * Used to reset the frame size to something less than the size that
  678. * was previously set via alloc-frame.
  679. */
  680. VM_DEFINE_OP (19, reset_frame, "reset-frame", OP1 (X8_C24))
  681. {
  682. uint32_t nlocals;
  683. UNPACK_24 (op, nlocals);
  684. RESET_FRAME (nlocals);
  685. NEXT (1);
  686. }
  687. /* mov dst:12 src:12
  688. *
  689. * Copy a value from one local slot to another.
  690. */
  691. VM_DEFINE_OP (20, mov, "mov", DOP1 (X8_S12_S12))
  692. {
  693. uint16_t dst;
  694. uint16_t src;
  695. UNPACK_12_12 (op, dst, src);
  696. /* FIXME: The compiler currently emits "mov" for SCM, F64, U64,
  697. and S64 variables. However SCM values are the usual case, and
  698. on a 32-bit machine it might be cheaper to move a SCM than to
  699. move a 64-bit number. */
  700. SP_SET_SLOT (dst, SP_REF_SLOT (src));
  701. NEXT (1);
  702. }
  703. /* long-mov dst:24 _:8 src:24
  704. *
  705. * Copy a value from one local slot to another.
  706. */
  707. VM_DEFINE_OP (21, long_mov, "long-mov", DOP2 (X8_S24, X8_S24))
  708. {
  709. uint32_t dst;
  710. uint32_t src;
  711. UNPACK_24 (op, dst);
  712. UNPACK_24 (ip[1], src);
  713. /* FIXME: The compiler currently emits "long-mov" for SCM, F64,
  714. U64, and S64 variables. However SCM values are the usual case,
  715. and on a 32-bit machine it might be cheaper to move a SCM than
  716. to move a 64-bit number. */
  717. SP_SET_SLOT (dst, SP_REF_SLOT (src));
  718. NEXT (2);
  719. }
  720. /* long-fmov dst:24 _:8 src:24
  721. *
  722. * Copy a value from one local slot to another. Slot indexes are
  723. * relative to the FP.
  724. */
  725. VM_DEFINE_OP (22, long_fmov, "long-fmov", DOP2 (X8_F24, X8_F24))
  726. {
  727. uint32_t dst;
  728. uint32_t src;
  729. UNPACK_24 (op, dst);
  730. UNPACK_24 (ip[1], src);
  731. FP_SET (dst, FP_REF (src));
  732. NEXT (2);
  733. }
/* push src:24
 *
 * Push SRC onto the stack.
 */
VM_DEFINE_OP (23, push, "push", OP1 (X8_S24))
  {
    uint32_t src;
    union scm_vm_stack_element val;
    /* FIXME: The compiler currently emits "push" for SCM, F64, U64,
       and S64 variables.  However SCM values are the usual case, and
       on a 32-bit machine it might be cheaper to move a SCM than to
       move a 64-bit number.  */
    UNPACK_24 (op, src);
    /* Read the slot BEFORE growing the frame: ALLOC_FRAME may expand
       and relocate the stack, and it also shifts sp-relative indexes
       by one.  After the grow, slot 0 is the new top of the frame.  */
    val = SP_REF_SLOT (src);
    ALLOC_FRAME (FRAME_LOCALS_COUNT () + 1);
    SP_SET_SLOT (0, val);
    NEXT (1);
  }
/* pop dst:24
 *
 * Pop the stack, storing to DST.
 */
VM_DEFINE_OP (24, pop, "pop", DOP1 (X8_S24))
  {
    uint32_t dst;
    union scm_vm_stack_element val;
    /* FIXME: The compiler currently emits "pop" for SCM, F64, U64,
       and S64 variables.  However SCM values are the usual case, and
       on a 32-bit machine it might be cheaper to move a SCM than to
       move a 64-bit number.  */
    UNPACK_24 (op, dst);
    /* Grab the top of the stack, shrink the frame by one (the stack
       grows downwards, so popping increments sp), and only then store
       to DST: DST is an index into the frame as it is after the pop.  */
    val = SP_REF_SLOT (0);
    VP->sp = sp = sp + 1;
    SP_SET_SLOT (dst, val);
    NEXT (1);
  }
  770. /* drop count:24
  771. *
  772. * Drop some number of values from the stack.
  773. */
  774. VM_DEFINE_OP (25, drop, "drop", OP1 (X8_C24))
  775. {
  776. uint32_t count;
  777. UNPACK_24 (op, count);
  778. VP->sp = sp = sp + count;
  779. NEXT (1);
  780. }
/* shuffle-down from:12 to:12
 *
 * Shuffle down values from FROM to TO, reducing the frame size by
 * (FROM-TO) slots.  Part of the internal implementation of
 * call-with-values, values, and apply.
 */
VM_DEFINE_OP (26, shuffle_down, "shuffle-down", OP1 (X8_F12_F12))
  {
    uint32_t n, from, to, nlocals;
    UNPACK_12_12 (op, from, to);
    VM_ASSERT (from > to, abort ());
    nlocals = FRAME_LOCALS_COUNT ();
    /* Copy every local at index >= FROM down to TO, preserving order.
       After the loop, N is the number of values moved, so the frame
       ends exactly at TO + N.  */
    for (n = 0; from + n < nlocals; n++)
      FP_SET (to + n, FP_REF (from + n));
    RESET_FRAME (to + n);
    NEXT (1);
  }
/* expand-apply-argument _:24
 *
 * Take the last local in a frame and expand it out onto the stack, as
 * for the last argument to "apply".
 */
VM_DEFINE_OP (27, expand_apply_argument, "expand-apply-argument", OP1 (X32))
  {
    /* The intrinsic walks a list and grows the frame, so sync ip
       before and re-fetch sp after (the stack may have relocated).  */
    SYNC_IP ();
    CALL_INTRINSIC (expand_apply_argument, (thread));
    CACHE_SP ();
    NEXT (1);
  }
/* subr-call idx:24
 *
 * Call a subr, passing all locals in this frame as arguments, and
 * storing the results on the stack, ready to be returned.  This
 * instruction is part of the trampolines created in gsubr.c, and is
 * not generated by the compiler.
 */
VM_DEFINE_OP (28, subr_call, "subr-call", OP1 (X8_C24))
  {
    SCM ret;
    uint32_t idx;
    UNPACK_24 (op, idx);
    SYNC_IP ();
    ret = scm_apply_subr (sp, idx, FRAME_LOCALS_COUNT ());
    /* A values object means the subr returned multiple values; unpack
       them onto the stack.  Otherwise leave the single result as the
       frame's only local.  */
    if (SCM_UNLIKELY (scm_is_values (ret)))
      {
        CALL_INTRINSIC (unpack_values_object, (thread, ret));
        CACHE_SP ();
        NEXT (1);
      }
    else
      {
        RESET_FRAME (1);
        SP_SET (0, ret);
        NEXT (1);
      }
  }
/* foreign-call cif-idx:12 ptr-idx:12
 *
 * Call a foreign function.  Fetch the CIF and foreign pointer from
 * the CIF-IDX and PTR-IDX closure slots of the callee.  Arguments are
 * taken from the stack, and results placed on the stack, ready to be
 * returned.  This instruction is part of the trampolines created by
 * the FFI, and is not generated by the compiler.
 */
VM_DEFINE_OP (29, foreign_call, "foreign-call", OP1 (X8_C12_C12))
  {
    uint16_t cif_idx, ptr_idx;
    SCM closure, cif, pointer;
    UNPACK_12_12 (op, cif_idx, ptr_idx);
    /* FP_REF (0) is the callee (the trampoline closure itself); the
       libffi CIF and the target function pointer are stored in its
       free-variable slots.  */
    closure = FP_REF (0);
    cif = SCM_PROGRAM_FREE_VARIABLE_REF (closure, cif_idx);
    pointer = SCM_PROGRAM_FREE_VARIABLE_REF (closure, ptr_idx);
    SYNC_IP ();
    CALL_INTRINSIC (foreign_call, (thread, cif, pointer));
    CACHE_SP ();
    NEXT (1);
  }
/* continuation-call contregs:24
 *
 * Return to a continuation, nonlocally.  The arguments to the
 * continuation are taken from the stack.  CONTREGS is a free variable
 * containing the reified continuation.  This instruction is part of
 * the implementation of undelimited continuations, and is not
 * generated by the compiler.
 */
VM_DEFINE_OP (30, continuation_call, "continuation-call", OP1 (X8_C24))
  {
    SCM contregs;
    uint32_t contregs_idx;
    UNPACK_24 (op, contregs_idx);
    contregs =
      SCM_PROGRAM_FREE_VARIABLE_REF (FP_REF (0), contregs_idx);
    SYNC_IP ();
    /* reinstate_continuation_x longjmps back into the continuation's
       context and never returns; reaching the abort below would mean
       the intrinsic's contract was violated.  */
    CALL_INTRINSIC (reinstate_continuation_x, (thread, contregs));
    /* no NEXT */
    abort ();
  }
/* compose-continuation cont:24
 *
 * Compose a partial continuation with the current continuation.  The
 * arguments to the continuation are taken from the stack.  CONT is a
 * free variable containing the reified continuation.  This
 * instruction is part of the implementation of partial continuations,
 * and is not generated by the compiler.
 */
VM_DEFINE_OP (31, compose_continuation, "compose-continuation", OP1 (X8_C24))
  {
    SCM vmcont;
    uint32_t cont_idx;
    uint8_t *mcode;
    UNPACK_24 (op, cont_idx);
    vmcont = SCM_PROGRAM_FREE_VARIABLE_REF (FP_REF (0), cont_idx);
    SYNC_IP ();
    /* The intrinsic splices the partial continuation onto the stack
       and returns the JIT code to resume, if any.  */
    mcode = CALL_INTRINSIC (compose_continuation, (thread, vmcont));
#if ENABLE_JIT
    if (mcode && !VP->disable_mcode)
      {
        /* Resume in machine code; re-fetch the VM registers when (if)
           we come back to the interpreter.  */
        scm_jit_enter_mcode (thread, mcode);
        CACHE_REGISTER ();
        NEXT (0);
      }
    else
#endif
      {
        /* Resume in the interpreter at the continuation's ip.  */
        CACHE_REGISTER ();
        NEXT (0);
      }
  }
  909. /* capture-continuation dst:24
  910. *
  911. * Capture the current continuation. This instruction is part of the
  912. * implementation of `call/cc', and is not generated by the compiler.
  913. */
  914. VM_DEFINE_OP (32, capture_continuation, "capture-continuation", DOP1 (X8_S24))
  915. {
  916. uint32_t dst;
  917. UNPACK_24 (op, dst);
  918. SYNC_IP ();
  919. SP_SET (dst, CALL_INTRINSIC (capture_continuation, (thread)));
  920. NEXT (1);
  921. }
/* abort _:24
 *
 * Abort to a prompt handler.  The tag is expected in r1, and the rest
 * of the values in the frame are returned to the prompt handler.
 * This corresponds to a tail application of abort-to-prompt.
 */
VM_DEFINE_OP (33, abort, "abort", OP1 (X32))
  {
    uint8_t *mcode = NULL;
    /* FIXME: Really we should capture the caller's registers.  Until
       then, manually advance the IP so that when the prompt resumes,
       it continues with the next instruction.  */
    ip++;
    SYNC_IP ();
    mcode = CALL_INTRINSIC (abort_to_prompt, (thread, mcode));
    /* If abort_to_prompt returned, that means there were no
       intervening C frames to jump over, so we just continue
       directly.  */
    CACHE_REGISTER ();
    ABORT_HOOK ();
#if ENABLE_JIT
    if (mcode && !VP->disable_mcode)
      {
        /* The handler has JIT code: run it, then re-fetch the VM
           registers when control comes back to the interpreter.  */
        scm_jit_enter_mcode (thread, mcode);
        CACHE_REGISTER ();
      }
#endif
    NEXT (0);
  }
/* prompt tag:24 escape-only?:1 _:7 proc-slot:24 _:8 handler-offset:24
 *
 * Push a new prompt on the dynamic stack, with a tag from TAG and a
 * handler at HANDLER-OFFSET words from the current IP.  The handler
 * will expect a multiple-value return as if from a call with the
 * procedure at PROC-SLOT.
 */
VM_DEFINE_OP (34, prompt, "prompt", OP3 (X8_S24, B1_X7_F24, X8_L24))
  {
    uint32_t tag, proc_slot;
    int32_t offset;
    uint8_t escape_only_p;
    uint8_t *mra = NULL;
    UNPACK_24 (op, tag);
    escape_only_p = ip[1] & 0x1;
    UNPACK_24 (ip[1], proc_slot);
    /* HANDLER-OFFSET occupies the top 24 bits of the third word; the
       arithmetic right shift both extracts it and sign-extends it.  */
    offset = ip[2];
    offset >>= 8; /* Sign extension */
    /* Push the prompt onto the dynamic stack.  */
    SYNC_IP ();
    CALL_INTRINSIC (push_prompt, (thread, escape_only_p, SP_REF (tag),
                                  VP->fp - proc_slot, ip + offset, mra));
    NEXT (3);
  }
  975. /* builtin-ref dst:12 idx:12
  976. *
  977. * Load a builtin stub by index into DST.
  978. */
  979. VM_DEFINE_OP (35, builtin_ref, "builtin-ref", DOP1 (X8_S12_C12))
  980. {
  981. uint16_t dst, idx;
  982. UNPACK_12_12 (op, dst, idx);
  983. SP_SET (dst, scm_vm_builtin_ref (idx));
  984. NEXT (1);
  985. }
  986. /* throw key:12 args:12
  987. *
  988. * Throw to KEY and ARGS. ARGS should be a list.
  989. */
  990. VM_DEFINE_OP (36, throw, "throw", OP1 (X8_S12_S12))
  991. {
  992. uint16_t a, b;
  993. SCM key, args;
  994. UNPACK_12_12 (op, a, b);
  995. key = SP_REF (a);
  996. args = SP_REF (b);
  997. SYNC_IP ();
  998. CALL_INTRINSIC (throw_, (key, args));
  999. abort (); /* never reached */
  1000. }
/* throw/value val:24 key-subr-and-message:32
 *
 * Raise an error, indicating VAL as the bad value.
 * KEY-SUBR-AND-MESSAGE should be a vector, where the first element is
 * the symbol to which to throw, the second is the procedure in which
 * to signal the error (a string) or #f, and the third is a format
 * string for the message, with one template.
 */
VM_DEFINE_OP (37, throw_value, "throw/value", OP2 (X8_S24, N32))
  {
    uint32_t a;
    int32_t offset;
    scm_t_bits key_subr_and_message_bits;
    SCM val, key_subr_and_message;
    UNPACK_24 (op, a);
    val = SP_REF (a);
    /* The descriptor vector is a tagged SCM embedded in the
       instruction stream at a signed word offset; it must be 8-byte
       aligned for SCM_PACK.  */
    offset = ip[1];
    key_subr_and_message_bits = (scm_t_bits) (ip + offset);
    VM_ASSERT (!(key_subr_and_message_bits & 0x7), abort());
    key_subr_and_message = SCM_PACK (key_subr_and_message_bits);
    SYNC_IP ();
    CALL_INTRINSIC (throw_with_value, (val, key_subr_and_message));
    abort (); /* never reached */
  }
/* throw/value+data val:24 key-subr-and-message:32
 *
 * Raise an error, indicating VAL as the bad value.
 * KEY-SUBR-AND-MESSAGE should be a vector, where the first element is
 * the symbol to which to throw, the second is the procedure in which
 * to signal the error (a string) or #f, and the third is a format
 * string for the message, with one template.
 */
VM_DEFINE_OP (38, throw_value_and_data, "throw/value+data", OP2 (X8_S24, N32))
  {
    uint32_t a;
    int32_t offset;
    scm_t_bits key_subr_and_message_bits;
    SCM val, key_subr_and_message;
    UNPACK_24 (op, a);
    val = SP_REF (a);
    /* Same descriptor layout as throw/value; the only difference is
       that the intrinsic also attaches VAL as the `data' of the
       throw.  */
    offset = ip[1];
    key_subr_and_message_bits = (scm_t_bits) (ip + offset);
    VM_ASSERT (!(key_subr_and_message_bits & 0x7), abort());
    key_subr_and_message = SCM_PACK (key_subr_and_message_bits);
    SYNC_IP ();
    CALL_INTRINSIC (throw_with_value_and_data, (val, key_subr_and_message));
    abort (); /* never reached */
  }
/* handle-interrupts _:24
 *
 * Handle pending interrupts.
 */
VM_DEFINE_OP (39, handle_interrupts, "handle-interrupts", OP1 (X32))
  {
    /* Fast path: nothing pending.  The atomic read is the common case
       and is checked before the (cheaper) block_asyncs test so that
       the hot no-interrupt path does only one load.  */
    if (SCM_LIKELY (scm_is_null
                    (scm_atomic_ref_scm (&thread->pending_asyncs))))
      NEXT (1);
    /* Asyncs are blocked: defer handling.  */
    if (thread->block_asyncs > 0)
      NEXT (1);
    SYNC_IP ();
    CALL_INTRINSIC (push_interrupt_frame, (thread, 0));
    CACHE_SP ();
    /* Tail-call into the shared interrupt-handling bytecode.  */
    ip = scm_vm_intrinsics.handle_interrupt_code;
    NEXT (0);
  }
/* return-from-interrupt _:24
 *
 * Return from handling an interrupt, discarding any return values and
 * stripping away the interrupt frame.
 */
VM_DEFINE_OP (40, return_from_interrupt, "return-from-interrupt", OP1 (X32))
  {
    union scm_vm_stack_element *fp = VP->fp;
    /* Pop the interrupt frame wholesale: restore the saved ip, the
       caller's fp, and the stack pointer from before the frame was
       pushed.  Any values the handler left behind are discarded.  */
    ip = SCM_FRAME_VIRTUAL_RETURN_ADDRESS (fp);
    VP->fp = SCM_FRAME_DYNAMIC_LINK (fp);
    VP->sp = sp = SCM_FRAME_PREVIOUS_SP (fp);
    NEXT (0);
  }
  1079. /* call-thread _:24 IDX:32
  1080. *
  1081. * Call the void-returning instrinsic with index IDX, passing the
  1082. * current scm_thread* as the argument.
  1083. */
  1084. VM_DEFINE_OP (41, call_thread, "call-thread", OP2 (X32, C32))
  1085. {
  1086. scm_t_thread_intrinsic intrinsic;
  1087. intrinsic = intrinsics[ip[1]];
  1088. SYNC_IP ();
  1089. intrinsic (thread);
  1090. CACHE_SP ();
  1091. NEXT (2);
  1092. }
  1093. /* call-thread-scm a:24 IDX:32
  1094. *
  1095. * Call the void-returning instrinsic with index IDX, passing the
  1096. * current scm_thread* and the SCM local A as arguments.
  1097. */
  1098. VM_DEFINE_OP (42, call_thread_scm, "call-thread-scm", OP2 (X8_S24, C32))
  1099. {
  1100. uint32_t a;
  1101. scm_t_thread_scm_intrinsic intrinsic;
  1102. UNPACK_24 (op, a);
  1103. intrinsic = intrinsics[ip[1]];
  1104. SYNC_IP ();
  1105. intrinsic (thread, SP_REF (a));
  1106. CACHE_SP ();
  1107. NEXT (2);
  1108. }
  1109. /* call-thread-scm-scm a:12 b:12 IDX:32
  1110. *
  1111. * Call the void-returning instrinsic with index IDX, passing the
  1112. * current scm_thread* and the SCM locals A and B as arguments.
  1113. */
  1114. VM_DEFINE_OP (43, call_thread_scm_scm, "call-thread-scm-scm", OP2 (X8_S12_S12, C32))
  1115. {
  1116. uint16_t a, b;
  1117. scm_t_thread_scm_scm_intrinsic intrinsic;
  1118. UNPACK_12_12 (op, a, b);
  1119. intrinsic = intrinsics[ip[1]];
  1120. SYNC_IP ();
  1121. intrinsic (thread, SP_REF (a), SP_REF (b));
  1122. CACHE_SP ();
  1123. NEXT (2);
  1124. }
  1125. /* call-scm-sz-u32 a:8 b:8 c:8 IDX:32
  1126. *
  1127. * Call the void-returning instrinsic with index IDX, passing the
  1128. * locals A, B, and C as arguments. A is a SCM value, while B and C
  1129. * are raw u64 values which fit into size_t and uint32_t types,
  1130. * respectively.
  1131. */
  1132. VM_DEFINE_OP (44, call_scm_sz_u32, "call-scm-sz-u32", OP2 (X8_S8_S8_S8, C32))
  1133. {
  1134. uint8_t a, b, c;
  1135. scm_t_scm_sz_u32_intrinsic intrinsic;
  1136. UNPACK_8_8_8 (op, a, b, c);
  1137. intrinsic = intrinsics[ip[1]];
  1138. SYNC_IP ();
  1139. intrinsic (SP_REF (a), SP_REF_U64 (b), SP_REF_U64 (c));
  1140. CACHE_SP ();
  1141. NEXT (2);
  1142. }
  1143. /* call-scm<-thread dst:24 IDX:32
  1144. *
  1145. * Call the SCM-returning instrinsic with index IDX, passing the
  1146. * current scm_thread* as argument. Place the SCM result in DST.
  1147. */
  1148. VM_DEFINE_OP (45, call_scm_from_thread, "call-scm<-thread", DOP2 (X8_S24, C32))
  1149. {
  1150. uint32_t dst;
  1151. scm_t_scm_from_thread_intrinsic intrinsic;
  1152. SCM res;
  1153. UNPACK_24 (op, dst);
  1154. intrinsic = intrinsics[ip[1]];
  1155. SYNC_IP ();
  1156. res = intrinsic (thread);
  1157. CACHE_SP ();
  1158. SP_SET (dst, res);
  1159. NEXT (2);
  1160. }
/* call-s64<-scm dst:12 a:12 IDX:32
 *
 * Call the int64_t-returning intrinsic with index IDX, passing the
 * SCM local A as argument.  Place the s64 result in DST.
 */
VM_DEFINE_OP (46, call_s64_from_scm, "call-s64<-scm", DOP2 (X8_S12_S12, C32))
  {
    uint16_t dst, src;
    scm_t_s64_from_scm_intrinsic intrinsic;
    UNPACK_12_12 (op, dst, src);
    intrinsic = intrinsics[ip[1]];
    SYNC_IP ();
#if INDIRECT_INT64_INTRINSICS
    /* On targets without a native 64-bit ABI, the intrinsic writes
       its result through an out-pointer into the stack slot.  */
    intrinsic (& SP_REF_S64 (dst), SP_REF (src));
#else
    {
      int64_t res = intrinsic (SP_REF (src));
      SP_SET_S64 (dst, res);
    }
#endif
    /* No CACHE_SP () after the intrinsic, as the indirect variants
       have an out argument that points at the stack; stack relocation
       during this kind of intrinsic is not supported!  */
    NEXT (2);
  }
/* call-scm<-u64 dst:12 a:12 IDX:32
 *
 * Call the SCM-returning intrinsic with index IDX, passing the
 * uint64_t local A as argument.  Place the SCM result in DST.
 */
VM_DEFINE_OP (47, call_scm_from_u64, "call-scm<-u64", DOP2 (X8_S12_S12, C32))
  {
    uint16_t dst, src;
    SCM res;
    scm_t_scm_from_u64_intrinsic intrinsic;
    UNPACK_12_12 (op, dst, src);
    intrinsic = intrinsics[ip[1]];
    SYNC_IP ();
#if INDIRECT_INT64_INTRINSICS
    /* On targets without a native 64-bit ABI, pass a pointer to the
       stack slot instead of the value.  */
    res = intrinsic (& SP_REF_U64 (src));
#else
    res = intrinsic (SP_REF_U64 (src));
#endif
    SP_SET (dst, res);
    /* No CACHE_SP () after the intrinsic, as the indirect variants
       pass stack pointers directly; stack relocation during this kind
       of intrinsic is not supported!  */
    NEXT (2);
  }
/* call-scm<-s64 dst:12 a:12 IDX:32
 *
 * Call the SCM-returning intrinsic with index IDX, passing the
 * int64_t local A as argument.  Place the SCM result in DST.
 */
VM_DEFINE_OP (48, call_scm_from_s64, "call-scm<-s64", DOP2 (X8_S12_S12, C32))
  {
    uint16_t dst, src;
    SCM res;
    scm_t_scm_from_s64_intrinsic intrinsic;
    UNPACK_12_12 (op, dst, src);
    intrinsic = intrinsics[ip[1]];
    SYNC_IP ();
#if INDIRECT_INT64_INTRINSICS
    /* On targets without a native 64-bit ABI, pass a pointer to the
       stack slot instead of the value.  */
    res = intrinsic (& SP_REF_S64 (src));
#else
    res = intrinsic (SP_REF_S64 (src));
#endif
    /* NOTE(review): the sibling call-scm<-u64 op omits this CACHE_SP
       with a comment that stack relocation is not supported during
       these indirect intrinsics; confirm whether this CACHE_SP is
       required here or is vestigial.  */
    CACHE_SP ();
    SP_SET (dst, res);
    NEXT (2);
  }
  1232. /* call-scm<-scm dst:12 a:12 IDX:32
  1233. *
  1234. * Call the SCM-returning instrinsic with index IDX, passing the SCM
  1235. * local A as argument. Place the SCM result in DST.
  1236. */
  1237. VM_DEFINE_OP (49, call_scm_from_scm, "call-scm<-scm", DOP2 (X8_S12_S12, C32))
  1238. {
  1239. uint16_t dst, src;
  1240. SCM res;
  1241. scm_t_scm_from_scm_intrinsic intrinsic;
  1242. UNPACK_12_12 (op, dst, src);
  1243. intrinsic = intrinsics[ip[1]];
  1244. SYNC_IP ();
  1245. res = intrinsic (SP_REF (src));
  1246. CACHE_SP ();
  1247. SP_SET (dst, res);
  1248. NEXT (2);
  1249. }
  1250. /* call-f64<-scm dst:12 a:12 IDX:32
  1251. *
  1252. * Call the double-returning instrinsic with index IDX, passing the
  1253. * SCM local A as argument. Place the f64 result in DST.
  1254. */
  1255. VM_DEFINE_OP (50, call_f64_from_scm, "call-f64<-scm", DOP2 (X8_S12_S12, C32))
  1256. {
  1257. uint16_t dst, src;
  1258. double res;
  1259. scm_t_f64_from_scm_intrinsic intrinsic;
  1260. UNPACK_12_12 (op, dst, src);
  1261. intrinsic = intrinsics[ip[1]];
  1262. SYNC_IP ();
  1263. res = intrinsic (SP_REF (src));
  1264. CACHE_SP ();
  1265. SP_SET_F64 (dst, res);
  1266. NEXT (2);
  1267. }
/* call-u64<-scm dst:12 a:12 IDX:32
 *
 * Call the uint64_t-returning intrinsic with index IDX, passing the
 * SCM local A as argument.  Place the u64 result in DST.
 */
VM_DEFINE_OP (51, call_u64_from_scm, "call-u64<-scm", DOP2 (X8_S12_S12, C32))
  {
    uint16_t dst, src;
    scm_t_u64_from_scm_intrinsic intrinsic;
    UNPACK_12_12 (op, dst, src);
    intrinsic = intrinsics[ip[1]];
    SYNC_IP ();
#if INDIRECT_INT64_INTRINSICS
    /* On targets without a native 64-bit ABI, the intrinsic writes
       its result through an out-pointer into the stack slot.  */
    intrinsic (& SP_REF_U64 (dst), SP_REF (src));
#else
    {
      uint64_t res = intrinsic (SP_REF (src));
      SP_SET_U64 (dst, res);
    }
#endif
    /* No CACHE_SP () after the intrinsic, as the indirect variants
       have an out argument that points at the stack; stack relocation
       during this kind of intrinsic is not supported!  */
    NEXT (2);
  }
  1293. /* call-scm<-scm-scm dst:8 a:8 b:8 IDX:32
  1294. *
  1295. * Call the SCM-returning instrinsic with index IDX, passing the SCM
  1296. * locals A and B as arguments. Place the SCM result in DST.
  1297. */
  1298. VM_DEFINE_OP (52, call_scm_from_scm_scm, "call-scm<-scm-scm", DOP2 (X8_S8_S8_S8, C32))
  1299. {
  1300. uint8_t dst, a, b;
  1301. SCM res;
  1302. scm_t_scm_from_scm_scm_intrinsic intrinsic;
  1303. UNPACK_8_8_8 (op, dst, a, b);
  1304. intrinsic = intrinsics[ip[1]];
  1305. SYNC_IP ();
  1306. res = intrinsic (SP_REF (a), SP_REF (b));
  1307. CACHE_SP ();
  1308. SP_SET (dst, res);
  1309. NEXT (2);
  1310. }
  1311. /* call-scm<-scm-uimm dst:8 a:8 b:8 IDX:32
  1312. *
  1313. * Call the SCM-returning instrinsic with index IDX, passing the SCM
  1314. * local A and the uint8_t immediate B as arguments. Place the SCM
  1315. * result in DST.
  1316. */
  1317. VM_DEFINE_OP (53, call_scm_from_scm_uimm, "call-scm<-scm-uimm", DOP2 (X8_S8_S8_C8, C32))
  1318. {
  1319. uint8_t dst, a, b;
  1320. SCM res;
  1321. scm_t_scm_from_scm_uimm_intrinsic intrinsic;
  1322. UNPACK_8_8_8 (op, dst, a, b);
  1323. intrinsic = intrinsics[ip[1]];
  1324. SYNC_IP ();
  1325. res = intrinsic (SP_REF (a), b);
  1326. CACHE_SP ();
  1327. SP_SET (dst, res);
  1328. NEXT (2);
  1329. }
  1330. /* call-scm<-thread-scm dst:12 a:12 IDX:32
  1331. *
  1332. * Call the SCM-returning instrinsic with index IDX, passing the
  1333. * current scm_thread* and SCM local A as arguments. Place the SCM
  1334. * result in DST.
  1335. */
  1336. VM_DEFINE_OP (54, call_scm_from_thread_scm, "call-scm<-thread-scm", DOP2 (X8_S12_S12, C32))
  1337. {
  1338. uint16_t dst, src;
  1339. scm_t_scm_from_thread_scm_intrinsic intrinsic;
  1340. SCM res;
  1341. UNPACK_12_12 (op, dst, src);
  1342. intrinsic = intrinsics[ip[1]];
  1343. SYNC_IP ();
  1344. res = intrinsic (thread, SP_REF (src));
  1345. CACHE_SP ();
  1346. SP_SET (dst, res);
  1347. NEXT (2);
  1348. }
/* call-scm<-scm-u64 dst:8 a:8 b:8 IDX:32
 *
 * Call the SCM-returning intrinsic with index IDX, passing SCM local
 * A and u64 local B as arguments.  Place the SCM result in DST.
 */
VM_DEFINE_OP (55, call_scm_from_scm_u64, "call-scm<-scm-u64", DOP2 (X8_S8_S8_S8, C32))
  {
    uint8_t dst, a, b;
    SCM res;
    scm_t_scm_from_scm_u64_intrinsic intrinsic;
    UNPACK_8_8_8 (op, dst, a, b);
    intrinsic = intrinsics[ip[1]];
    SYNC_IP ();
#if INDIRECT_INT64_INTRINSICS
    /* On targets without a native 64-bit ABI, the u64 argument is
       passed as a pointer to its stack slot.  */
    res = intrinsic (SP_REF (a), & SP_REF_U64 (b));
#else
    res = intrinsic (SP_REF (a), SP_REF_U64 (b));
#endif
    CACHE_SP ();
    SP_SET (dst, res);
    NEXT (2);
  }
  1371. /* make-short-immediate dst:8 low-bits:16
  1372. *
  1373. * Make an immediate whose low bits are LOW-BITS, and whose top bits are
  1374. * 0.
  1375. */
  1376. VM_DEFINE_OP (56, make_short_immediate, "make-short-immediate", DOP1 (X8_S8_I16))
  1377. {
  1378. uint8_t dst;
  1379. scm_t_bits val;
  1380. UNPACK_8_16 (op, dst, val);
  1381. SP_SET (dst, SCM_PACK (val));
  1382. NEXT (1);
  1383. }
  1384. /* make-long-immediate dst:24 low-bits:32
  1385. *
  1386. * Make an immediate whose low bits are LOW-BITS, and whose top bits are
  1387. * 0.
  1388. */
  1389. VM_DEFINE_OP (57, make_long_immediate, "make-long-immediate", DOP2 (X8_S24, I32))
  1390. {
  1391. uint32_t dst;
  1392. scm_t_bits val;
  1393. UNPACK_24 (op, dst);
  1394. val = ip[1];
  1395. SP_SET (dst, SCM_PACK (val));
  1396. NEXT (2);
  1397. }
/* make-long-long-immediate dst:24 high-bits:32 low-bits:32
 *
 * Make an immediate with HIGH-BITS and LOW-BITS.
 */
VM_DEFINE_OP (58, make_long_long_immediate, "make-long-long-immediate", DOP3 (X8_S24, A32, B32))
  {
    uint32_t dst;
    scm_t_bits val;
    UNPACK_24 (op, dst);
#if SIZEOF_UINTPTR_T > 4
    /* 64-bit targets: combine the two instruction words.  */
    val = ip[1];
    val <<= 32;
    val |= ip[2];
#else
    /* 32-bit targets: scm_t_bits cannot hold high bits, so the
       compiler must have emitted zero there.  */
    ASSERT (ip[1] == 0);
    val = ip[2];
#endif
    SP_SET (dst, SCM_PACK (val));
    NEXT (3);
  }
/* make-non-immediate dst:24 offset:32
 *
 * Load a pointer to statically allocated memory into DST.  The
 * object's memory will be found OFFSET 32-bit words away from the
 * current instruction pointer.  OFFSET is a signed value.  The
 * intention here is that the compiler would produce an object file
 * containing the words of a non-immediate object, and this
 * instruction creates a pointer to that memory, effectively
 * resurrecting that object.
 *
 * Whether the object is mutable or immutable depends on where it was
 * allocated by the compiler, and loaded by the loader.
 */
VM_DEFINE_OP (59, make_non_immediate, "make-non-immediate", DOP2 (X8_S24, N32))
  {
    uint32_t dst;
    int32_t offset;
    uint32_t* loc;
    scm_t_bits unpacked;
    UNPACK_24 (op, dst);
    offset = ip[1];
    loc = ip + offset;
    unpacked = (scm_t_bits) loc;
    /* SCM_PACK requires 8-byte alignment; the linker/loader must have
       aligned the embedded object accordingly.  */
    VM_ASSERT (!(unpacked & 0x7), abort());
    SP_SET (dst, SCM_PACK (unpacked));
    NEXT (2);
  }
/* load-label dst:24 offset:32
 *
 * Load a label OFFSET words away from the current IP and write it to
 * DST.  OFFSET is a signed 32-bit integer.
 */
VM_DEFINE_OP (60, load_label, "load-label", DOP2 (X8_S24, L32))
{
  uint32_t dst;
  int32_t offset;

  UNPACK_24 (op, dst);
  offset = ip[1];
  /* The label is stored as a raw address in an untagged u64 slot.  */
  SP_SET_U64 (dst, (uintptr_t) (ip + offset));
  NEXT (2);
}
/* load-f64 dst:24 high-bits:32 low-bits:32
 *
 * Make a double-precision floating-point value with HIGH-BITS and
 * LOW-BITS.
 */
VM_DEFINE_OP (61, load_f64, "load-f64", DOP3 (X8_S24, AF32, BF32))
{
  uint32_t dst;
  uint64_t val;

  UNPACK_24 (op, dst);
  val = ip[1];
  val <<= 32;
  val |= ip[2];
  /* The raw 64-bit pattern is stored directly into the unboxed slot;
     no conversion is performed here.  */
  SP_SET_U64 (dst, val);
  NEXT (3);
}
/* load-u64 dst:24 high-bits:32 low-bits:32
 *
 * Make an unsigned 64-bit integer with HIGH-BITS and LOW-BITS and
 * store it in DST.
 */
VM_DEFINE_OP (62, load_u64, "load-u64", DOP3 (X8_S24, AU32, BU32))
{
  uint32_t dst;
  uint64_t val;

  UNPACK_24 (op, dst);
  val = ip[1];
  val <<= 32;
  val |= ip[2];
  SP_SET_U64 (dst, val);
  NEXT (3);
}
/* load-s64 dst:24 high-bits:32 low-bits:32
 *
 * Make a signed 64-bit integer with HIGH-BITS and LOW-BITS and store
 * it in DST.  (The bit pattern is assembled in a u64; signed and
 * unsigned slots share the same untagged representation.)
 */
VM_DEFINE_OP (63, load_s64, "load-s64", DOP3 (X8_S24, AS32, BS32))
{
  uint32_t dst;
  uint64_t val;

  UNPACK_24 (op, dst);
  val = ip[1];
  val <<= 32;
  val |= ip[2];
  SP_SET_U64 (dst, val);
  NEXT (3);
}
/* current-thread dst:24
 *
 * Write the current thread's handle (an SCM object) into DST.
 */
VM_DEFINE_OP (64, current_thread, "current-thread", DOP1 (X8_S24))
{
  uint32_t dst;

  UNPACK_24 (op, dst);
  SP_SET (dst, thread->handle);
  NEXT (1);
}
/* allocate-words dst:12 count:12
 *
 * Allocate a fresh GC-traced object consisting of COUNT words and
 * store it into DST.  COUNT is a u64 local.
 */
VM_DEFINE_OP (65, allocate_words, "allocate-words", DOP1 (X8_S12_S12))
{
  uint16_t dst, size;

  UNPACK_12_12 (op, dst, size);
  /* Sync the IP before calling out: the intrinsic allocates and so
     may trigger GC, which needs consistent VM state.  */
  SYNC_IP ();
  SP_SET (dst, CALL_INTRINSIC (allocate_words, (thread, SP_REF_U64 (size))));
  NEXT (1);
}
/* allocate-words/immediate dst:12 count:12
 *
 * Allocate a fresh GC-traced object consisting of COUNT words and
 * store it into DST.  COUNT is an immediate.
 */
VM_DEFINE_OP (66, allocate_words_immediate, "allocate-words/immediate", DOP1 (X8_S12_C12))
{
  uint16_t dst, size;

  UNPACK_12_12 (op, dst, size);
  /* Sync the IP: the allocating intrinsic may trigger GC.  */
  SYNC_IP ();
  SP_SET (dst, CALL_INTRINSIC (allocate_words, (thread, size)));
  NEXT (1);
}
/* scm-ref dst:8 obj:8 idx:8
 *
 * Load the SCM object at word offset IDX from local OBJ, and store it
 * to DST.  IDX is a u64 local.
 */
VM_DEFINE_OP (67, scm_ref, "scm-ref", DOP1 (X8_S8_S8_S8))
{
  uint8_t dst, obj, idx;

  UNPACK_8_8_8 (op, dst, obj, idx);
  SP_SET (dst, SCM_CELL_OBJECT (SP_REF (obj), SP_REF_U64 (idx)));
  NEXT (1);
}
/* scm-set! obj:8 idx:8 val:8
 *
 * Store the SCM local VAL into object OBJ at word offset IDX.  IDX is
 * a u64 local.
 */
VM_DEFINE_OP (68, scm_set, "scm-set!", OP1 (X8_S8_S8_S8))
{
  uint8_t obj, idx, val;

  UNPACK_8_8_8 (op, obj, idx, val);
  SCM_SET_CELL_OBJECT (SP_REF (obj), SP_REF_U64 (idx), SP_REF (val));
  NEXT (1);
}
/* scm-ref/tag dst:8 obj:8 tag:8
 *
 * Load the first word of OBJ, subtract the immediate TAG, and store
 * the resulting SCM to DST.  (Inverse of scm-set!/tag.)
 */
VM_DEFINE_OP (69, scm_ref_tag, "scm-ref/tag", DOP1 (X8_S8_S8_C8))
{
  uint8_t dst, obj, tag;

  UNPACK_8_8_8 (op, dst, obj, tag);
  SP_SET (dst, SCM_PACK (SCM_CELL_WORD_0 (SP_REF (obj)) - tag));
  NEXT (1);
}
/* scm-set!/tag obj:8 tag:8 val:8
 *
 * Set the first word of OBJ to the SCM value VAL plus the immediate
 * value TAG.  (Inverse of scm-ref/tag.)
 */
VM_DEFINE_OP (70, scm_set_tag, "scm-set!/tag", OP1 (X8_S8_C8_S8))
{
  uint8_t obj, tag, val;

  UNPACK_8_8_8 (op, obj, tag, val);
  SCM_SET_CELL_WORD_0 (SP_REF (obj), SCM_UNPACK (SP_REF (val)) + tag);
  NEXT (1);
}
/* scm-ref/immediate dst:8 obj:8 idx:8
 *
 * Load the SCM object at word offset IDX from local OBJ, and store it
 * to DST.  IDX is a uint8_t immediate.
 */
VM_DEFINE_OP (71, scm_ref_immediate, "scm-ref/immediate", DOP1 (X8_S8_S8_C8))
{
  uint8_t dst, obj, idx;

  UNPACK_8_8_8 (op, dst, obj, idx);
  SP_SET (dst, SCM_CELL_OBJECT (SP_REF (obj), idx));
  NEXT (1);
}
/* scm-set!/immediate obj:8 idx:8 val:8
 *
 * Store the SCM local VAL into object OBJ at word offset IDX.  IDX is
 * a uint8_t immediate.
 */
VM_DEFINE_OP (72, scm_set_immediate, "scm-set!/immediate", OP1 (X8_S8_C8_S8))
{
  uint8_t obj, idx, val;

  UNPACK_8_8_8 (op, obj, idx, val);
  SCM_SET_CELL_OBJECT (SP_REF (obj), idx, SP_REF (val));
  NEXT (1);
}
/* word-ref dst:8 obj:8 idx:8
 *
 * Load the raw word at offset IDX from local OBJ, and store it to u64
 * DST.  IDX is a u64 local.
 */
VM_DEFINE_OP (73, word_ref, "word-ref", DOP1 (X8_S8_S8_S8))
{
  uint8_t dst, obj, idx;

  UNPACK_8_8_8 (op, dst, obj, idx);
  SP_SET_U64 (dst, SCM_CELL_WORD (SP_REF (obj), SP_REF_U64 (idx)));
  NEXT (1);
}
/* word-set! obj:8 idx:8 val:8
 *
 * Store the u64 local VAL into object OBJ at word offset IDX.  IDX is
 * a u64 local.
 */
VM_DEFINE_OP (74, word_set, "word-set!", OP1 (X8_S8_S8_S8))
{
  uint8_t obj, idx, val;

  UNPACK_8_8_8 (op, obj, idx, val);
  SCM_SET_CELL_WORD (SP_REF (obj), SP_REF_U64 (idx), SP_REF_U64 (val));
  NEXT (1);
}
/* word-ref/immediate dst:8 obj:8 idx:8
 *
 * Load the raw word at offset IDX from local OBJ, and store it to u64
 * DST.  IDX is a uint8_t immediate.
 */
VM_DEFINE_OP (75, word_ref_immediate, "word-ref/immediate", DOP1 (X8_S8_S8_C8))
{
  uint8_t dst, obj, idx;

  UNPACK_8_8_8 (op, dst, obj, idx);
  SP_SET_U64 (dst, SCM_CELL_WORD (SP_REF (obj), idx));
  NEXT (1);
}
/* word-set!/immediate obj:8 idx:8 val:8
 *
 * Store the u64 local VAL into object OBJ at word offset IDX.  IDX is
 * a uint8_t immediate.
 */
VM_DEFINE_OP (76, word_set_immediate, "word-set!/immediate", OP1 (X8_S8_C8_S8))
{
  uint8_t obj, idx, val;

  UNPACK_8_8_8 (op, obj, idx, val);
  SCM_SET_CELL_WORD (SP_REF (obj), idx, SP_REF_U64 (val));
  NEXT (1);
}
/* pointer-ref/immediate dst:8 obj:8 idx:8
 *
 * Load the pointer at word offset IDX from local OBJ, and store it to
 * pointer slot DST.  IDX is a uint8_t immediate.
 */
VM_DEFINE_OP (77, pointer_ref_immediate, "pointer-ref/immediate", DOP1 (X8_S8_S8_C8))
{
  uint8_t dst, obj, idx;

  UNPACK_8_8_8 (op, dst, obj, idx);
  SP_SET_PTR (dst, (void*) SCM_CELL_WORD (SP_REF (obj), idx));
  NEXT (1);
}
/* pointer-set!/immediate obj:8 idx:8 val:8
 *
 * Store the pointer local VAL into object OBJ at word offset IDX.
 * IDX is a uint8_t immediate.
 */
VM_DEFINE_OP (78, pointer_set_immediate, "pointer-set!/immediate", OP1 (X8_S8_C8_S8))
{
  uint8_t obj, idx, val;

  UNPACK_8_8_8 (op, obj, idx, val);
  SCM_SET_CELL_WORD (SP_REF (obj), idx, (uintptr_t) SP_REF_PTR (val));
  NEXT (1);
}
/* tail-pointer-ref/immediate dst:8 obj:8 idx:8
 *
 * Compute the address of word offset IDX within local OBJ (an
 * interior pointer), and store it to DST.  IDX is a uint8_t
 * immediate.
 */
VM_DEFINE_OP (79, tail_pointer_ref_immediate, "tail-pointer-ref/immediate", DOP1 (X8_S8_S8_C8))
{
  uint8_t dst, obj, idx;

  UNPACK_8_8_8 (op, dst, obj, idx);
  SP_SET_PTR (dst, ((scm_t_bits *) SCM2PTR (SP_REF (obj))) + idx);
  NEXT (1);
}
/* atomic-scm-ref/immediate dst:8 obj:8 idx:8
 *
 * Atomically reference the SCM object at word offset IDX from local
 * OBJ, and store it to DST, using the sequential consistency memory
 * model.  IDX is a uint8_t immediate.
 */
VM_DEFINE_OP (80, atomic_scm_ref_immediate, "atomic-scm-ref/immediate", DOP1 (X8_S8_S8_C8))
{
  uint8_t dst, obj, offset;
  SCM *loc;

  UNPACK_8_8_8 (op, dst, obj, offset);
  loc = SCM_CELL_OBJECT_LOC (SP_REF (obj), offset);
  SP_SET (dst, scm_atomic_ref_scm (loc));
  NEXT (1);
}
/* atomic-scm-set!/immediate obj:8 idx:8 val:8
 *
 * Atomically store the SCM local VAL into object OBJ at word offset
 * IDX, using the sequentially consistent memory model.  IDX is a
 * uint8_t immediate.
 */
VM_DEFINE_OP (81, atomic_scm_set_immediate, "atomic-scm-set!/immediate", OP1 (X8_S8_C8_S8))
{
  uint8_t obj, offset, val;
  SCM *loc;

  UNPACK_8_8_8 (op, obj, offset, val);
  loc = SCM_CELL_OBJECT_LOC (SP_REF (obj), offset);
  scm_atomic_set_scm (loc, SP_REF (val));
  NEXT (1);
}
/* atomic-scm-swap!/immediate dst:24 _:8 obj:24 idx:8 val:24
 *
 * Atomically swap the SCM value stored in object OBJ at word offset
 * IDX with VAL, using the sequentially consistent memory model.  IDX
 * is a uint8_t immediate.  Return the previous value to DST.
 */
VM_DEFINE_OP (82, atomic_scm_swap_immediate, "atomic-scm-swap!/immediate", DOP3 (X8_S24, X8_S24, C8_S24))
{
  uint32_t dst, obj, val;
  uint8_t offset;
  SCM *loc;

  /* Three-word encoding: dst, then obj, then offset+val.  */
  UNPACK_24 (op, dst);
  UNPACK_24 (ip[1], obj);
  UNPACK_8_24 (ip[2], offset, val);
  loc = SCM_CELL_OBJECT_LOC (SP_REF (obj), offset);
  SP_SET (dst, scm_atomic_swap_scm (loc, SP_REF (val)));
  NEXT (3);
}
/* atomic-scm-compare-and-swap!/immediate dst:24 _:8 obj:24 idx:8 expected:24 _:8 desired:24
 *
 * Atomically swap the SCM value stored in object OBJ at word offset
 * IDX with DESIRED, if and only if the value that was there was
 * EXPECTED, using the sequentially consistent memory model.  IDX is a
 * uint8_t immediate.  Return the value that was stored at IDX from
 * OBJ in DST (whether or not the swap happened).
 */
VM_DEFINE_OP (83, atomic_scm_compare_and_swap_immediate, "atomic-scm-compare-and-swap!/immediate", DOP4 (X8_S24, X8_S24, C8_S24, X8_S24))
{
  uint32_t dst, obj, expected, desired;
  uint8_t offset;
  SCM *loc;
  SCM got;

  /* Four-word encoding: dst, obj, offset+expected, desired.  */
  UNPACK_24 (op, dst);
  UNPACK_24 (ip[1], obj);
  UNPACK_8_24 (ip[2], offset, expected);
  UNPACK_24 (ip[3], desired);
  loc = SCM_CELL_OBJECT_LOC (SP_REF (obj), offset);
  got = scm_atomic_compare_and_swap_scm (loc, SP_REF (expected),
                                         SP_REF (desired));
  SP_SET (dst, got);
  NEXT (4);
}
/* static-ref dst:24 offset:32
 *
 * Load a SCM value into DST.  The SCM value will be fetched from
 * memory, OFFSET 32-bit words away from the current instruction
 * pointer.  OFFSET is a signed value.
 *
 * The intention is for this instruction to be used to load constants
 * that the compiler is unable to statically allocate, like symbols.
 * These values would be initialized when the object file loads.
 */
VM_DEFINE_OP (84, static_ref, "static-ref", DOP2 (X8_S24, R32))
{
  uint32_t dst;
  int32_t offset;
  uint32_t* loc;
  uintptr_t loc_bits;

  UNPACK_24 (op, dst);
  offset = ip[1];
  loc = ip + offset;
  loc_bits = (uintptr_t) loc;
  /* The target must be suitably aligned to hold an SCM.  */
  VM_ASSERT (ALIGNED_P (loc, SCM), abort());
  SP_SET (dst, *((SCM *) loc_bits));
  NEXT (2);
}
/* static-set! src:24 offset:32
 *
 * Store a SCM value into memory, OFFSET 32-bit words away from the
 * current instruction pointer.  OFFSET is a signed value.
 */
VM_DEFINE_OP (85, static_set, "static-set!", OP2 (X8_S24, LO32))
{
  uint32_t src;
  int32_t offset;
  uint32_t* loc;

  UNPACK_24 (op, src);
  offset = ip[1];
  loc = ip + offset;
  /* The target must be suitably aligned to hold an SCM.  */
  VM_ASSERT (ALIGNED_P (loc, SCM), abort());
  *((SCM *) loc) = SP_REF (src);
  NEXT (2);
}
/* static-patch! _:24 dst-offset:32 src-offset:32
 *
 * Patch a pointer at DST-OFFSET to point to SRC-OFFSET.  Both offsets
 * are signed 32-bit values, indicating a memory address as a number
 * of 32-bit words away from the current instruction pointer.
 */
VM_DEFINE_OP (86, static_patch, "static-patch!", OP3 (X32, LO32, L32))
{
  int32_t dst_offset, src_offset;
  void *src;
  void** dst_loc;

  dst_offset = ip[1];
  src_offset = ip[2];
  dst_loc = (void **) (ip + dst_offset);
  src = ip + src_offset;
  /* The patched slot must be pointer-aligned.  */
  VM_ASSERT (ALIGNED_P (dst_loc, void*), abort());
  *dst_loc = src;
  NEXT (3);
}
/* tag-char dst:12 src:12
 *
 * Make a SCM character whose integer value is the u64 in SRC, and
 * store it in DST.  Note the value is narrowed through scm_t_wchar
 * before tagging.
 */
VM_DEFINE_OP (87, tag_char, "tag-char", DOP1 (X8_S12_S12))
{
  uint16_t dst, src;

  UNPACK_12_12 (op, dst, src);
  SP_SET (dst,
          SCM_MAKE_ITAG8 ((scm_t_bits) (scm_t_wchar) SP_REF_U64 (src),
                          scm_tc8_char));
  NEXT (1);
}
/* untag-char dst:12 src:12
 *
 * Extract the integer value from the SCM character SRC, and store the
 * resulting u64 in DST.  (Inverse of tag-char.)
 */
VM_DEFINE_OP (88, untag_char, "untag-char", DOP1 (X8_S12_S12))
{
  uint16_t dst, src;

  UNPACK_12_12 (op, dst, src);
  SP_SET_U64 (dst, SCM_CHAR (SP_REF (src)));
  NEXT (1);
}
/* tag-fixnum dst:12 src:12
 *
 * Make a SCM fixnum whose value is the s64 in SRC, and store it in
 * DST.
 */
VM_DEFINE_OP (89, tag_fixnum, "tag-fixnum", DOP1 (X8_S12_S12))
{
  uint16_t dst, src;

  UNPACK_12_12 (op, dst, src);
  SP_SET (dst, SCM_I_MAKINUM (SP_REF_S64 (src)));
  NEXT (1);
}
/* untag-fixnum dst:12 src:12
 *
 * Extract the integer value from the SCM fixnum SRC, and store the
 * resulting s64 in DST.  (Inverse of tag-fixnum.)
 */
VM_DEFINE_OP (90, untag_fixnum, "untag-fixnum", DOP1 (X8_S12_S12))
{
  uint16_t dst, src;

  UNPACK_12_12 (op, dst, src);
  SP_SET_S64 (dst, SCM_I_INUM (SP_REF (src)));
  NEXT (1);
}
  1878. /* uadd dst:8 a:8 b:8
  1879. *
  1880. * Add A to B, and place the result in DST. The operands and the
  1881. * result are unboxed unsigned 64-bit integers. Overflow will wrap
  1882. * around.
  1883. */
  1884. VM_DEFINE_OP (91, uadd, "uadd", DOP1 (X8_S8_S8_S8))
  1885. {
  1886. uint8_t dst, a, b;
  1887. UNPACK_8_8_8 (op, dst, a, b);
  1888. SP_SET_U64 (dst, SP_REF_U64 (a) + SP_REF_U64 (b));
  1889. NEXT (1);
  1890. }
  1891. /* usub dst:8 a:8 b:8
  1892. *
  1893. * Subtract B from A, and place the result in DST. The operands and
  1894. * the result are unboxed unsigned 64-bit integers. Overflow will
  1895. * wrap around.
  1896. */
  1897. VM_DEFINE_OP (92, usub, "usub", DOP1 (X8_S8_S8_S8))
  1898. {
  1899. uint8_t dst, a, b;
  1900. UNPACK_8_8_8 (op, dst, a, b);
  1901. SP_SET_U64 (dst, SP_REF_U64 (a) - SP_REF_U64 (b));
  1902. NEXT (1);
  1903. }
  1904. /* umul dst:8 a:8 b:8
  1905. *
  1906. * Multiply A and B, and place the result in DST. The operands and
  1907. * the result are unboxed unsigned 64-bit integers. Overflow will
  1908. * wrap around.
  1909. */
  1910. VM_DEFINE_OP (93, umul, "umul", DOP1 (X8_S8_S8_S8))
  1911. {
  1912. uint8_t dst, a, b;
  1913. UNPACK_8_8_8 (op, dst, a, b);
  1914. SP_SET_U64 (dst, SP_REF_U64 (a) * SP_REF_U64 (b));
  1915. NEXT (1);
  1916. }
  1917. /* uadd/immediate dst:8 src:8 imm:8
  1918. *
  1919. * Add the unsigned 64-bit value from SRC with the unsigned 8-bit
  1920. * value IMM and place the raw unsigned 64-bit result in DST.
  1921. * Overflow will wrap around.
  1922. */
  1923. VM_DEFINE_OP (94, uadd_immediate, "uadd/immediate", DOP1 (X8_S8_S8_C8))
  1924. {
  1925. uint8_t dst, src, imm;
  1926. uint64_t x;
  1927. UNPACK_8_8_8 (op, dst, src, imm);
  1928. x = SP_REF_U64 (src);
  1929. SP_SET_U64 (dst, x + (uint64_t) imm);
  1930. NEXT (1);
  1931. }
  1932. /* usub/immediate dst:8 src:8 imm:8
  1933. *
  1934. * Subtract the unsigned 8-bit value IMM from the unsigned 64-bit
  1935. * value in SRC and place the raw unsigned 64-bit result in DST.
  1936. * Overflow will wrap around.
  1937. */
  1938. VM_DEFINE_OP (95, usub_immediate, "usub/immediate", DOP1 (X8_S8_S8_C8))
  1939. {
  1940. uint8_t dst, src, imm;
  1941. uint64_t x;
  1942. UNPACK_8_8_8 (op, dst, src, imm);
  1943. x = SP_REF_U64 (src);
  1944. SP_SET_U64 (dst, x - (uint64_t) imm);
  1945. NEXT (1);
  1946. }
  1947. /* umul/immediate dst:8 src:8 imm:8
  1948. *
  1949. * Multiply the unsigned 64-bit value from SRC by the unsigned 8-bit
  1950. * value IMM and place the raw unsigned 64-bit result in DST.
  1951. * Overflow will wrap around.
  1952. */
  1953. VM_DEFINE_OP (96, umul_immediate, "umul/immediate", DOP1 (X8_S8_S8_C8))
  1954. {
  1955. uint8_t dst, src, imm;
  1956. uint64_t x;
  1957. UNPACK_8_8_8 (op, dst, src, imm);
  1958. x = SP_REF_U64 (src);
  1959. SP_SET_U64 (dst, x * (uint64_t) imm);
  1960. NEXT (1);
  1961. }
  1962. /* ulogand dst:8 a:8 b:8
  1963. *
  1964. * Place the bitwise AND of the u64 values in A and B into DST.
  1965. */
  1966. VM_DEFINE_OP (97, ulogand, "ulogand", DOP1 (X8_S8_S8_S8))
  1967. {
  1968. uint8_t dst, a, b;
  1969. UNPACK_8_8_8 (op, dst, a, b);
  1970. SP_SET_U64 (dst, SP_REF_U64 (a) & SP_REF_U64 (b));
  1971. NEXT (1);
  1972. }
  1973. /* ulogior dst:8 a:8 b:8
  1974. *
  1975. * Place the bitwise inclusive OR of the u64 values in A and B into
  1976. * DST.
  1977. */
  1978. VM_DEFINE_OP (98, ulogior, "ulogior", DOP1 (X8_S8_S8_S8))
  1979. {
  1980. uint8_t dst, a, b;
  1981. UNPACK_8_8_8 (op, dst, a, b);
  1982. SP_SET_U64 (dst, SP_REF_U64 (a) | SP_REF_U64 (b));
  1983. NEXT (1);
  1984. }
  1985. /* ulogsub dst:8 a:8 b:8
  1986. *
  1987. * Place the (A & ~B) of the u64 values A and B into DST.
  1988. */
  1989. VM_DEFINE_OP (99, ulogsub, "ulogsub", DOP1 (X8_S8_S8_S8))
  1990. {
  1991. uint8_t dst, a, b;
  1992. UNPACK_8_8_8 (op, dst, a, b);
  1993. SP_SET_U64 (dst, SP_REF_U64 (a) & ~SP_REF_U64 (b));
  1994. NEXT (1);
  1995. }
  1996. /* ulogxor dst:8 a:8 b:8
  1997. *
  1998. * Place the bitwise exclusive OR of the u64 values in A and B into
  1999. * DST.
  2000. */
  2001. VM_DEFINE_OP (100, ulogxor, "ulogxor", DOP1 (X8_S8_S8_S8))
  2002. {
  2003. uint8_t dst, a, b;
  2004. UNPACK_8_8_8 (op, dst, a, b);
  2005. SP_SET_U64 (dst, SP_REF_U64 (a) ^ SP_REF_U64 (b));
  2006. NEXT (1);
  2007. }
  2008. /* ursh dst:8 a:8 b:8
  2009. *
  2010. * Shift the u64 value in A right by B bits, and place the result in
  2011. * DST. Only the lower 6 bits of B are used.
  2012. */
  2013. VM_DEFINE_OP (101, ursh, "ursh", DOP1 (X8_S8_S8_S8))
  2014. {
  2015. uint8_t dst, a, b;
  2016. UNPACK_8_8_8 (op, dst, a, b);
  2017. SP_SET_U64 (dst, SP_REF_U64 (a) >> (SP_REF_U64 (b) & 63));
  2018. NEXT (1);
  2019. }
  2020. /* srsh dst:8 a:8 b:8
  2021. *
  2022. * Shift the s64 value in A right by B bits, and place the result in
  2023. * DST. Only the lower 6 bits of B are used.
  2024. */
  2025. VM_DEFINE_OP (102, srsh, "srsh", DOP1 (X8_S8_S8_S8))
  2026. {
  2027. uint8_t dst, a, b;
  2028. UNPACK_8_8_8 (op, dst, a, b);
  2029. SP_SET_S64 (dst, SCM_SRS (SP_REF_S64 (a), (SP_REF_U64 (b) & 63)));
  2030. NEXT (1);
  2031. }
  2032. /* ulsh dst:8 a:8 b:8
  2033. *
  2034. * Shift the u64 value in A left by B bits, and place the result in
  2035. * DST. Only the lower 6 bits of B are used.
  2036. */
  2037. VM_DEFINE_OP (103, ulsh, "ulsh", DOP1 (X8_S8_S8_S8))
  2038. {
  2039. uint8_t dst, a, b;
  2040. UNPACK_8_8_8 (op, dst, a, b);
  2041. SP_SET_U64 (dst, SP_REF_U64 (a) << (SP_REF_U64 (b) & 63));
  2042. NEXT (1);
  2043. }
  2044. /* ursh/immediate dst:8 a:8 b:8
  2045. *
  2046. * Shift the u64 value in A right by the immediate B bits, and place
  2047. * the result in DST. Only the lower 6 bits of B are used.
  2048. */
  2049. VM_DEFINE_OP (104, ursh_immediate, "ursh/immediate", DOP1 (X8_S8_S8_C8))
  2050. {
  2051. uint8_t dst, a, b;
  2052. UNPACK_8_8_8 (op, dst, a, b);
  2053. SP_SET_U64 (dst, SP_REF_U64 (a) >> (b & 63));
  2054. NEXT (1);
  2055. }
  2056. /* srsh/immediate dst:8 a:8 b:8
  2057. *
  2058. * Shift the s64 value in A right by the immediate B bits, and place
  2059. * the result in DST. Only the lower 6 bits of B are used.
  2060. */
  2061. VM_DEFINE_OP (105, srsh_immediate, "srsh/immediate", DOP1 (X8_S8_S8_C8))
  2062. {
  2063. uint8_t dst, a, b;
  2064. UNPACK_8_8_8 (op, dst, a, b);
  2065. SP_SET_S64 (dst, SCM_SRS (SP_REF_S64 (a), b & 63));
  2066. NEXT (1);
  2067. }
  2068. /* ulsh/immediate dst:8 a:8 b:8
  2069. *
  2070. * Shift the u64 value in A left by the immediate B bits, and place
  2071. * the result in DST. Only the lower 6 bits of B are used.
  2072. */
  2073. VM_DEFINE_OP (106, ulsh_immediate, "ulsh/immediate", DOP1 (X8_S8_S8_C8))
  2074. {
  2075. uint8_t dst, a, b;
  2076. UNPACK_8_8_8 (op, dst, a, b);
  2077. SP_SET_U64 (dst, SP_REF_U64 (a) << (b & 63));
  2078. NEXT (1);
  2079. }
  2080. /* fadd dst:8 a:8 b:8
  2081. *
  2082. * Add A to B, and place the result in DST. The operands and the
  2083. * result are unboxed double-precision floating-point numbers.
  2084. */
  2085. VM_DEFINE_OP (107, fadd, "fadd", DOP1 (X8_S8_S8_S8))
  2086. {
  2087. uint8_t dst, a, b;
  2088. UNPACK_8_8_8 (op, dst, a, b);
  2089. SP_SET_F64 (dst, SP_REF_F64 (a) + SP_REF_F64 (b));
  2090. NEXT (1);
  2091. }
  2092. /* fsub dst:8 a:8 b:8
  2093. *
  2094. * Subtract B from A, and place the result in DST. The operands and
  2095. * the result are unboxed double-precision floating-point numbers.
  2096. */
  2097. VM_DEFINE_OP (108, fsub, "fsub", DOP1 (X8_S8_S8_S8))
  2098. {
  2099. uint8_t dst, a, b;
  2100. UNPACK_8_8_8 (op, dst, a, b);
  2101. SP_SET_F64 (dst, SP_REF_F64 (a) - SP_REF_F64 (b));
  2102. NEXT (1);
  2103. }
  2104. /* fmul dst:8 a:8 b:8
  2105. *
  2106. * Multiply A and B, and place the result in DST. The operands and
  2107. * the result are unboxed double-precision floating-point numbers.
  2108. */
  2109. VM_DEFINE_OP (109, fmul, "fmul", DOP1 (X8_S8_S8_S8))
  2110. {
  2111. uint8_t dst, a, b;
  2112. UNPACK_8_8_8 (op, dst, a, b);
  2113. SP_SET_F64 (dst, SP_REF_F64 (a) * SP_REF_F64 (b));
  2114. NEXT (1);
  2115. }
  2116. /* fdiv dst:8 a:8 b:8
  2117. *
  2118. * Divide A by B, and place the result in DST. The operands and the
  2119. * result are unboxed double-precision floating-point numbers.
  2120. */
  2121. VM_DEFINE_OP (110, fdiv, "fdiv", DOP1 (X8_S8_S8_S8))
  2122. {
  2123. uint8_t dst, a, b;
  2124. UNPACK_8_8_8 (op, dst, a, b);
  2125. SP_SET_F64 (dst, SP_REF_F64 (a) / SP_REF_F64 (b));
  2126. NEXT (1);
  2127. }
/* u64=? a:12 b:12
 *
 * Set the comparison result to EQUAL if the u64 values A and B are
 * the same, or NONE otherwise.
 */
VM_DEFINE_OP (111, u64_numerically_equal, "u64=?", OP1 (X8_S12_S12))
{
  uint16_t a, b;
  uint64_t x, y;

  UNPACK_12_12 (op, a, b);
  x = SP_REF_U64 (a);
  y = SP_REF_U64 (b);
  VP->compare_result = x == y ? SCM_F_COMPARE_EQUAL : SCM_F_COMPARE_NONE;
  NEXT (1);
}
/* u64<? a:12 b:12
 *
 * Set the comparison result to LESS_THAN if the u64 value A is less
 * than the u64 value B, or NONE otherwise.
 */
VM_DEFINE_OP (112, u64_less, "u64<?", OP1 (X8_S12_S12))
{
  uint16_t a, b;
  uint64_t x, y;

  UNPACK_12_12 (op, a, b);
  x = SP_REF_U64 (a);
  y = SP_REF_U64 (b);
  VP->compare_result = x < y ? SCM_F_COMPARE_LESS_THAN : SCM_F_COMPARE_NONE;
  NEXT (1);
}
/* s64<? a:12 b:12
 *
 * Set the comparison result to LESS_THAN if the s64 value A is less
 * than the s64 value B, or NONE otherwise.
 */
VM_DEFINE_OP (113, s64_less, "s64<?", OP1 (X8_S12_S12))
{
  uint16_t a, b;
  int64_t x, y;

  UNPACK_12_12 (op, a, b);
  x = SP_REF_S64 (a);
  y = SP_REF_S64 (b);
  VP->compare_result = x < y ? SCM_F_COMPARE_LESS_THAN : SCM_F_COMPARE_NONE;
  NEXT (1);
}
/* s64-imm=? a:12 b:12
 *
 * Set the comparison result to EQUAL if the s64 value A is equal to
 * the immediate s64 value B, or NONE otherwise.
 */
VM_DEFINE_OP (114, s64_imm_numerically_equal, "s64-imm=?", OP1 (X8_S12_Z12))
{
  uint16_t a;
  int64_t x, y;

  /* Manual unpack: low operand field.  */
  a = (op >> 8) & 0xfff;
  x = SP_REF_S64 (a);
  y = ((int32_t) op) >> 20; /* Sign extension of the 12-bit immediate. */
  VP->compare_result = x == y ? SCM_F_COMPARE_EQUAL : SCM_F_COMPARE_NONE;
  NEXT (1);
}
/* u64-imm<? a:12 b:12
 *
 * Set the comparison result to LESS_THAN if the u64 value A is less
 * than the immediate u64 value B, or NONE otherwise.
 */
VM_DEFINE_OP (115, u64_imm_less, "u64-imm<?", OP1 (X8_S12_C12))
{
  uint16_t a;
  uint64_t x, y;

  UNPACK_12_12 (op, a, y);
  x = SP_REF_U64 (a);
  VP->compare_result = x < y ? SCM_F_COMPARE_LESS_THAN : SCM_F_COMPARE_NONE;
  NEXT (1);
}
/* imm-u64<? a:12 b:12
 *
 * Set the comparison result to LESS_THAN if the u64 immediate B is
 * less than the u64 value A, or NONE otherwise.  (Operands of u64-imm<?
 * reversed.)
 */
VM_DEFINE_OP (116, imm_u64_less, "imm-u64<?", OP1 (X8_S12_C12))
{
  uint16_t a;
  uint64_t x, y;

  UNPACK_12_12 (op, a, x);
  y = SP_REF_U64 (a);
  VP->compare_result = x < y ? SCM_F_COMPARE_LESS_THAN : SCM_F_COMPARE_NONE;
  NEXT (1);
}
/* s64-imm<? a:12 b:12
 *
 * Set the comparison result to LESS_THAN if the s64 value A is less
 * than the immediate s64 value B, or NONE otherwise.
 */
VM_DEFINE_OP (117, s64_imm_less, "s64-imm<?", OP1 (X8_S12_Z12))
{
  uint16_t a;
  int64_t x, y;

  a = (op >> 8) & 0xfff;
  x = SP_REF_S64 (a);
  y = ((int32_t) op) >> 20; /* Sign extension of the 12-bit immediate. */
  VP->compare_result = x < y ? SCM_F_COMPARE_LESS_THAN : SCM_F_COMPARE_NONE;
  NEXT (1);
}
/* imm-s64<? a:12 b:12
 *
 * Set the comparison result to LESS_THAN if the s64 immediate B is
 * less than the s64 value A, or NONE otherwise.  (Operands of s64-imm<?
 * reversed.)
 */
VM_DEFINE_OP (118, imm_s64_less, "imm-s64<?", OP1 (X8_S12_Z12))
{
  uint16_t a;
  int64_t x, y;

  a = (op >> 8) & 0xfff;
  y = SP_REF_S64 (a);
  x = ((int32_t) op) >> 20; /* Sign extension of the 12-bit immediate. */
  VP->compare_result = x < y ? SCM_F_COMPARE_LESS_THAN : SCM_F_COMPARE_NONE;
  NEXT (1);
}
/* f64=? a:12 b:12
 *
 * Set the comparison result to EQUAL if the f64 value A is equal to
 * the f64 value B, or NONE otherwise.
 */
VM_DEFINE_OP (119, f64_numerically_equal, "f64=?", OP1 (X8_S12_S12))
{
  uint16_t a, b;
  double x, y;

  UNPACK_12_12 (op, a, b);
  x = SP_REF_F64 (a);
  y = SP_REF_F64 (b);
  if (x == y)
    VP->compare_result = SCM_F_COMPARE_EQUAL;
  else
    /* This is also the case for NaN (NaN != anything).  */
    VP->compare_result = SCM_F_COMPARE_NONE;
  NEXT (1);
}
/* f64<? a:12 b:12
 *
 * Set the comparison result to LESS_THAN if the f64 value A is less
 * than the f64 value B, NONE if A is greater than or equal to B, or
 * INVALID otherwise (i.e. when the operands are unordered).
 */
VM_DEFINE_OP (120, f64_less, "f64<?", OP1 (X8_S12_S12))
{
  uint16_t a, b;
  double x, y;

  UNPACK_12_12 (op, a, b);
  x = SP_REF_F64 (a);
  y = SP_REF_F64 (b);
  if (x < y)
    VP->compare_result = SCM_F_COMPARE_LESS_THAN;
  else if (x >= y)
    VP->compare_result = SCM_F_COMPARE_NONE;
  else
    /* Neither < nor >= holds only when a NaN is involved.  */
    VP->compare_result = SCM_F_COMPARE_INVALID;
  NEXT (1);
}
/* =? a:12 b:12
 *
 * Set the comparison result to EQUAL if the SCM values A and B are
 * numerically equal, in the sense of "=".  Set to NONE otherwise.
 */
VM_DEFINE_OP (121, numerically_equal, "=?", OP1 (X8_S12_S12))
{
  uint16_t a, b;
  SCM x, y;

  UNPACK_12_12 (op, a, b);
  x = SP_REF (a);
  y = SP_REF (b);
  /* The intrinsic can call out to Scheme-level arithmetic: sync the
     IP before, and re-cache the SP after, as the stack may move.  */
  SYNC_IP ();
  if (CALL_INTRINSIC (numerically_equal_p, (x, y)))
    VP->compare_result = SCM_F_COMPARE_EQUAL;
  else
    VP->compare_result = SCM_F_COMPARE_NONE;
  CACHE_SP ();
  NEXT (1);
}
/* heap-numbers-equal? a:12 b:12
 *
 * Set the comparison result to EQUAL if the SCM values A and B are
 * numerically equal, in the sense of "=".  Set to NONE otherwise.  It
 * is known that both A and B are heap numbers.
 */
VM_DEFINE_OP (122, heap_numbers_equal, "heap-numbers-equal?", OP1 (X8_S12_S12))
{
  uint16_t a, b;
  SCM x, y;

  UNPACK_12_12 (op, a, b);
  x = SP_REF (a);
  y = SP_REF (b);
  /* Sync IP before the intrinsic call; re-cache SP afterwards.  */
  SYNC_IP ();
  if (CALL_INTRINSIC (heap_numbers_equal_p, (x, y)))
    VP->compare_result = SCM_F_COMPARE_EQUAL;
  else
    VP->compare_result = SCM_F_COMPARE_NONE;
  CACHE_SP ();
  NEXT (1);
}
/* <? a:12 b:12
 *
 * Set the comparison result to LESS_THAN if the SCM value A is less
 * than the SCM value B, NONE if A is greater than or equal to B, or
 * INVALID otherwise.
 */
VM_DEFINE_OP (123, less, "<?", OP1 (X8_S12_S12))
{
  uint16_t a, b;
  SCM x, y;

  UNPACK_12_12 (op, a, b);
  x = SP_REF (a);
  y = SP_REF (b);
  /* The intrinsic returns the comparison flag directly.  Sync IP
     before the call; re-cache SP afterwards.  */
  SYNC_IP ();
  VP->compare_result = CALL_INTRINSIC (less_p, (x, y));
  CACHE_SP ();
  NEXT (1);
}
/* immediate-tag=? obj:24 mask:16 tag:16
 *
 * Set the comparison result to EQUAL if the result of a bitwise AND
 * between the bits of SCM value OBJ and the immediate MASK is TAG, or
 * NONE otherwise.
 */
VM_DEFINE_OP (124, immediate_tag_equals, "immediate-tag=?", OP2 (X8_S24, C16_C16))
{
  uint32_t a;
  uint16_t mask, expected;
  SCM x;

  UNPACK_24 (op, a);
  UNPACK_16_16 (ip[1], mask, expected);
  x = SP_REF (a);
  if ((SCM_UNPACK (x) & mask) == expected)
    VP->compare_result = SCM_F_COMPARE_EQUAL;
  else
    VP->compare_result = SCM_F_COMPARE_NONE;
  NEXT (2);
}
  2366. /* heap-tag=? obj:24 mask:16 tag:16
  2367. *
  2368. * Set the comparison result to EQUAL if the result of a bitwise AND
  2369. * between the first word of SCM value A and the immediate MASK is
  2370. * TAG, or NONE otherwise.
  2371. */
  2372. VM_DEFINE_OP (125, heap_tag_equals, "heap-tag=?", OP2 (X8_S24, C16_C16))
  2373. {
  2374. uint32_t a;
  2375. uint16_t mask, expected;
  2376. SCM x;
  2377. UNPACK_24 (op, a);
  2378. UNPACK_16_16 (ip[1], mask, expected);
  2379. x = SP_REF (a);
  2380. if ((SCM_CELL_TYPE (x) & mask) == expected)
  2381. VP->compare_result = SCM_F_COMPARE_EQUAL;
  2382. else
  2383. VP->compare_result = SCM_F_COMPARE_NONE;
  2384. NEXT (2);
  2385. }
  2386. /* eq? a:12 b:12
  2387. *
  2388. * Set the comparison result to EQUAL if the SCM values A and B are
  2389. * eq?, or NONE otherwise.
  2390. */
  2391. VM_DEFINE_OP (126, eq, "eq?", OP1 (X8_S12_S12))
  2392. {
  2393. uint16_t a, b;
  2394. SCM x, y;
  2395. UNPACK_12_12 (op, a, b);
  2396. x = SP_REF (a);
  2397. y = SP_REF (b);
  2398. if (scm_is_eq (x, y))
  2399. VP->compare_result = SCM_F_COMPARE_EQUAL;
  2400. else
  2401. VP->compare_result = SCM_F_COMPARE_NONE;
  2402. NEXT (1);
  2403. }
  2404. /* j offset:24
  2405. *
  2406. * Add OFFSET, a signed 24-bit number, to the current instruction
  2407. * pointer.
  2408. */
  2409. VM_DEFINE_OP (127, j, "j", OP1 (X8_L24))
  2410. {
  2411. int32_t offset = op;
  2412. offset >>= 8; /* Sign-extending shift. */
  2413. NEXT (offset);
  2414. }
  2415. /* jl offset:24
  2416. *
  2417. * If the last comparison result is equal to SCM_F_COMPARE_LESS_THAN, add
  2418. * OFFSET, a signed 24-bit number, to the current instruction pointer.
  2419. */
  2420. VM_DEFINE_OP (128, jl, "jl", OP1 (X8_L24))
  2421. {
  2422. if (VP->compare_result == SCM_F_COMPARE_LESS_THAN)
  2423. {
  2424. int32_t offset = op;
  2425. offset >>= 8; /* Sign-extending shift. */
  2426. NEXT (offset);
  2427. }
  2428. else
  2429. NEXT (1);
  2430. }
  2431. /* je offset:24
  2432. *
  2433. * If the last comparison result was EQUAL, then add OFFSET, a signed
  2434. * 24-bit number, to the current instruction pointer.
  2435. */
  2436. VM_DEFINE_OP (129, je, "je", OP1 (X8_L24))
  2437. {
  2438. if (VP->compare_result == SCM_F_COMPARE_EQUAL)
  2439. {
  2440. int32_t offset = op;
  2441. offset >>= 8; /* Sign-extending shift. */
  2442. NEXT (offset);
  2443. }
  2444. else
  2445. NEXT (1);
  2446. }
  2447. /* jnl offset:24
  2448. *
  2449. * If the last comparison result was not LESS_THAN, then add OFFSET, a
  2450. * signed 24-bit number, to the current instruction pointer.
  2451. */
  2452. VM_DEFINE_OP (130, jnl, "jnl", OP1 (X8_L24))
  2453. {
  2454. if (VP->compare_result != SCM_F_COMPARE_LESS_THAN)
  2455. {
  2456. int32_t offset = op;
  2457. offset >>= 8; /* Sign-extending shift. */
  2458. NEXT (offset);
  2459. }
  2460. else
  2461. NEXT (1);
  2462. }
  2463. /* jne offset:24
  2464. *
  2465. * If the last comparison result was not EQUAL, then add OFFSET, a
  2466. * signed 24-bit number, to the current instruction pointer.
  2467. */
  2468. VM_DEFINE_OP (131, jne, "jne", OP1 (X8_L24))
  2469. {
  2470. if (VP->compare_result != SCM_F_COMPARE_EQUAL)
  2471. {
  2472. int32_t offset = op;
  2473. offset >>= 8; /* Sign-extending shift. */
  2474. NEXT (offset);
  2475. }
  2476. else
  2477. NEXT (1);
  2478. }
  2479. /* jge offset:24
  2480. *
  2481. * If the last comparison result was NONE, then add OFFSET, a signed
  2482. * 24-bit number, to the current instruction pointer.
  2483. *
  2484. * This is intended for use after a "<?" comparison, and is different
  2485. * from "jnl" in the way it handles not-a-number (NaN) values: "<?"
  2486. * sets INVALID instead of NONE if either value is a NaN. For exact
  2487. * numbers, "jge" is the same as "jnl".
  2488. */
  2489. VM_DEFINE_OP (132, jge, "jge", OP1 (X8_L24))
  2490. {
  2491. if (VP->compare_result == SCM_F_COMPARE_NONE)
  2492. {
  2493. int32_t offset = op;
  2494. offset >>= 8; /* Sign-extending shift. */
  2495. NEXT (offset);
  2496. }
  2497. else
  2498. NEXT (1);
  2499. }
  2500. /* jnge offset:24
  2501. *
  2502. * If the last comparison result was not NONE, then add OFFSET, a
  2503. * signed 24-bit number, to the current instruction pointer.
  2504. *
  2505. * This is intended for use after a "<?" comparison, and is different
  2506. * from "jl" in the way it handles not-a-number (NaN) values: "<?"
  2507. * sets INVALID instead of NONE if either value is a NaN. For exact
  2508. * numbers, "jnge" is the same as "jl".
  2509. */
  2510. VM_DEFINE_OP (133, jnge, "jnge", OP1 (X8_L24))
  2511. {
  2512. if (VP->compare_result != SCM_F_COMPARE_NONE)
  2513. {
  2514. int32_t offset = op;
  2515. offset >>= 8; /* Sign-extending shift. */
  2516. NEXT (offset);
  2517. }
  2518. else
  2519. NEXT (1);
  2520. }
/* Shared body for the raw-pointer load instructions below: read a
   TYPE-sized value from byte offset IDX (a u64 local) off the raw
   pointer in local A, and store it into DST's SLOT representation.
   The memcpy tolerates unaligned addresses and avoids strict-aliasing
   violations that a pointer cast would incur.  */
#define PTR_REF(type, slot)                                             \
  do {                                                                  \
    uint8_t dst, a, b;                                                  \
    char *ptr;                                                          \
    size_t idx;                                                         \
    type val;                                                           \
    UNPACK_8_8_8 (op, dst, a, b);                                       \
    ptr = SP_REF_PTR (a);                                               \
    idx = SP_REF_U64 (b);                                               \
    memcpy (&val, ptr + idx, sizeof (val));                             \
    SP_SET_ ## slot (dst, val);                                         \
    NEXT (1);                                                           \
  } while (0)

/* Shared body for the raw-pointer store instructions below: narrow the
   SLOT-typed value in local C to TYPE and write it at byte offset IDX
   (a u64 local) off the raw pointer in local A.  As above, memcpy
   handles unaligned targets safely.  */
#define PTR_SET(type, slot)                                             \
  do {                                                                  \
    uint8_t a, b, c;                                                    \
    char *ptr;                                                          \
    size_t idx;                                                         \
    type val;                                                           \
    UNPACK_8_8_8 (op, a, b, c);                                         \
    ptr = SP_REF_PTR (a);                                               \
    idx = SP_REF_U64 (b);                                               \
    val = SP_REF_ ## slot (c);                                          \
    memcpy (ptr + idx, &val, sizeof (val));                             \
    NEXT (1);                                                           \
  } while (0)
/* u8-ref dst:8 ptr:8 idx:8
   *
   * Load the u8 at byte offset IDX from pointer PTR, and store it to
   * u64 DST.
   */
  VM_DEFINE_OP (134, u8_ref, "u8-ref", DOP1 (X8_S8_S8_S8))
    PTR_REF (uint8_t, U64);

  /* u16-ref dst:8 ptr:8 idx:8
   *
   * Load the u16 at byte offset IDX from pointer PTR, and store it to
   * u64 DST.
   */
  VM_DEFINE_OP (135, u16_ref, "u16-ref", DOP1 (X8_S8_S8_S8))
    PTR_REF (uint16_t, U64);

  /* u32-ref dst:8 ptr:8 idx:8
   *
   * Load the u32 at byte offset IDX from pointer PTR, and store it to
   * u64 DST.
   */
  VM_DEFINE_OP (136, u32_ref, "u32-ref", DOP1 (X8_S8_S8_S8))
    PTR_REF (uint32_t, U64);

  /* u64-ref dst:8 ptr:8 idx:8
   *
   * Load the u64 at byte offset IDX from pointer PTR, and store it to
   * u64 DST.
   */
  VM_DEFINE_OP (137, u64_ref, "u64-ref", DOP1 (X8_S8_S8_S8))
    PTR_REF (uint64_t, U64);

  /* u8-set! ptr:8 idx:8 val:8
   *
   * Store the u64 value VAL into the u8 at byte offset IDX from pointer
   * PTR.
   */
  VM_DEFINE_OP (138, u8_set, "u8-set!", OP1 (X8_S8_S8_S8))
    PTR_SET (uint8_t, U64);

  /* u16-set! ptr:8 idx:8 val:8
   *
   * Store the u64 value VAL into the u16 at byte offset IDX from
   * pointer PTR.
   */
  VM_DEFINE_OP (139, u16_set, "u16-set!", OP1 (X8_S8_S8_S8))
    PTR_SET (uint16_t, U64);

  /* u32-set! ptr:8 idx:8 val:8
   *
   * Store the u64 value VAL into the u32 at byte offset IDX from
   * pointer PTR.
   */
  VM_DEFINE_OP (140, u32_set, "u32-set!", OP1 (X8_S8_S8_S8))
    PTR_SET (uint32_t, U64);

  /* u64-set! ptr:8 idx:8 val:8
   *
   * Store the u64 value VAL into the u64 at byte offset IDX from
   * pointer PTR.
   */
  VM_DEFINE_OP (141, u64_set, "u64-set!", OP1 (X8_S8_S8_S8))
    PTR_SET (uint64_t, U64);

  /* s8-ref dst:8 ptr:8 idx:8
   *
   * Load the s8 at byte offset IDX from pointer PTR, and store it to
   * s64 DST.
   */
  VM_DEFINE_OP (142, s8_ref, "s8-ref", DOP1 (X8_S8_S8_S8))
    PTR_REF (int8_t, S64);

  /* s16-ref dst:8 ptr:8 idx:8
   *
   * Load the s16 at byte offset IDX from pointer PTR, and store it to
   * s64 DST.
   */
  VM_DEFINE_OP (143, s16_ref, "s16-ref", DOP1 (X8_S8_S8_S8))
    PTR_REF (int16_t, S64);

  /* s32-ref dst:8 ptr:8 idx:8
   *
   * Load the s32 at byte offset IDX from pointer PTR, and store it to
   * s64 DST.
   */
  VM_DEFINE_OP (144, s32_ref, "s32-ref", DOP1 (X8_S8_S8_S8))
    PTR_REF (int32_t, S64);

  /* s64-ref dst:8 ptr:8 idx:8
   *
   * Load the s64 at byte offset IDX from pointer PTR, and store it to
   * s64 DST.
   */
  VM_DEFINE_OP (145, s64_ref, "s64-ref", DOP1 (X8_S8_S8_S8))
    PTR_REF (int64_t, S64);

  /* s8-set! ptr:8 idx:8 val:8
   *
   * Store the s64 value VAL into the s8 at byte offset IDX from pointer
   * PTR.
   */
  VM_DEFINE_OP (146, s8_set, "s8-set!", OP1 (X8_S8_S8_S8))
    PTR_SET (int8_t, S64);

  /* s16-set! ptr:8 idx:8 val:8
   *
   * Store the s64 value VAL into the s16 at byte offset IDX from
   * pointer PTR.
   */
  VM_DEFINE_OP (147, s16_set, "s16-set!", OP1 (X8_S8_S8_S8))
    PTR_SET (int16_t, S64);

  /* s32-set! ptr:8 idx:8 val:8
   *
   * Store the s64 value VAL into the s32 at byte offset IDX from
   * pointer PTR.
   */
  VM_DEFINE_OP (148, s32_set, "s32-set!", OP1 (X8_S8_S8_S8))
    PTR_SET (int32_t, S64);

  /* s64-set! ptr:8 idx:8 val:8
   *
   * Store the s64 value VAL into the s64 at byte offset IDX from
   * pointer PTR.
   */
  VM_DEFINE_OP (149, s64_set, "s64-set!", OP1 (X8_S8_S8_S8))
    PTR_SET (int64_t, S64);

  /* f32-ref dst:8 ptr:8 idx:8
   *
   * Load the f32 at byte offset IDX from pointer PTR, and store it to
   * f64 DST.
   */
  VM_DEFINE_OP (150, f32_ref, "f32-ref", DOP1 (X8_S8_S8_S8))
    PTR_REF (float, F64);

  /* f64-ref dst:8 ptr:8 idx:8
   *
   * Load the f64 at byte offset IDX from pointer PTR, and store it to
   * f64 DST.
   */
  VM_DEFINE_OP (151, f64_ref, "f64-ref", DOP1 (X8_S8_S8_S8))
    PTR_REF (double, F64);

  /* f32-set! ptr:8 idx:8 val:8
   *
   * Store the f64 value VAL into the f32 at byte offset IDX from
   * pointer PTR.
   */
  VM_DEFINE_OP (152, f32_set, "f32-set!", OP1 (X8_S8_S8_S8))
    PTR_SET (float, F64);

  /* f64-set! ptr:8 idx:8 val:8
   *
   * Store the f64 value VAL into the f64 at byte offset IDX from
   * pointer PTR.
   */
  VM_DEFINE_OP (153, f64_set, "f64-set!", OP1 (X8_S8_S8_S8))
    PTR_SET (double, F64);
  2687. /* bind-optionals nargs:24
  2688. *
  2689. * Expand the current frame to have NARGS locals, filling in any fresh
  2690. * values with SCM_UNDEFINED.
  2691. */
  2692. VM_DEFINE_OP (154, bind_optionals, "bind-optionals", DOP1 (X8_F24))
  2693. {
  2694. uint32_t nlocals, nargs;
  2695. UNPACK_24 (op, nlocals);
  2696. nargs = FRAME_LOCALS_COUNT ();
  2697. if (nargs < nlocals)
  2698. {
  2699. ALLOC_FRAME (nlocals);
  2700. while (nargs < nlocals)
  2701. FP_SET (nargs++, SCM_UNDEFINED);
  2702. }
  2703. NEXT (1);
  2704. }
/* call-f64<-f64 dst:12 src:12 IDX:32
   *
   * Call the double-returning intrinsic with index IDX, passing the
   * f64 local SRC as argument.  Place the double result in DST.
   */
  VM_DEFINE_OP (155, call_f64_from_f64, "call-f64<-f64", DOP2 (X8_S12_S12, C32))
  {
    uint16_t dst, src;
    scm_t_f64_from_f64_intrinsic intrinsic;
    UNPACK_12_12 (op, dst, src);
    intrinsic = intrinsics[ip[1]];
    /* We assume these instructions can't throw an exception, hence no
       SYNC_IP/CACHE_SP around the call.  */
    SP_SET_F64 (dst, intrinsic (SP_REF_F64 (src)));
    NEXT (2);
  }

  /* call-f64<-f64-f64 dst:8 a:8 b:8 IDX:32
   *
   * Call the double-returning intrinsic with index IDX, passing the
   * f64 locals A and B as arguments.  Place the double result in DST.
   */
  VM_DEFINE_OP (156, call_f64_from_f64_f64, "call-f64<-f64-f64", DOP2 (X8_S8_S8_S8, C32))
  {
    uint8_t dst, a, b;
    scm_t_f64_from_f64_f64_intrinsic intrinsic;
    UNPACK_8_8_8 (op, dst, a, b);
    intrinsic = intrinsics[ip[1]];
    /* We assume these instructions can't throw an exception.  */
    SP_SET_F64 (dst, intrinsic (SP_REF_F64 (a), SP_REF_F64 (b)));
    NEXT (2);
  }
/* allocate-pointerless-words dst:12 count:12
   *
   * Allocate a fresh object consisting of COUNT words and store it into
   * DST.  The result will not be traced by GC.  COUNT is a u64 local.
   */
  VM_DEFINE_OP (157, allocate_pointerless_words, "allocate-pointerless-words", DOP1 (X8_S12_S12))
  {
    uint16_t dst, size;
    UNPACK_12_12 (op, dst, size);
    /* The intrinsic allocates and may run GC, so sync the IP first.  */
    SYNC_IP ();
    SP_SET (dst, CALL_INTRINSIC (allocate_pointerless_words,
                                 (thread, SP_REF_U64 (size))));
    NEXT (1);
  }

  /* allocate-pointerless-words/immediate dst:12 count:12
   *
   * Allocate a fresh object consisting of COUNT words and store it into
   * DST.  The result will not be traced by GC.  COUNT is an immediate.
   */
  VM_DEFINE_OP (158, allocate_pointerless_words_immediate, "allocate-pointerless-words/immediate", DOP1 (X8_S12_C12))
  {
    uint16_t dst, size;
    UNPACK_12_12 (op, dst, size);
    SYNC_IP ();
    SP_SET (dst, CALL_INTRINSIC (allocate_pointerless_words, (thread, size)));
    NEXT (1);
  }
  2762. /* s64->f64 dst:12 src:12
  2763. *
  2764. * Convert an s64 value to a double-precision floating-point value.
  2765. */
  2766. VM_DEFINE_OP (159, s64_to_f64, "s64->f64", DOP1 (X8_S12_S12))
  2767. {
  2768. uint16_t dst, src;
  2769. UNPACK_12_12 (op, dst, src);
  2770. SP_SET_F64 (dst, (double) SP_REF_S64 (src));
  2771. NEXT (1);
  2772. }
/* call-scm-scm a:12 b:12 IDX:32
   *
   * Call the void-returning intrinsic with index IDX, passing SCM
   * locals A and B as arguments.
   */
  VM_DEFINE_OP (160, call_scm_scm, "call-scm-scm", OP2 (X8_S12_S12, C32))
  {
    uint16_t a, b;
    scm_t_scm_scm_intrinsic intrinsic;
    UNPACK_12_12 (op, a, b);
    intrinsic = intrinsics[ip[1]];
    /* The intrinsic may throw or allocate, so sync the IP first.  */
    SYNC_IP ();
    intrinsic (SP_REF (a), SP_REF (b));
    NEXT (2);
  }

  /* call-scm-scm-scm a:8 b:8 c:8 IDX:32
   *
   * Call the void-returning intrinsic with index IDX, passing SCM
   * locals A, B, and C as arguments.
   */
  VM_DEFINE_OP (161, call_scm_scm_scm, "call-scm-scm-scm", OP2 (X8_S8_S8_S8, C32))
  {
    uint8_t a, b, c;
    scm_t_scm_scm_scm_intrinsic intrinsic;
    UNPACK_8_8_8 (op, a, b, c);
    intrinsic = intrinsics[ip[1]];
    SYNC_IP ();
    intrinsic (SP_REF (a), SP_REF (b), SP_REF (c));
    NEXT (2);
  }

  /* call-scm-uimm-scm a:8 b:8 c:8 IDX:32
   *
   * Call the void-returning intrinsic with index IDX, passing SCM
   * local A, the unsigned 8-bit immediate B, and SCM local C as
   * arguments.
   */
  VM_DEFINE_OP (162, call_scm_uimm_scm, "call-scm-uimm-scm", OP2 (X8_S8_C8_S8, C32))
  {
    uint8_t a, b, c;
    scm_t_scm_uimm_scm_intrinsic intrinsic;
    UNPACK_8_8_8 (op, a, b, c);
    intrinsic = intrinsics[ip[1]];
    SYNC_IP ();
    intrinsic (SP_REF (a), b, SP_REF (c));
    NEXT (2);
  }
/* jtable idx:24 len:32 (_:8 offset:24)...
   *
   * Branch to an entry in a table, as in C's switch statement.  IDX is
   * a u64 local, and the immediate LEN indicates the number of entries
   * in the table, and should be greater than or equal to 1.  The last
   * entry in the table is the "catch-all" entry.  The OFFSET... values
   * are in the usual L24 encoding, indicating a memory address as a
   * number of 32-bit words away from the current instruction pointer.
   */
  VM_DEFINE_OP (163, jtable, "jtable", OP2 (X8_S24, V32_X8_L24))
  {
    uint32_t idx, len;
    const uint32_t *offsets;
    UNPACK_24 (op, idx);
    len = ip[1];
    /* Table entries follow immediately after the length word.  */
    offsets = ip + 2;
    uint64_t i = SP_REF_U64 (idx);
    VM_ASSERT (len > 0, abort());
    /* Out-of-range indices are clamped to the final (catch-all) entry.  */
    int32_t offset = offsets[i < len ? i : len - 1];
    offset >>= 8; /* Sign-extending shift. */
    NEXT (offset);
  }
  2840. /* make-immediate dst:8 low-bits:16
  2841. *
  2842. * Make an immediate whose low bits are LOW-BITS, and whose top bits
  2843. * are sign-extended.
  2844. */
  2845. VM_DEFINE_OP (164, make_immediate, "make-immediate", DOP1 (X8_S8_ZI16))
  2846. {
  2847. uint8_t dst;
  2848. int16_t val;
  2849. UNPACK_8_16 (op, dst, val);
  2850. SP_SET (dst, SCM_PACK ((scm_t_signed_bits) val));
  2851. NEXT (1);
  2852. }
  2853. /* eq-immediate? a:8 low-bits:16
  2854. *
  2855. * Set the comparison result to EQUAL if the SCM value A is equal to
  2856. * the immediate whose low bits are LOW-BITS, and whose top bits are
  2857. * sign-extended.
  2858. */
  2859. VM_DEFINE_OP (165, eq_immediate, "eq-immediate?", OP1 (X8_S8_ZI16))
  2860. {
  2861. uint8_t a;
  2862. int16_t val;
  2863. UNPACK_8_16 (op, a, val);
  2864. if (scm_is_eq (SP_REF (a), SCM_PACK ((scm_t_signed_bits) val)))
  2865. VP->compare_result = SCM_F_COMPARE_EQUAL;
  2866. else
  2867. VP->compare_result = SCM_F_COMPARE_NONE;
  2868. NEXT (1);
  2869. }
/* call-scm<-scmn-scmn dst:24 a:32 b:32 idx:32
   *
   * Call the SCM-returning intrinsic with index IDX, passing the SCM
   * values A and B as arguments.  A and B are non-immediates, located
   * at a constant offset from the instruction.  Place the SCM result in
   * DST.
   */
  VM_DEFINE_OP (166, call_scm_from_scmn_scmn, "call-scm<-scmn-scmn", DOP4 (X8_S24, N32, N32, C32))
  {
    uint32_t dst;
    SCM a, b;
    scm_t_scm_from_scmn_scmn_intrinsic intrinsic;
    UNPACK_24 (op, dst);
    {
      /* The N32 operand is a signed 32-bit word offset from IP to the
         embedded non-immediate object.  */
      int32_t offset = ip[1];
      uint32_t* loc = ip + offset;
      scm_t_bits unpacked = (scm_t_bits) loc;
      /* A tagged non-immediate must be 8-byte aligned.  */
      VM_ASSERT (!(unpacked & 0x7), abort());
      a = SCM_PACK (unpacked);
    }
    {
      int32_t offset = ip[2];
      uint32_t* loc = ip + offset;
      scm_t_bits unpacked = (scm_t_bits) loc;
      VM_ASSERT (!(unpacked & 0x7), abort());
      b = SCM_PACK (unpacked);
    }
    intrinsic = intrinsics[ip[3]];
    /* The intrinsic may allocate or throw: sync before, re-cache after.  */
    SYNC_IP ();
    SCM res = intrinsic (a, b);
    CACHE_SP ();
    SP_SET (dst, res);
    NEXT (4);
  }
/* unreachable _:24
   *
   * Abort the process.  Guile's compiler emits these bytecodes where it
   * knows that control cannot continue, for example after a call to
   * non-continuing `raise-exception'.
   */
  VM_DEFINE_OP (167, unreachable, "unreachable", OP1 (X32))
  {
    abort (); /* never reached */
  }
/* Opcodes 168-255 are currently unassigned.  They share the error
     body below: all the NOP cases fall through into the block that
     signals a bad instruction and aborts.  */
  VM_DEFINE_OP (168, unused_168, NULL, NOP)
  VM_DEFINE_OP (169, unused_169, NULL, NOP)
  VM_DEFINE_OP (170, unused_170, NULL, NOP)
  VM_DEFINE_OP (171, unused_171, NULL, NOP)
  VM_DEFINE_OP (172, unused_172, NULL, NOP)
  VM_DEFINE_OP (173, unused_173, NULL, NOP)
  VM_DEFINE_OP (174, unused_174, NULL, NOP)
  VM_DEFINE_OP (175, unused_175, NULL, NOP)
  VM_DEFINE_OP (176, unused_176, NULL, NOP)
  VM_DEFINE_OP (177, unused_177, NULL, NOP)
  VM_DEFINE_OP (178, unused_178, NULL, NOP)
  VM_DEFINE_OP (179, unused_179, NULL, NOP)
  VM_DEFINE_OP (180, unused_180, NULL, NOP)
  VM_DEFINE_OP (181, unused_181, NULL, NOP)
  VM_DEFINE_OP (182, unused_182, NULL, NOP)
  VM_DEFINE_OP (183, unused_183, NULL, NOP)
  VM_DEFINE_OP (184, unused_184, NULL, NOP)
  VM_DEFINE_OP (185, unused_185, NULL, NOP)
  VM_DEFINE_OP (186, unused_186, NULL, NOP)
  VM_DEFINE_OP (187, unused_187, NULL, NOP)
  VM_DEFINE_OP (188, unused_188, NULL, NOP)
  VM_DEFINE_OP (189, unused_189, NULL, NOP)
  VM_DEFINE_OP (190, unused_190, NULL, NOP)
  VM_DEFINE_OP (191, unused_191, NULL, NOP)
  VM_DEFINE_OP (192, unused_192, NULL, NOP)
  VM_DEFINE_OP (193, unused_193, NULL, NOP)
  VM_DEFINE_OP (194, unused_194, NULL, NOP)
  VM_DEFINE_OP (195, unused_195, NULL, NOP)
  VM_DEFINE_OP (196, unused_196, NULL, NOP)
  VM_DEFINE_OP (197, unused_197, NULL, NOP)
  VM_DEFINE_OP (198, unused_198, NULL, NOP)
  VM_DEFINE_OP (199, unused_199, NULL, NOP)
  VM_DEFINE_OP (200, unused_200, NULL, NOP)
  VM_DEFINE_OP (201, unused_201, NULL, NOP)
  VM_DEFINE_OP (202, unused_202, NULL, NOP)
  VM_DEFINE_OP (203, unused_203, NULL, NOP)
  VM_DEFINE_OP (204, unused_204, NULL, NOP)
  VM_DEFINE_OP (205, unused_205, NULL, NOP)
  VM_DEFINE_OP (206, unused_206, NULL, NOP)
  VM_DEFINE_OP (207, unused_207, NULL, NOP)
  VM_DEFINE_OP (208, unused_208, NULL, NOP)
  VM_DEFINE_OP (209, unused_209, NULL, NOP)
  VM_DEFINE_OP (210, unused_210, NULL, NOP)
  VM_DEFINE_OP (211, unused_211, NULL, NOP)
  VM_DEFINE_OP (212, unused_212, NULL, NOP)
  VM_DEFINE_OP (213, unused_213, NULL, NOP)
  VM_DEFINE_OP (214, unused_214, NULL, NOP)
  VM_DEFINE_OP (215, unused_215, NULL, NOP)
  VM_DEFINE_OP (216, unused_216, NULL, NOP)
  VM_DEFINE_OP (217, unused_217, NULL, NOP)
  VM_DEFINE_OP (218, unused_218, NULL, NOP)
  VM_DEFINE_OP (219, unused_219, NULL, NOP)
  VM_DEFINE_OP (220, unused_220, NULL, NOP)
  VM_DEFINE_OP (221, unused_221, NULL, NOP)
  VM_DEFINE_OP (222, unused_222, NULL, NOP)
  VM_DEFINE_OP (223, unused_223, NULL, NOP)
  VM_DEFINE_OP (224, unused_224, NULL, NOP)
  VM_DEFINE_OP (225, unused_225, NULL, NOP)
  VM_DEFINE_OP (226, unused_226, NULL, NOP)
  VM_DEFINE_OP (227, unused_227, NULL, NOP)
  VM_DEFINE_OP (228, unused_228, NULL, NOP)
  VM_DEFINE_OP (229, unused_229, NULL, NOP)
  VM_DEFINE_OP (230, unused_230, NULL, NOP)
  VM_DEFINE_OP (231, unused_231, NULL, NOP)
  VM_DEFINE_OP (232, unused_232, NULL, NOP)
  VM_DEFINE_OP (233, unused_233, NULL, NOP)
  VM_DEFINE_OP (234, unused_234, NULL, NOP)
  VM_DEFINE_OP (235, unused_235, NULL, NOP)
  VM_DEFINE_OP (236, unused_236, NULL, NOP)
  VM_DEFINE_OP (237, unused_237, NULL, NOP)
  VM_DEFINE_OP (238, unused_238, NULL, NOP)
  VM_DEFINE_OP (239, unused_239, NULL, NOP)
  VM_DEFINE_OP (240, unused_240, NULL, NOP)
  VM_DEFINE_OP (241, unused_241, NULL, NOP)
  VM_DEFINE_OP (242, unused_242, NULL, NOP)
  VM_DEFINE_OP (243, unused_243, NULL, NOP)
  VM_DEFINE_OP (244, unused_244, NULL, NOP)
  VM_DEFINE_OP (245, unused_245, NULL, NOP)
  VM_DEFINE_OP (246, unused_246, NULL, NOP)
  VM_DEFINE_OP (247, unused_247, NULL, NOP)
  VM_DEFINE_OP (248, unused_248, NULL, NOP)
  VM_DEFINE_OP (249, unused_249, NULL, NOP)
  VM_DEFINE_OP (250, unused_250, NULL, NOP)
  VM_DEFINE_OP (251, unused_251, NULL, NOP)
  VM_DEFINE_OP (252, unused_252, NULL, NOP)
  VM_DEFINE_OP (253, unused_253, NULL, NOP)
  VM_DEFINE_OP (254, unused_254, NULL, NOP)
  VM_DEFINE_OP (255, unused_255, NULL, NOP)
    {
      vm_error_bad_instruction (op);
      abort (); /* never reached */
    }
  3006. END_DISPATCH_SWITCH;
  3007. }
  3008. #undef ABORT_HOOK
  3009. #undef ALIGNED_P
  3010. #undef APPLY_HOOK
  3011. #undef BEGIN_DISPATCH_SWITCH
  3012. #undef CACHE_REGISTER
  3013. #undef END_DISPATCH_SWITCH
  3014. #undef FP_REF
  3015. #undef FP_SET
  3016. #undef FP_SLOT
  3017. #undef SP_REF
  3018. #undef SP_SET
  3019. #undef NEXT
  3020. #undef NEXT_HOOK
  3021. #undef RETURN_HOOK
  3022. #undef RUN_HOOK
  3023. #undef SYNC_IP
  3024. #undef UNPACK_8_8_8
  3025. #undef UNPACK_8_16
  3026. #undef UNPACK_12_12
  3027. #undef UNPACK_24
  3028. #undef VM_DEFINE_OP
  3029. #undef VM_INSTRUCTION_TO_LABEL
  3030. #undef VM_USE_HOOKS
  3031. #undef VP
  3032. /*
  3033. (defun renumber-ops ()
  3034. "start from top of buffer and renumber 'VM_DEFINE_FOO (\n' sequences"
  3035. (interactive "")
  3036. (save-excursion
  3037. (let ((counter -1)) (goto-char (point-min))
  3038. (while (re-search-forward "^ *VM_DEFINE_[^ ]+ (\\([^,]+\\)," (point-max) t)
  3039. (replace-match
  3040. (number-to-string (setq counter (1+ counter)))
  3041. t t nil 1)))))
  3042. (renumber-ops)
  3043. */