
/* Copyright (C) 2016 Jeremiah Orians
 * Copyright (C) 2018 Jan (janneke) Nieuwenhuizen <janneke@gnu.org>
 * Copyright (C) 2020 deesix <deesix@tuta.io>
 * Copyright (C) 2021 Andrius Štikonas <andrius@stikonas.eu>
 * This file is part of M2-Planet.
 *
 * M2-Planet is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * M2-Planet is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with M2-Planet. If not, see <http://www.gnu.org/licenses/>.
 */

#include "cc.h"
#include "gcc_req.h"
#include <stdint.h>

/* Global lists */
struct token_list* global_symbol_list;
struct token_list* global_function_list;
struct token_list* global_constant_list;

/* Core lists for this file */
struct token_list* function;

/* What we are currently working on */
struct type* current_target;
char* break_target_head;
char* break_target_func;
char* break_target_num;
char* continue_target_head;
struct token_list* break_frame;
int current_count;
int Address_of;

/* Imported functions */
char* int2str(int x, int base, int signed_p);
int strtoint(char *a);
char* parse_string(char* string);
int escape_lookup(char* c);
void require(int bool, char* error);
struct token_list* reverse_list(struct token_list* head);
struct type* mirror_type(struct type* source, char* name);
struct type* add_primitive(struct type* a);
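
/* Output helpers: emit() prepends a node holding string s to a token_list and
 * returns the new head (the list is built newest-first); emit_out() does the
 * same for the global output_list.  uniqueID()/uniqueID_out() emit
 * "name_num\n", which is how unique labels are formed. */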

struct token_list* emit(char *s, struct token_list* head)
{
	struct token_list* t = calloc(1, sizeof(struct token_list));
	require(NULL != t, "Exhausted memory while generating token to emit\n");
	t->next = head;
	t->s = s;
	return t;
}

void emit_out(char* s)
{
	output_list = emit(s, output_list);
}

struct token_list* uniqueID(char* s, struct token_list* l, char* num)
{
	l = emit("\n", emit(num, emit("_", emit(s, l))));
	return l;
}

void uniqueID_out(char* s, char* num)
{
	output_list = uniqueID(s, output_list, num);
}
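
/* Symbol table helpers: sym_declare() conses a new (name, type) entry onto a
 * symbol list; sym_lookup() walks a list and returns the first entry whose
 * name matches, or NULL. */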

struct token_list* sym_declare(char *s, struct type* t, struct token_list* list)
{
	struct token_list* a = calloc(1, sizeof(struct token_list));
	require(NULL != a, "Exhausted memory while attempting to declare a symbol\n");
	a->next = list;
	a->s = s;
	a->type = t;
	return a;
}

struct token_list* sym_lookup(char *s, struct token_list* symbol_list)
{
	struct token_list* i;
	for(i = symbol_list; NULL != i; i = i->next)
	{
		if(match(i->s, s)) return i;
	}
	return NULL;
}
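
/* Error reporting: line_error_token()/line_error() print a "file:line:" prefix
 * to stderr; require_match() consumes the expected token or aborts;
 * maybe_bootstrap_error() rejects features that are unavailable in
 * --bootstrap-mode. */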

void line_error_token(struct token_list *token)
{
	if(NULL == token)
	{
		fputs("EOF reached inside of line_error\n", stderr);
		fputs("problem at end of file\n", stderr);
		return;
	}
	fputs(token->filename, stderr);
	fputs(":", stderr);
	fputs(int2str(token->linenumber, 10, TRUE), stderr);
	fputs(":", stderr);
}

void line_error()
{
	line_error_token(global_token);
}

void require_match(char* message, char* required)
{
	if(NULL == global_token)
	{
		line_error();
		fputs("EOF reached inside of require match\n", stderr);
		fputs("problem at end of file\n", stderr);
		fputs(message, stderr);
		exit(EXIT_FAILURE);
	}
	if(!match(global_token->s, required))
	{
		line_error();
		fputs(message, stderr);
		exit(EXIT_FAILURE);
	}
	global_token = global_token->next;
}

void maybe_bootstrap_error(char* feature)
{
	if (BOOTSTRAP_MODE)
	{
		line_error();
		fputs(feature, stderr);
		fputs(" is not supported in --bootstrap-mode\n", stderr);
		exit(EXIT_FAILURE);
	}
}
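
/* function_call(): emits a call.  It saves the caller's frame registers,
 * evaluates each comma-separated argument and pushes it on the stack, then
 * either calls indirectly through a local holding a function pointer
 * (bool == TRUE, with s being that local's stack offset) or calls the
 * FUNCTION_<name> label directly, and finally pops the arguments and
 * restores the saved registers. */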

void expression();
void function_call(char* s, int bool)
{
	require_match("ERROR in process_expression_list\nNo ( was found\n", "(");
	require(NULL != global_token, "Improper function call\n");
	int passed = 0;
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
	{
		emit_out("PUSHR R13 R15\t# Prevent overwriting in recursion\n");
		emit_out("PUSHR R14 R15\t# Protect the old base pointer\n");
		emit_out("COPY R13 R15\t# Copy new base pointer\n");
	}
	else if(X86 == Architecture)
	{
		emit_out("push_edi\t# Prevent overwriting in recursion\n");
		emit_out("push_ebp\t# Protect the old base pointer\n");
		emit_out("mov_edi,esp\t# Copy new base pointer\n");
	}
	else if(AMD64 == Architecture)
	{
		emit_out("push_rdi\t# Prevent overwriting in recursion\n");
		emit_out("push_rbp\t# Protect the old base pointer\n");
		emit_out("mov_rdi,rsp\t# Copy new base pointer\n");
	}
	else if(ARMV7L == Architecture)
	{
		emit_out("{R11} PUSH_ALWAYS\t# Prevent overwriting in recursion\n");
		emit_out("{BP} PUSH_ALWAYS\t# Protect the old base pointer\n");
		emit_out("'0' SP R11 NO_SHIFT MOVE_ALWAYS\t# Copy new base pointer\n");
	}
	else if(AARCH64 == Architecture)
	{
		emit_out("PUSH_X16\t# Protect a tmp register we're going to use\n");
		emit_out("PUSH_LR\t# Protect the old return pointer (link)\n");
		emit_out("PUSH_BP\t# Protect the old base pointer\n");
		emit_out("SET_X16_FROM_SP\t# The base pointer to-be\n");
	}
	else if(RISCV32 == Architecture)
	{
		emit_out("rd_sp rs1_sp !-12 addi\t# Allocate stack\n");
		emit_out("rs1_sp rs2_ra @4 sw\t# Protect the old return pointer\n");
		emit_out("rs1_sp rs2_fp sw\t# Protect the old frame pointer\n");
		emit_out("rs1_sp rs2_tp @8 sw\t# Protect temp register we are going to use\n");
		emit_out("rd_tp rs1_sp mv\t# The base pointer to-be\n");
	}
	else if(RISCV64 == Architecture)
	{
		emit_out("rd_sp rs1_sp !-24 addi\t# Allocate stack\n");
		emit_out("rs1_sp rs2_ra @8 sd\t# Protect the old return pointer\n");
		emit_out("rs1_sp rs2_fp sd\t# Protect the old frame pointer\n");
		emit_out("rs1_sp rs2_tp @16 sd\t# Protect temp register we are going to use\n");
		emit_out("rd_tp rs1_sp mv\t# The base pointer to-be\n");
	}
	if(global_token->s[0] != ')')
	{
		expression();
		require(NULL != global_token, "incomplete function call, received EOF instead of )\n");
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("PUSHR R0 R15\t#_process_expression1\n");
		else if(X86 == Architecture) emit_out("push_eax\t#_process_expression1\n");
		else if(AMD64 == Architecture) emit_out("push_rax\t#_process_expression1\n");
		else if(ARMV7L == Architecture) emit_out("{R0} PUSH_ALWAYS\t#_process_expression1\n");
		else if(AARCH64 == Architecture) emit_out("PUSH_X0\t#_process_expression1\n");
		else if(RISCV32 == Architecture) emit_out("rd_sp rs1_sp !-4 addi\nrs1_sp rs2_a0 sw\t#_process_expression1\n");
		else if(RISCV64 == Architecture) emit_out("rd_sp rs1_sp !-8 addi\nrs1_sp rs2_a0 sd\t#_process_expression1\n");
		passed = 1;
		while(global_token->s[0] == ',')
		{
			global_token = global_token->next;
			require(NULL != global_token, "incomplete function call, received EOF instead of argument\n");
			expression();
			if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("PUSHR R0 R15\t#_process_expression2\n");
			else if(X86 == Architecture) emit_out("push_eax\t#_process_expression2\n");
			else if(AMD64 == Architecture) emit_out("push_rax\t#_process_expression2\n");
			else if(ARMV7L == Architecture) emit_out("{R0} PUSH_ALWAYS\t#_process_expression2\n");
			else if(AARCH64 == Architecture) emit_out("PUSH_X0\t#_process_expression2\n");
			else if(RISCV32 == Architecture) emit_out("rd_sp rs1_sp !-4 addi\nrs1_sp rs2_a0 sw\t#_process_expression2\n");
			else if(RISCV64 == Architecture) emit_out("rd_sp rs1_sp !-8 addi\nrs1_sp rs2_a0 sd\t#_process_expression2\n");
			passed = passed + 1;
		}
	}
	require_match("ERROR in process_expression_list\nNo ) was found\n", ")");
	if(TRUE == bool)
	{
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
		{
			emit_out("LOAD R0 R14 ");
			emit_out(s);
			emit_out("\nMOVE R14 R13\n");
			emit_out("CALL R0 R15\n");
		}
		else if(X86 == Architecture)
		{
			emit_out("lea_eax,[ebp+DWORD] %");
			emit_out(s);
			emit_out("\nmov_eax,[eax]\n");
			emit_out("mov_ebp,edi\n");
			emit_out("call_eax\n");
		}
		else if(AMD64 == Architecture)
		{
			emit_out("lea_rax,[rbp+DWORD] %");
			emit_out(s);
			emit_out("\nmov_rax,[rax]\n");
			emit_out("mov_rbp,rdi\n");
			emit_out("call_rax\n");
		}
		else if(ARMV7L == Architecture)
		{
			emit_out("!");
			emit_out(s);
			emit_out(" R0 SUB BP ARITH_ALWAYS\n");
			emit_out("!0 R0 LOAD32 R0 MEMORY\n");
			emit_out("{LR} PUSH_ALWAYS\t# Protect the old link register\n");
			emit_out("'0' R11 BP NO_SHIFT MOVE_ALWAYS\n");
			emit_out("'3' R0 CALL_REG_ALWAYS\n");
			emit_out("{LR} POP_ALWAYS\t# Prevent overwrite\n");
		}
		else if(AARCH64 == Architecture)
		{
			emit_out("SET_X0_FROM_BP\n");
			emit_out("LOAD_W1_AHEAD\nSKIP_32_DATA\n%");
			emit_out(s);
			emit_out("\nSUB_X0_X0_X1\n");
			emit_out("DEREF_X0\n");
			emit_out("SET_BP_FROM_X16\n");
			emit_out("SET_X16_FROM_X0\n");
			emit_out("BLR_X16\n");
		}
		else if(RISCV32 == Architecture)
		{
			emit_out("rd_a0 rs1_fp !");
			emit_out(s);
			emit_out(" addi\n");
			emit_out("rd_a0 rs1_a0 lw\n");
			emit_out("rd_fp rs1_tp mv\n");
			emit_out("rd_ra rs1_a0 jalr\n");
		}
		else if(RISCV64 == Architecture)
		{
			emit_out("rd_a0 rs1_fp !");
			emit_out(s);
			emit_out(" addi\n");
			emit_out("rd_a0 rs1_a0 ld\n");
			emit_out("rd_fp rs1_tp mv\n");
			emit_out("rd_ra rs1_a0 jalr\n");
		}
	}
	else
	{
		if((KNIGHT_NATIVE == Architecture) || (KNIGHT_POSIX == Architecture))
		{
			emit_out("MOVE R14 R13\n");
			emit_out("LOADR R0 4\nJUMP 4\n&FUNCTION_");
			emit_out(s);
			emit_out("\nCALL R0 R15\n");
		}
		else if(X86 == Architecture)
		{
			emit_out("mov_ebp,edi\n");
			emit_out("call %FUNCTION_");
			emit_out(s);
			emit_out("\n");
		}
		else if(AMD64 == Architecture)
		{
			emit_out("mov_rbp,rdi\n");
			emit_out("call %FUNCTION_");
			emit_out(s);
			emit_out("\n");
		}
		else if(ARMV7L == Architecture)
		{
			emit_out("{LR} PUSH_ALWAYS\t# Protect the old link register\n");
			emit_out("'0' R11 BP NO_SHIFT MOVE_ALWAYS\n");
			emit_out("^~FUNCTION_");
			emit_out(s);
			emit_out(" CALL_ALWAYS\n");
			emit_out("{LR} POP_ALWAYS\t# Restore the old link register\n");
		}
		else if(AARCH64 == Architecture)
		{
			emit_out("SET_BP_FROM_X16\n");
			emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&FUNCTION_");
			emit_out(s);
			emit_out("\n");
			emit_out("BLR_X16\n");
		}
		else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
		{
			emit_out("rd_fp rs1_tp mv\n");
			emit_out("rd_ra $FUNCTION_");
			emit_out(s);
			emit_out(" jal\n");
		}
	}
	for(; passed > 0; passed = passed - 1)
	{
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("POPR R1 R15\t# _process_expression_locals\n");
		else if(X86 == Architecture) emit_out("pop_ebx\t# _process_expression_locals\n");
		else if(AMD64 == Architecture) emit_out("pop_rbx\t# _process_expression_locals\n");
		else if(ARMV7L == Architecture) emit_out("{R1} POP_ALWAYS\t# _process_expression_locals\n");
		else if(AARCH64 == Architecture) emit_out("POP_X1\t# _process_expression_locals\n");
		else if(RISCV32 == Architecture) emit_out("rd_a1 rs1_sp lw\t# _process_expression_locals\nrd_sp rs1_sp !4 addi\n");
		else if(RISCV64 == Architecture) emit_out("rd_a1 rs1_sp ld\t# _process_expression_locals\nrd_sp rs1_sp !8 addi\n");
	}
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
	{
		emit_out("POPR R14 R15\t# Restore old base pointer\n");
		emit_out("POPR R13 R15\t# Prevent overwrite\n");
	}
	else if(X86 == Architecture)
	{
		emit_out("pop_ebp\t# Restore old base pointer\n");
		emit_out("pop_edi\t# Prevent overwrite\n");
	}
	else if(AMD64 == Architecture)
	{
		emit_out("pop_rbp\t# Restore old base pointer\n");
		emit_out("pop_rdi\t# Prevent overwrite\n");
	}
	else if(ARMV7L == Architecture)
	{
		emit_out("{BP} POP_ALWAYS\t# Restore old base pointer\n");
		emit_out("{R11} POP_ALWAYS\t# Prevent overwrite\n");
	}
	else if(AARCH64 == Architecture)
	{
		emit_out("POP_BP\t# Restore the old base pointer\n");
		emit_out("POP_LR\t# Restore the old return pointer (link)\n");
		emit_out("POP_X16\t# Restore a register we used as tmp\n");
	}
	else if(RISCV32 == Architecture)
	{
		emit_out("rd_fp rs1_sp lw\t# Restore old frame pointer\n");
		emit_out("rd_tp rs1_sp !8 lw\t# Restore temp register\n");
		emit_out("rd_ra rs1_sp !4 lw\t# Restore return address\n");
		emit_out("rd_sp rs1_sp !12 addi\t# Deallocate stack\n");
	}
	else if(RISCV64 == Architecture)
	{
		emit_out("rd_fp rs1_sp ld\t# Restore old frame pointer\n");
		emit_out("rd_tp rs1_sp !16 ld\t# Restore temp register\n");
		emit_out("rd_ra rs1_sp !8 ld\t# Restore return address\n");
		emit_out("rd_sp rs1_sp !24 addi\t# Deallocate stack\n");
	}
}
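
/* constant_load(): loads the value of an entry from global_constant_list into
 * the accumulator register (R0/eax/rax/a0); RISC-V builds it from a
 * lui + addi(w) pair. */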

void constant_load(char* s)
{
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("LOADI R0 ");
	else if(X86 == Architecture) emit_out("mov_eax, %");
	else if(AMD64 == Architecture) emit_out("mov_rax, %");
	else if(ARMV7L == Architecture) emit_out("!0 R0 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n%");
	else if(AARCH64 == Architecture) emit_out("LOAD_W0_AHEAD\nSKIP_32_DATA\n%");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
	{
		emit_out("rd_a0 ~");
		emit_out(s);
		emit_out(" lui\nrd_a0 rs1_a0 !");
	}
	emit_out(s);
	if(RISCV32 == Architecture) emit_out(" addi\n");
	else if(RISCV64 == Architecture) emit_out(" addiw\n");
	emit_out("\n");
}
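
/* load_value_signed()/load_value_unsigned() return the architecture-specific
 * instruction string that dereferences the address in the accumulator into
 * the accumulator for a 1, 2, 4 or 8 byte value; load_value() selects between
 * them based on signedness. */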

char* load_value_signed(unsigned size)
{
	if(size == 1)
	{
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "LOAD8 R0 R0 0\n";
		else if(X86 == Architecture) return "movsx_eax,BYTE_PTR_[eax]\n";
		else if(AMD64 == Architecture) return "movsx_rax,BYTE_PTR_[rax]\n";
		else if(ARMV7L == Architecture) return "LOADS8 R0 LOAD R0 HALF_MEMORY\n";
		else if(AARCH64 == Architecture) return "LDRSB_X0_[X0]\n";
		else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) return "rd_a0 rs1_a0 lb\n";
	}
	else if(size == 2)
	{
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "LOAD16 R0 R0 0\n";
		else if(X86 == Architecture) return "movsx_eax,WORD_PTR_[eax]\n";
		else if(AMD64 == Architecture) return "movsx_rax,WORD_PTR_[rax]\n";
		else if(ARMV7L == Architecture) return "LOADS16 R0 LOAD R0 HALF_MEMORY\n";
		else if(AARCH64 == Architecture) return "LDRSH_X0_[X0]\n";
		else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) return "rd_a0 rs1_a0 lh\n";
	}
	else if(size == 4)
	{
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "LOAD R0 R0 0\n";
		else if(X86 == Architecture) return "mov_eax,[eax]\n";
		else if(AMD64 == Architecture) return "movsx_rax,DWORD_PTR_[rax]\n";
		else if(ARMV7L == Architecture) return "!0 R0 LOAD32 R0 MEMORY\n";
		else if(AARCH64 == Architecture) return "LDR_W0_[X0]\n";
		else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) return "rd_a0 rs1_a0 lw\n";
	}
	else if(size == 8)
	{
		if(AMD64 == Architecture) return "mov_rax,[rax]\n";
		else if(AARCH64 == Architecture) return "DEREF_X0\n";
		else if(RISCV64 == Architecture) return "rd_a0 rs1_a0 ld\n";
	}
	line_error();
	fputs(" Got unsupported size ", stderr);
	fputs(int2str(size, 10, TRUE), stderr);
	fputs(" when trying to load value.\n", stderr);
	exit(EXIT_FAILURE);
}

char* load_value_unsigned(unsigned size)
{
	if(size == 1)
	{
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "LOADU8 R0 R0 0\n";
		else if(X86 == Architecture) return "movzx_eax,BYTE_PTR_[eax]\n";
		else if(AMD64 == Architecture) return "movzx_rax,BYTE_PTR_[rax]\n";
		else if(ARMV7L == Architecture) return "!0 R0 LOAD R0 MEMORY\n";
		else if(AARCH64 == Architecture) return "DEREF_X0_BYTE\n";
		else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) return "rd_a0 rs1_a0 lbu\n";
	}
	else if(size == 2)
	{
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "LOADU16 R0 R0 0\n";
		else if(X86 == Architecture) return "movzx_eax,WORD_PTR_[eax]\n";
		else if(AMD64 == Architecture) return "movzx_rax,WORD_PTR_[rax]\n";
		else if(ARMV7L == Architecture) return "NO_OFFSET R0 LOAD R0 HALF_MEMORY\n";
		else if(AARCH64 == Architecture) return "LDRH_W0_[X0]\n";
		else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) return "rd_a0 rs1_a0 lhu\n";
	}
	else if(size == 4)
	{
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "LOAD R0 R0 0\n";
		else if(X86 == Architecture) return "mov_eax,[eax]\n";
		else if(AMD64 == Architecture) return "mov_eax,[rax]\n";
		else if(ARMV7L == Architecture) return "!0 R0 LOAD32 R0 MEMORY\n";
		else if(AARCH64 == Architecture) return "LDR_W0_[X0]\n";
		else if(RISCV32 == Architecture) return "rd_a0 rs1_a0 lw\n";
		else if(RISCV64 == Architecture) return "rd_a0 rs1_a0 lwu\n";
	}
	else if(size == 8)
	{
		if(AMD64 == Architecture) return "mov_rax,[rax]\n";
		else if(AARCH64 == Architecture) return "DEREF_X0\n";
		else if(RISCV64 == Architecture) return "rd_a0 rs1_a0 ld\n";
	}
	line_error();
	fputs(" Got unsupported size ", stderr);
	fputs(int2str(size, 10, TRUE), stderr);
	fputs(" when trying to load value.\n", stderr);
	exit(EXIT_FAILURE);
}

char* load_value(unsigned size, int is_signed)
{
	if(is_signed) return load_value_signed(size);
	return load_value_unsigned(size);
}
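
/* store_value(): the mirror of load_value(); returns the instruction string
 * that stores the accumulator (R0/eax/rax/a0) into the address held in the
 * secondary register (R1/ebx/rbx/a1) for a given size. */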

char* store_value(unsigned size)
{
	if(size == 1)
	{
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "STORE8 R0 R1 0\n";
		else if(X86 == Architecture) return "mov_[ebx],al\n";
		else if(AMD64 == Architecture) return "mov_[rbx],al\n";
		else if(ARMV7L == Architecture) return "!0 R0 STORE8 R1 MEMORY\n";
		else if(AARCH64 == Architecture) return "STR_BYTE_W0_[X1]\n";
		else if(RISCV32 == Architecture) return "rs1_a1 rs2_a0 sb\n";
		else if(RISCV64 == Architecture) return "rs1_a1 rs2_a0 sb\n";
	}
	else if(size == 2)
	{
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "STORE16 R0 R1 0\n";
		else if(X86 == Architecture) return "mov_[ebx],ax\n";
		else if(AMD64 == Architecture) return "mov_[rbx],ax\n";
		else if(ARMV7L == Architecture) return "NO_OFFSET R0 STORE16 R1 HALF_MEMORY\n";
		else if(AARCH64 == Architecture) return "STRH_W0_[X1]\n";
		else if(RISCV32 == Architecture || RISCV64 == Architecture) return "rs1_a1 rs2_a0 sh\n";
	}
	else if(size == 4)
	{
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "STORE R0 R1 0\n";
		else if(X86 == Architecture) return "mov_[ebx],eax\n";
		else if(AMD64 == Architecture) return "mov_[rbx],eax\n";
		else if(ARMV7L == Architecture) return "!0 R0 STORE32 R1 MEMORY\n";
		else if(AARCH64 == Architecture) return "STR_W0_[X1]\n";
		else if(RISCV32 == Architecture || RISCV64 == Architecture) return "rs1_a1 rs2_a0 sw\n";
	}
	else if(size == 8)
	{
		if(AMD64 == Architecture) return "mov_[rbx],rax\n";
		else if(AARCH64 == Architecture) return "STR_X0_[X1]\n";
		else if(RISCV64 == Architecture) return "rs1_a1 rs2_a0 sd\n";
	}
	/* Should not happen but print error message. */
	fputs("Got unsupported size ", stderr);
	fputs(int2str(size, 10, TRUE), stderr);
	fputs(" when storing number in register.\n", stderr);
	line_error();
	exit(EXIT_FAILURE);
}

int is_compound_assignment(char* token)
{
	if(match("+=", token)) return TRUE;
	else if(match("-=", token)) return TRUE;
	else if(match("*=", token)) return TRUE;
	else if(match("/=", token)) return TRUE;
	else if(match("%=", token)) return TRUE;
	else if(match("<<=", token)) return TRUE;
	else if(match(">>=", token)) return TRUE;
	else if(match("&=", token)) return TRUE;
	else if(match("^=", token)) return TRUE;
	else if(match("|=", token)) return TRUE;
	return FALSE;
}
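
/* variable_load(): computes the stack address of a local or argument from its
 * depth relative to the base pointer, then, unless the address itself is
 * wanted (Address_of) or an assignment follows, dereferences it with
 * load_value(); leading '*' tokens add further dereferences.  A variable of
 * FUNCTION type followed by '(' is dispatched to function_call(). */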

void postfix_expr_stub();
void variable_load(struct token_list* a, int num_dereference)
{
	require(NULL != global_token, "incomplete variable load received\n");
	if((match("FUNCTION", a->type->name) || match("FUNCTION*", a->type->name)) && match("(", global_token->s))
	{
		function_call(int2str(a->depth, 10, TRUE), TRUE);
		return;
	}
	current_target = a->type;
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("ADDI R0 R14 ");
	else if(X86 == Architecture) emit_out("lea_eax,[ebp+DWORD] %");
	else if(AMD64 == Architecture) emit_out("lea_rax,[rbp+DWORD] %");
	else if(ARMV7L == Architecture) emit_out("!");
	else if(AARCH64 == Architecture) emit_out("SET_X0_FROM_BP\nLOAD_W1_AHEAD\nSKIP_32_DATA\n%");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 rs1_fp !");
	emit_out(int2str(a->depth, 10, TRUE));
	if(ARMV7L == Architecture) emit_out(" R0 SUB BP ARITH_ALWAYS");
	else if(AARCH64 == Architecture) emit_out("\nSUB_X0_X0_X1\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out(" addi");
	emit_out("\n");
	if(TRUE == Address_of) return;
	if(match(".", global_token->s))
	{
		postfix_expr_stub();
		return;
	}
	if(!match("=", global_token->s) && !is_compound_assignment(global_token->s))
	{
		emit_out(load_value(current_target->size, current_target->is_signed));
	}
	while (num_dereference > 0)
	{
		current_target = current_target->type;
		emit_out(load_value(current_target->size, current_target->is_signed));
		num_dereference = num_dereference - 1;
	}
}
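
/* function_load()/global_load(): load the address of a FUNCTION_ or GLOBAL_
 * label into the accumulator (RISC-V uses an auipc + addi pair); global_load()
 * additionally dereferences the address unless it is being assigned to or its
 * address is being taken. */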

void function_load(struct token_list* a)
{
	require(NULL != global_token, "incomplete function load\n");
	if(match("(", global_token->s))
	{
		function_call(a->s, FALSE);
		return;
	}
	if((KNIGHT_NATIVE == Architecture) || (KNIGHT_POSIX == Architecture)) emit_out("LOADR R0 4\nJUMP 4\n&FUNCTION_");
	else if(X86 == Architecture) emit_out("mov_eax, &FUNCTION_");
	else if(AMD64 == Architecture) emit_out("lea_rax,[rip+DWORD] %FUNCTION_");
	else if(ARMV7L == Architecture) emit_out("!0 R0 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n&FUNCTION_");
	else if(AARCH64 == Architecture) emit_out("LOAD_W0_AHEAD\nSKIP_32_DATA\n&FUNCTION_");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 ~FUNCTION_");
	emit_out(a->s);
	if(RISCV32 == Architecture)
	{
		emit_out(" auipc\n");
		emit_out("rd_a0 rs1_a0 !FUNCTION_");
		emit_out(a->s);
		emit_out(" addi");
	}
	else if(RISCV64 == Architecture)
	{
		emit_out(" auipc\n");
		emit_out("rd_a0 rs1_a0 !FUNCTION_");
		emit_out(a->s);
		emit_out(" addiw");
	}
	emit_out("\n");
}

void global_load(struct token_list* a)
{
	current_target = a->type;
	if((KNIGHT_NATIVE == Architecture) || (KNIGHT_POSIX == Architecture)) emit_out("LOADR R0 4\nJUMP 4\n&GLOBAL_");
	else if(X86 == Architecture) emit_out("mov_eax, &GLOBAL_");
	else if(AMD64 == Architecture) emit_out("lea_rax,[rip+DWORD] %GLOBAL_");
	else if(ARMV7L == Architecture) emit_out("!0 R0 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n&GLOBAL_");
	else if(AARCH64 == Architecture) emit_out("LOAD_W0_AHEAD\nSKIP_32_DATA\n&GLOBAL_");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 ~GLOBAL_");
	emit_out(a->s);
	if((RISCV32 == Architecture) || (RISCV64 == Architecture))
	{
		emit_out(" auipc\n");
		emit_out("rd_a0 rs1_a0 !GLOBAL_");
		emit_out(a->s);
		emit_out(" addi");
	}
	emit_out("\n");
	require(NULL != global_token, "unterminated global load\n");
	if(TRUE == Address_of) return;
	if(match(".", global_token->s))
	{
		postfix_expr_stub();
		return;
	}
	if(match("=", global_token->s) || is_compound_assignment(global_token->s)) return;
	emit_out(load_value(register_size, current_target->is_signed));
}

/*
 * primary-expr:
 * FAILURE
 * "String"
 * 'Char'
 * [0-9]*
 * [a-z,A-Z]*
 * ( expression )
 */

void primary_expr_failure()
{
	require(NULL != global_token, "hit EOF when expecting primary expression\n");
	line_error();
	fputs("Received ", stderr);
	fputs(global_token->s, stderr);
	fputs(" in primary_expr\n", stderr);
	exit(EXIT_FAILURE);
}
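
/* primary_expr_string(): emits a reference to a per-function
 * STRING_<func>_<n> label and appends the string data to strings_list;
 * adjacent string literals are concatenated into a single entry. */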

void primary_expr_string()
{
	char* number_string = int2str(current_count, 10, TRUE);
	current_count = current_count + 1;
	if((KNIGHT_NATIVE == Architecture) || (KNIGHT_POSIX == Architecture)) emit_out("LOADR R0 4\nJUMP 4\n&STRING_");
	else if(X86 == Architecture) emit_out("mov_eax, &STRING_");
	else if(AMD64 == Architecture) emit_out("lea_rax,[rip+DWORD] %STRING_");
	else if(ARMV7L == Architecture) emit_out("!0 R0 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n&STRING_");
	else if(AARCH64 == Architecture) emit_out("LOAD_W0_AHEAD\nSKIP_32_DATA\n&STRING_");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 ~STRING_");
	uniqueID_out(function->s, number_string);
	if((RISCV32 == Architecture) || (RISCV64 == Architecture))
	{
		emit_out("auipc\n");
		emit_out("rd_a0 rs1_a0 !STRING_");
		uniqueID_out(function->s, number_string);
		emit_out("addi\n");
	}
	/* The target */
	strings_list = emit(":STRING_", strings_list);
	strings_list = uniqueID(function->s, strings_list, number_string);
	/* catch case of just "foo" from segfaulting */
	require(NULL != global_token->next, "a string by itself is not valid C\n");
	/* Parse the string */
	if('"' != global_token->next->s[0])
	{
		strings_list = emit(parse_string(global_token->s), strings_list);
		global_token = global_token->next;
	}
	else
	{
		char* s = calloc(MAX_STRING, sizeof(char));
		/* prefix leading string */
		s[0] = '"';
		int i = 1;
		int j;
		while('"' == global_token->s[0])
		{
			/* Step past the leading '"' */
			j = 1;
			/* Copy the rest of the string as is */
			while(0 != global_token->s[j])
			{
				require(i < MAX_STRING, "concat string exceeded max string length\n");
				s[i] = global_token->s[j];
				i = i + 1;
				j = j + 1;
			}
			/* Move on to the next token */
			global_token = global_token->next;
			require(NULL != global_token, "multi-string null is not valid C\n");
		}
		/* Now use it */
		strings_list = emit(parse_string(s), strings_list);
	}
}

void primary_expr_char()
{
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("LOADI R0 ");
	else if(X86 == Architecture) emit_out("mov_eax, %");
	else if(AMD64 == Architecture) emit_out("mov_rax, %");
	else if(ARMV7L == Architecture) emit_out("!");
	else if(AARCH64 == Architecture) emit_out("LOAD_W0_AHEAD\nSKIP_32_DATA\n%");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 !");
	emit_out(int2str(escape_lookup(global_token->s + 1), 10, TRUE));
	if(ARMV7L == Architecture) emit_out(" R0 LOADI8_ALWAYS");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out(" addi");
	emit_out("\n");
	global_token = global_token->next;
}

int hex2char(int c)
{
	if((c >= 0) && (c <= 9)) return (c + 48);
	else if((c >= 10) && (c <= 15)) return (c + 55);
	else return -1;
}

char* number_to_hex(int a, int bytes)
{
	require(bytes > 0, "number to hex must have a positive number of bytes greater than zero\n");
	char* result = calloc(1 + (bytes << 1), sizeof(char));
	if(NULL == result)
	{
		fputs("calloc failed in number_to_hex\n", stderr);
		exit(EXIT_FAILURE);
	}
	int i = 0;
	int divisor = (bytes << 3);
	require(divisor > 0, "unexpected wrap around in number_to_hex\n");
	/* Simply collect numbers until divisor is gone */
	while(0 != divisor)
	{
		divisor = divisor - 4;
		result[i] = hex2char((a >> divisor) & 0xF);
		i = i + 1;
	}
	return result;
}
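
/* primary_expr_number(): loads an integer literal into the accumulator.
 * Knight and RISC-V pick an encoding based on the magnitude of the value;
 * e.g. on RISC-V a value that does not fit a 12-bit immediate is built with
 * lui + addi, and one that does not fit in 30 bits is assembled from a high
 * and a low part joined with slli/or. */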

void primary_expr_number(char* s)
{
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
	{
		int size = strtoint(s);
		if((32767 > size) && (size > -32768))
		{
			emit_out("LOADI R0 ");
			emit_out(s);
		}
		else
		{
			emit_out("LOADR R0 4\nJUMP 4\n'");
			emit_out(number_to_hex(size, register_size));
			emit_out("'");
		}
	}
	else if(X86 == Architecture)
	{
		emit_out("mov_eax, %");
		emit_out(s);
	}
	else if(AMD64 == Architecture)
	{
		emit_out("mov_rax, %");
		emit_out(s);
	}
	else if(ARMV7L == Architecture)
	{
		emit_out("!0 R0 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n%");
		emit_out(s);
	}
	else if(AARCH64 == Architecture)
	{
		emit_out("LOAD_W0_AHEAD\nSKIP_32_DATA\n%");
		emit_out(s);
	}
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
	{
		int size = strtoint(s);
		if((2047 > size) && (size > -2048))
		{
			emit_out("rd_a0 !");
			emit_out(s);
			emit_out(" addi");
		}
		else if (0 == (size >> 30))
		{
			emit_out("rd_a0 ~");
			emit_out(s);
			emit_out(" lui\n");
			emit_out("rd_a0 rs1_a0 !");
			emit_out(s);
			emit_out(" addi");
		}
		else
		{
			int high = size >> 30;
			int low = ((size >> 30) << 30) ^ size;
			emit_out("rd_a0 ~");
			emit_out(int2str(high, 10, TRUE));
			emit_out(" lui\n");
			emit_out("rd_a0 rs1_a0 !");
			emit_out(int2str(high, 10, TRUE));
			emit_out(" addi\n");
			emit_out("rd_a0 rs1_a0 rs2_x30 slli\n");
			emit_out("rd_t1 ~");
			emit_out(int2str(low, 10, TRUE));
			emit_out(" lui\n");
			emit_out("rd_t1 rs1_t1 !");
			emit_out(int2str(low, 10, TRUE));
			emit_out(" addi\n");
			emit_out("rd_a0 rs1_a0 rs2_t1 or\n");
		}
	}
	emit_out("\n");
}
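
/* primary_expr_variable(): resolves an identifier by searching, in order, the
 * constant list, the current function's locals, its arguments, the global
 * function list and the global symbol list, then dispatches to the matching
 * *_load() helper; an unknown name is a fatal error. */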

void primary_expr_variable()
{
	int num_dereference = 0;
	while(global_token->s[0] == '*')
	{
		global_token = global_token->next;
		require(NULL != global_token, "Walked off the end of a variable dereference\n");
		num_dereference = num_dereference + 1;
	}
	char* s = global_token->s;
	global_token = global_token->next;
	struct token_list* a = sym_lookup(s, global_constant_list);
	if(NULL != a)
	{
		constant_load(a->arguments->s);
		return;
	}
	a = sym_lookup(s, function->locals);
	if(NULL != a)
	{
		variable_load(a, num_dereference);
		return;
	}
	a = sym_lookup(s, function->arguments);
	if(NULL != a)
	{
		variable_load(a, num_dereference);
		return;
	}
	a = sym_lookup(s, global_function_list);
	if(NULL != a)
	{
		function_load(a);
		return;
	}
	a = sym_lookup(s, global_symbol_list);
	if(NULL != a)
	{
		global_load(a);
		return;
	}
	line_error();
	fputs(s, stderr);
	fputs(" is not a defined symbol\n", stderr);
	exit(EXIT_FAILURE);
}

void primary_expr();
struct type* promote_type(struct type* a, struct type* b)
{
	require(NULL != b, "impossible case 1 in promote_type\n");
	require(NULL != a, "impossible case 2 in promote_type\n");
	if(a == b) return a;
	struct type* i;
	for(i = global_types; NULL != i; i = i->next)
	{
		if(a->name == i->name) break;
		if(b->name == i->name) break;
		if(a->name == i->indirect->name) break;
		if(b->name == i->indirect->name) break;
		if(a->name == i->indirect->indirect->name) break;
		if(b->name == i->indirect->indirect->name) break;
	}
	require(NULL != i, "impossible case 3 in promote_type\n");
	return i;
}
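
/* Binary operator scaffolding: common_recursion() pushes the left operand
 * (the accumulator), parses the right operand via f(), promotes the result
 * type, and pops the left operand into the secondary register
 * (R1/ebx/rbx/a1).  general_recursion() then emits one fixed instruction
 * string, while arithmetic_recursion() picks the signed (s1) or unsigned (s2)
 * variant based on current_target. */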

void common_recursion(FUNCTION f)
{
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("PUSHR R0 R15\t#_common_recursion\n");
	else if(X86 == Architecture) emit_out("push_eax\t#_common_recursion\n");
	else if(AMD64 == Architecture) emit_out("push_rax\t#_common_recursion\n");
	else if(ARMV7L == Architecture) emit_out("{R0} PUSH_ALWAYS\t#_common_recursion\n");
	else if(AARCH64 == Architecture) emit_out("PUSH_X0\t#_common_recursion\n");
	else if(RISCV32 == Architecture) emit_out("rd_sp rs1_sp !-4 addi\t# _common_recursion\nrs1_sp rs2_a0 sw\n");
	else if(RISCV64 == Architecture) emit_out("rd_sp rs1_sp !-8 addi\t# _common_recursion\nrs1_sp rs2_a0 sd\n");
	struct type* last_type = current_target;
	global_token = global_token->next;
	require(NULL != global_token, "Received EOF in common_recursion\n");
	f();
	current_target = promote_type(current_target, last_type);
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("POPR R1 R15\t# _common_recursion\n");
	else if(X86 == Architecture) emit_out("pop_ebx\t# _common_recursion\n");
	else if(AMD64 == Architecture) emit_out("pop_rbx\t# _common_recursion\n");
	else if(ARMV7L == Architecture) emit_out("{R1} POP_ALWAYS\t# _common_recursion\n");
	else if(AARCH64 == Architecture) emit_out("POP_X1\t# _common_recursion\n");
	else if(RISCV32 == Architecture) emit_out("rd_a1 rs1_sp lw\nrd_sp rs1_sp !4 addi\t# _common_recursion\n");
	else if(RISCV64 == Architecture) emit_out("rd_a1 rs1_sp ld\nrd_sp rs1_sp !8 addi\t# _common_recursion\n");
}

void general_recursion(FUNCTION f, char* s, char* name, FUNCTION iterate)
{
	require(NULL != global_token, "Received EOF in general_recursion\n");
	if(match(name, global_token->s))
	{
		common_recursion(f);
		emit_out(s);
		iterate();
	}
}

void arithmetic_recursion(FUNCTION f, char* s1, char* s2, char* name, FUNCTION iterate)
{
	require(NULL != global_token, "Received EOF in arithmetic_recursion\n");
	if(match(name, global_token->s))
	{
		common_recursion(f);
		if(NULL == current_target)
		{
			emit_out(s1);
		}
		else if(current_target->is_signed)
		{
			emit_out(s1);
		}
		else
		{
			emit_out(s2);
		}
		iterate();
	}
}

/*
 * postfix-expr:
 * primary-expr
 * postfix-expr [ expression ]
 * postfix-expr ( expression-list-opt )
 * postfix-expr -> member
 * postfix-expr . member
 */
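
/* postfix_expr_arrow()/postfix_expr_dot(): look up the named member of the
 * struct type in current_target, add its offset to the address in the
 * accumulator, and load the member value unless an assignment (or, for '.',
 * an indexing '[') follows.  postfix_expr_array() scales the index by the
 * element size and adds it to the base address. */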

struct type* lookup_member(struct type* parent, char* name);
void postfix_expr_arrow()
{
	emit_out("# looking up offset\n");
	global_token = global_token->next;
	require(NULL != global_token, "naked -> not allowed\n");
	struct type* i = lookup_member(current_target, global_token->s);
	current_target = i->type;
	global_token = global_token->next;
	require(NULL != global_token, "Unterminated -> expression not allowed\n");
	if(0 != i->offset)
	{
		emit_out("# -> offset calculation\n");
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
		{
			emit_out("ADDUI R0 R0 ");
			emit_out(int2str(i->offset, 10, TRUE));
			emit_out("\n");
		}
		else if(X86 == Architecture)
		{
			emit_out("mov_ebx, %");
			emit_out(int2str(i->offset, 10, TRUE));
			emit_out("\nadd_eax,ebx\n");
		}
		else if(AMD64 == Architecture)
		{
			emit_out("mov_rbx, %");
			emit_out(int2str(i->offset, 10, TRUE));
			emit_out("\nadd_rax,rbx\n");
		}
		else if(ARMV7L == Architecture)
		{
			emit_out("!0 R1 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n%");
			emit_out(int2str(i->offset, 10, TRUE));
			emit_out("\n'0' R0 R0 ADD R1 ARITH2_ALWAYS\n");
		}
		else if(AARCH64 == Architecture)
		{
			emit_out("LOAD_W1_AHEAD\nSKIP_32_DATA\n%");
			emit_out(int2str(i->offset, 10, TRUE));
			emit_out("\nADD_X0_X1_X0\n");
		}
		else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
		{
			emit_out("rd_a1 !");
			emit_out(int2str(i->offset, 10, TRUE));
			emit_out(" addi\n");
			emit_out("rd_a0 rs1_a1 rs2_a0 add\n");
		}
	}
	/* We don't yet support assigning structs to structs */
	if((!match("=", global_token->s) && !is_compound_assignment(global_token->s) && (register_size >= i->size)))
	{
		emit_out(load_value(i->size, i->is_signed));
	}
}

void postfix_expr_dot()
{
	maybe_bootstrap_error("Member access using .");
	emit_out("# looking up offset\n");
	global_token = global_token->next;
	require(NULL != global_token, "naked . not allowed\n");
	struct type* i = lookup_member(current_target, global_token->s);
	current_target = i->type;
	global_token = global_token->next;
	require(NULL != global_token, "Unterminated . expression not allowed\n");
	if(0 != i->offset)
	{
		emit_out("# . offset calculation\n");
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
		{
			emit_out("ADDUI R0 R0 ");
			emit_out(int2str(i->offset, 10, TRUE));
			emit_out("\n");
		}
		else if(X86 == Architecture)
		{
			emit_out("mov_ebx, %");
			emit_out(int2str(i->offset, 10, TRUE));
			emit_out("\nadd_eax,ebx\n");
		}
		else if(AMD64 == Architecture)
		{
			emit_out("mov_rbx, %");
			emit_out(int2str(i->offset, 10, TRUE));
			emit_out("\nadd_rax,rbx\n");
		}
		else if(ARMV7L == Architecture)
		{
			emit_out("!0 R1 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n%");
			emit_out(int2str(i->offset, 10, TRUE));
			emit_out("\n'0' R0 R0 ADD R1 ARITH2_ALWAYS\n");
		}
		else if(AARCH64 == Architecture)
		{
			emit_out("LOAD_W1_AHEAD\nSKIP_32_DATA\n%");
			emit_out(int2str(i->offset, 10, TRUE));
			emit_out("\nADD_X0_X1_X0\n");
		}
		else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
		{
			emit_out("rd_a1 !");
			emit_out(int2str(i->offset, 10, TRUE));
			emit_out(" addi\n");
			emit_out("rd_a0 rs1_a1 rs2_a0 add\n");
		}
	}
	if(match("=", global_token->s) || is_compound_assignment(global_token->s)) return;
	if(match("[", global_token->s)) return;
	emit_out(load_value(current_target->size, current_target->is_signed));
}

void postfix_expr_array()
{
	struct type* array = current_target;
	common_recursion(expression);
	current_target = array;
	require(NULL != current_target, "Arrays only apply to variables\n");
	char* assign = load_value(register_size, current_target->is_signed);
	/* Add support for Ints */
	if(match("char*", current_target->name))
	{
		assign = load_value(1, TRUE);
	}
	else
	{
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("PUSHR R1 R15\nLOADI R1 ");
		else if(X86 == Architecture) emit_out("push_ebx\nmov_ebx, %");
		else if(AMD64 == Architecture) emit_out("push_rbx\nmov_rbx, %");
		else if(ARMV7L == Architecture) emit_out("{R1} PUSH_ALWAYS\n!0 R1 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n%");
		else if(AARCH64 == Architecture) emit_out("PUSH_X1\nLOAD_W1_AHEAD\nSKIP_32_DATA\n%");
		else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a2 rs1_a1 addi\nrd_a1 !");
		emit_out(int2str(current_target->type->size, 10, TRUE));
		if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out(" addi");
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("\nMULU R0 R1 R0\nPOPR R1 R15\n");
		else if(X86 == Architecture) emit_out("\nmul_ebx\npop_ebx\n");
		else if(AMD64 == Architecture) emit_out("\nmul_rbx\npop_rbx\n");
		else if(ARMV7L == Architecture) emit_out("\n'9' R0 '0' R1 MUL R0 ARITH2_ALWAYS\n{R1} POP_ALWAYS\n");
		else if(AARCH64 == Architecture) emit_out("\nMUL_X0_X1_X0\nPOP_X1\n");
		else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("\nrd_a0 rs1_a1 rs2_a0 mul\nrd_a1 rs1_a2 addi\n");
	}
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("ADD R0 R0 R1\n");
	else if(X86 == Architecture) emit_out("add_eax,ebx\n");
	else if(AMD64 == Architecture) emit_out("add_rax,rbx\n");
	else if(ARMV7L == Architecture) emit_out("'0' R0 R0 ADD R1 ARITH2_ALWAYS\n");
	else if(AARCH64 == Architecture) emit_out("ADD_X0_X1_X0\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 rs1_a1 rs2_a0 add\n");
	require_match("ERROR in postfix_expr\nMissing ]\n", "]");
	require(NULL != global_token, "truncated array expression\n");
	if(match("=", global_token->s) || is_compound_assignment(global_token->s) || match(".", global_token->s))
	{
		assign = "";
	}
	if(match("[", global_token->s))
	{
		current_target = current_target->type;
	}
	emit_out(assign);
}

/*
 * unary-expr:
 * &postfix-expr
 * - postfix-expr
 * !postfix-expr
 * sizeof ( type )
 */
  1120. struct type* type_name();
  1121. void unary_expr_sizeof()
  1122. {
  1123. global_token = global_token->next;
  1124. require(NULL != global_token, "Received EOF when starting sizeof\n");
  1125. require_match("ERROR in unary_expr\nMissing (\n", "(");
  1126. struct type* a = type_name();
  1127. require_match("ERROR in unary_expr\nMissing )\n", ")");
  1128. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("LOADUI R0 ");
  1129. else if(X86 == Architecture) emit_out("mov_eax, %");
  1130. else if(AMD64 == Architecture) emit_out("mov_rax, %");
  1131. else if(ARMV7L == Architecture) emit_out("!0 R0 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n%");
  1132. else if(AARCH64 == Architecture) emit_out("LOAD_W0_AHEAD\nSKIP_32_DATA\n%");
  1133. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 !");
  1134. emit_out(int2str(a->size, 10, TRUE));
  1135. if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out(" addi");
  1136. emit_out("\n");
  1137. }
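/* Handle any chain of [], -> and . suffixes that follows a primary expression */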
  1138. void postfix_expr_stub()
  1139. {
  1140. require(NULL != global_token, "Unexpected EOF, improperly terminated primary expression\n");
  1141. if(match("[", global_token->s))
  1142. {
  1143. postfix_expr_array();
  1144. postfix_expr_stub();
  1145. }
  1146. if(match("->", global_token->s))
  1147. {
  1148. postfix_expr_arrow();
  1149. postfix_expr_stub();
  1150. }
  1151. if(match(".", global_token->s))
  1152. {
  1153. postfix_expr_dot();
  1154. postfix_expr_stub();
  1155. }
  1156. }
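/* postfix-expr: a primary-expr followed by any postfix operators */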
  1157. void postfix_expr()
  1158. {
  1159. primary_expr();
  1160. postfix_expr_stub();
  1161. }
  1162. /*
  1163. * additive-expr:
  1164. * postfix-expr
  1165. * additive-expr * postfix-expr
  1166. * additive-expr / postfix-expr
  1167. * additive-expr % postfix-expr
  1168. * additive-expr + postfix-expr
  1169. * additive-expr - postfix-expr
  1170. * additive-expr << postfix-expr
  1171. * additive-expr >> postfix-expr
  1172. */
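/*
* Implemented in three tiers: additive_expr_stub_a handles * / %,
* additive_expr_stub_b handles + and -, and additive_expr_stub_c handles << and >>,
* so for example 1 + 2 * 3 << 4 is parsed as ((1 + (2 * 3)) << 4).
*/
/* First tier: * and / and % */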
  1173. void additive_expr_stub_a()
  1174. {
  1175. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1176. {
  1177. arithmetic_recursion(postfix_expr, "MUL R0 R1 R0\n", "MULU R0 R1 R0\n", "*", additive_expr_stub_a);
  1178. arithmetic_recursion(postfix_expr, "DIV R0 R1 R0\n", "DIVU R0 R1 R0\n", "/", additive_expr_stub_a);
  1179. arithmetic_recursion(postfix_expr, "MOD R0 R1 R0\n", "MODU R0 R1 R0\n", "%", additive_expr_stub_a);
  1180. }
  1181. else if(X86 == Architecture)
  1182. {
  1183. arithmetic_recursion(postfix_expr, "imul_ebx\n", "mul_ebx\n", "*", additive_expr_stub_a);
  1184. arithmetic_recursion(postfix_expr, "xchg_ebx,eax\ncdq\nidiv_ebx\n", "xchg_ebx,eax\nmov_edx, %0\ndiv_ebx\n", "/", additive_expr_stub_a);
  1185. arithmetic_recursion(postfix_expr, "xchg_ebx,eax\ncdq\nidiv_ebx\nmov_eax,edx\n", "xchg_ebx,eax\nmov_edx, %0\ndiv_ebx\nmov_eax,edx\n", "%", additive_expr_stub_a);
  1186. }
  1187. else if(AMD64 == Architecture)
  1188. {
  1189. arithmetic_recursion(postfix_expr, "imul_rbx\n", "mul_rbx\n", "*", additive_expr_stub_a);
  1190. arithmetic_recursion(postfix_expr, "xchg_rbx,rax\ncqo\nidiv_rbx\n", "xchg_rbx,rax\nmov_rdx, %0\ndiv_rbx\n", "/", additive_expr_stub_a);
  1191. arithmetic_recursion(postfix_expr, "xchg_rbx,rax\ncqo\nidiv_rbx\nmov_rax,rdx\n", "xchg_rbx,rax\nmov_rdx, %0\ndiv_rbx\nmov_rax,rdx\n", "%", additive_expr_stub_a);
  1192. }
  1193. else if(ARMV7L == Architecture)
  1194. {
  1195. arithmetic_recursion(postfix_expr, "'9' R0 '0' R1 MULS R0 ARITH2_ALWAYS\n", "'9' R0 '0' R1 MUL R0 ARITH2_ALWAYS\n", "*", additive_expr_stub_a);
  1196. arithmetic_recursion(postfix_expr, "{LR} PUSH_ALWAYS\n^~divides CALL_ALWAYS\n{LR} POP_ALWAYS\n", "{LR} PUSH_ALWAYS\n^~divide CALL_ALWAYS\n{LR} POP_ALWAYS\n", "/", additive_expr_stub_a);
  1197. arithmetic_recursion(postfix_expr, "{LR} PUSH_ALWAYS\n^~moduluss CALL_ALWAYS\n{LR} POP_ALWAYS\n", "{LR} PUSH_ALWAYS\n^~modulus CALL_ALWAYS\n{LR} POP_ALWAYS\n", "%", additive_expr_stub_a);
  1198. }
  1199. else if(AARCH64 == Architecture)
  1200. {
  1201. general_recursion(postfix_expr, "MUL_X0_X1_X0\n", "*", additive_expr_stub_a);
  1202. arithmetic_recursion(postfix_expr, "SDIV_X0_X1_X0\n", "UDIV_X0_X1_X0\n", "/", additive_expr_stub_a);
  1203. arithmetic_recursion(postfix_expr, "SDIV_X2_X1_X0\nMSUB_X0_X0_X2_X1\n", "UDIV_X2_X1_X0\nMSUB_X0_X0_X2_X1\n", "%", additive_expr_stub_a);
  1204. }
  1205. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  1206. {
  1207. general_recursion(postfix_expr, "rd_a0 rs1_a1 rs2_a0 mul\n", "*", additive_expr_stub_a);
  1208. arithmetic_recursion(postfix_expr, "rd_a0 rs1_a1 rs2_a0 div\n", "rd_a0 rs1_a1 rs2_a0 divu\n", "/", additive_expr_stub_a);
  1209. arithmetic_recursion(postfix_expr, "rd_a0 rs1_a1 rs2_a0 rem\n", "rd_a0 rs1_a1 rs2_a0 remu\n", "%", additive_expr_stub_a);
  1210. }
  1211. }
  1212. void additive_expr_a()
  1213. {
  1214. postfix_expr();
  1215. additive_expr_stub_a();
  1216. }
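/* Second tier: + and - */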
  1217. void additive_expr_stub_b()
  1218. {
  1219. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1220. {
  1221. arithmetic_recursion(additive_expr_a, "ADD R0 R1 R0\n", "ADDU R0 R1 R0\n", "+", additive_expr_stub_b);
  1222. arithmetic_recursion(additive_expr_a, "SUB R0 R1 R0\n", "SUBU R0 R1 R0\n", "-", additive_expr_stub_b);
  1223. }
  1224. else if(X86 == Architecture)
  1225. {
  1226. arithmetic_recursion(additive_expr_a, "add_eax,ebx\n", "add_eax,ebx\n", "+", additive_expr_stub_b);
  1227. arithmetic_recursion(additive_expr_a, "sub_ebx,eax\nmov_eax,ebx\n", "sub_ebx,eax\nmov_eax,ebx\n", "-", additive_expr_stub_b);
  1228. }
  1229. else if(AMD64 == Architecture)
  1230. {
  1231. arithmetic_recursion(additive_expr_a, "add_rax,rbx\n", "add_rax,rbx\n", "+", additive_expr_stub_b);
  1232. arithmetic_recursion(additive_expr_a, "sub_rbx,rax\nmov_rax,rbx\n", "sub_rbx,rax\nmov_rax,rbx\n", "-", additive_expr_stub_b);
  1233. }
  1234. else if(ARMV7L == Architecture)
  1235. {
  1236. arithmetic_recursion(additive_expr_a, "'0' R0 R0 ADD R1 ARITH2_ALWAYS\n", "'0' R0 R0 ADD R1 ARITH2_ALWAYS\n", "+", additive_expr_stub_b);
  1237. arithmetic_recursion(additive_expr_a, "'0' R0 R0 SUB R1 ARITH2_ALWAYS\n", "'0' R0 R0 SUB R1 ARITH2_ALWAYS\n", "-", additive_expr_stub_b);
  1238. }
  1239. else if(AARCH64 == Architecture)
  1240. {
  1241. general_recursion(additive_expr_a, "ADD_X0_X1_X0\n", "+", additive_expr_stub_b);
  1242. general_recursion(additive_expr_a, "SUB_X0_X1_X0\n", "-", additive_expr_stub_b);
  1243. }
  1244. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  1245. {
  1246. general_recursion(additive_expr_a, "rd_a0 rs1_a1 rs2_a0 add\n", "+", additive_expr_stub_b);
  1247. general_recursion(additive_expr_a, "rd_a0 rs1_a1 rs2_a0 sub\n", "-", additive_expr_stub_b);
  1248. }
  1249. }
  1250. void additive_expr_b()
  1251. {
  1252. additive_expr_a();
  1253. additive_expr_stub_b();
  1254. }
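/* Third tier: << and >> */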
  1255. void additive_expr_stub_c()
  1256. {
  1257. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1258. {
  1259. arithmetic_recursion(additive_expr_b, "SAL R0 R1 R0\n", "SL0 R0 R1 R0\n", "<<", additive_expr_stub_c);
  1260. arithmetic_recursion(additive_expr_b, "SAR R0 R1 R0\n", "SR0 R0 R1 R0\n", ">>", additive_expr_stub_c);
  1261. }
  1262. else if(X86 == Architecture)
  1263. {
  1264. arithmetic_recursion(additive_expr_b, "mov_ecx,eax\nmov_eax,ebx\nsal_eax,cl\n", "mov_ecx,eax\nmov_eax,ebx\nshl_eax,cl\n", "<<", additive_expr_stub_c);
  1265. arithmetic_recursion(additive_expr_b, "mov_ecx,eax\nmov_eax,ebx\nsar_eax,cl\n", "mov_ecx,eax\nmov_eax,ebx\nshr_eax,cl\n", ">>", additive_expr_stub_c);
  1266. }
  1267. else if(AMD64 == Architecture)
  1268. {
  1269. arithmetic_recursion(additive_expr_b, "mov_rcx,rax\nmov_rax,rbx\nsal_rax,cl\n", "mov_rcx,rax\nmov_rax,rbx\nshl_rax,cl\n", "<<", additive_expr_stub_c);
  1270. arithmetic_recursion(additive_expr_b, "mov_rcx,rax\nmov_rax,rbx\nsar_rax,cl\n", "mov_rcx,rax\nmov_rax,rbx\nshr_rax,cl\n", ">>", additive_expr_stub_c);
  1271. }
  1272. else if(ARMV7L == Architecture)
  1273. {
  1274. arithmetic_recursion(additive_expr_b, "LEFT R1 R0 R0 SHIFT AUX_ALWAYS\n", "LEFT R1 R0 R0 SHIFT AUX_ALWAYS\n", "<<", additive_expr_stub_c);
  1275. arithmetic_recursion(additive_expr_b, "ARITH_RIGHT R1 R0 R0 SHIFT AUX_ALWAYS\n", "RIGHT R1 R0 R0 SHIFT AUX_ALWAYS\n", ">>", additive_expr_stub_c);
  1276. }
  1277. else if(AARCH64 == Architecture)
  1278. {
  1279. general_recursion(additive_expr_b, "LSHIFT_X0_X1_X0\n", "<<", additive_expr_stub_c);
  1280. arithmetic_recursion(additive_expr_b, "ARITH_RSHIFT_X0_X1_X0\n", "LOGICAL_RSHIFT_X0_X1_X0\n", ">>", additive_expr_stub_c);
  1281. }
  1282. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  1283. {
  1284. general_recursion(additive_expr_b, "rd_a0 rs1_a1 rs2_a0 sll\n", "<<", additive_expr_stub_c);
  1285. arithmetic_recursion(additive_expr_b, "rd_a0 rs1_a1 rs2_a0 sra\n", "rd_a0 rs1_a1 rs2_a0 srl\n", ">>", additive_expr_stub_c);
  1286. }
  1287. }
  1288. void additive_expr_c()
  1289. {
  1290. additive_expr_b();
  1291. additive_expr_stub_c();
  1292. }
/*
* relational-expr:
* additive_expr
* relational-expr < additive_expr
* relational-expr <= additive_expr
* relational-expr >= additive_expr
* relational-expr > additive_expr
* relational-expr == additive_expr
* relational-expr != additive_expr
*/
  1301. void relational_expr_stub()
  1302. {
  1303. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1304. {
  1305. arithmetic_recursion(additive_expr_c, "CMP R0 R1 R0\nSET.L R0 R0 1\n", "CMPU R0 R1 R0\nSET.L R0 R0 1\n", "<", relational_expr_stub);
  1306. arithmetic_recursion(additive_expr_c, "CMP R0 R1 R0\nSET.LE R0 R0 1\n", "CMPU R0 R1 R0\nSET.LE R0 R0 1\n", "<=", relational_expr_stub);
  1307. arithmetic_recursion(additive_expr_c, "CMP R0 R1 R0\nSET.GE R0 R0 1\n", "CMPU R0 R1 R0\nSET.GE R0 R0 1\n", ">=", relational_expr_stub);
  1308. arithmetic_recursion(additive_expr_c, "CMP R0 R1 R0\nSET.G R0 R0 1\n", "CMPU R0 R1 R0\nSET.G R0 R0 1\n", ">", relational_expr_stub);
  1309. arithmetic_recursion(additive_expr_c, "CMP R0 R1 R0\nSET.E R0 R0 1\n", "CMPU R0 R1 R0\nSET.E R0 R0 1\n", "==", relational_expr_stub);
  1310. arithmetic_recursion(additive_expr_c, "CMP R0 R1 R0\nSET.NE R0 R0 1\n", "CMPU R0 R1 R0\nSET.NE R0 R0 1\n", "!=", relational_expr_stub);
  1311. }
  1312. else if(X86 == Architecture)
  1313. {
  1314. arithmetic_recursion(additive_expr_c, "cmp\nsetl_al\nmovzx_eax,al\n", "cmp\nsetb_al\nmovzx_eax,al\n", "<", relational_expr_stub);
  1315. arithmetic_recursion(additive_expr_c, "cmp\nsetle_al\nmovzx_eax,al\n", "cmp\nsetbe_al\nmovzx_eax,al\n", "<=", relational_expr_stub);
  1316. arithmetic_recursion(additive_expr_c, "cmp\nsetge_al\nmovzx_eax,al\n", "cmp\nsetae_al\nmovzx_eax,al\n", ">=", relational_expr_stub);
  1317. arithmetic_recursion(additive_expr_c, "cmp\nsetg_al\nmovzx_eax,al\n", "cmp\nseta_al\nmovzx_eax,al\n", ">", relational_expr_stub);
  1318. general_recursion(additive_expr_c, "cmp\nsete_al\nmovzx_eax,al\n", "==", relational_expr_stub);
  1319. general_recursion(additive_expr_c, "cmp\nsetne_al\nmovzx_eax,al\n", "!=", relational_expr_stub);
  1320. }
  1321. else if(AMD64 == Architecture)
  1322. {
  1323. arithmetic_recursion(additive_expr_c, "cmp_rbx,rax\nsetl_al\nmovzx_rax,al\n", "cmp_rbx,rax\nsetb_al\nmovzx_rax,al\n", "<", relational_expr_stub);
  1324. arithmetic_recursion(additive_expr_c, "cmp_rbx,rax\nsetle_al\nmovzx_rax,al\n", "cmp_rbx,rax\nsetbe_al\nmovzx_rax,al\n", "<=", relational_expr_stub);
  1325. arithmetic_recursion(additive_expr_c, "cmp_rbx,rax\nsetge_al\nmovzx_rax,al\n", "cmp_rbx,rax\nsetae_al\nmovzx_rax,al\n", ">=", relational_expr_stub);
  1326. arithmetic_recursion(additive_expr_c, "cmp_rbx,rax\nsetg_al\nmovzx_rax,al\n", "cmp_rbx,rax\nseta_al\nmovzx_rax,al\n", ">", relational_expr_stub);
  1327. general_recursion(additive_expr_c, "cmp_rbx,rax\nsete_al\nmovzx_rax,al\n", "==", relational_expr_stub);
  1328. general_recursion(additive_expr_c, "cmp_rbx,rax\nsetne_al\nmovzx_rax,al\n", "!=", relational_expr_stub);
  1329. }
  1330. else if(ARMV7L == Architecture)
  1331. {
  1332. arithmetic_recursion(additive_expr_c, "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_L\n", "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_LO\n", "<", relational_expr_stub);
  1333. arithmetic_recursion(additive_expr_c, "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_LE\n", "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_LS\n", "<=", relational_expr_stub);
  1334. arithmetic_recursion(additive_expr_c, "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_GE\n", "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_HS\n", ">=", relational_expr_stub);
  1335. arithmetic_recursion(additive_expr_c, "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_G\n", "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_HI\n", ">", relational_expr_stub);
  1336. general_recursion(additive_expr_c, "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_EQUAL\n", "==", relational_expr_stub);
  1337. general_recursion(additive_expr_c, "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_NE\n", "!=", relational_expr_stub);
  1338. }
  1339. else if(AARCH64 == Architecture)
  1340. {
  1341. arithmetic_recursion(additive_expr_c, "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_LT\nSET_X0_TO_0\n", "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_LO\nSET_X0_TO_0\n", "<", relational_expr_stub);
  1342. arithmetic_recursion(additive_expr_c, "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_LE\nSET_X0_TO_0\n", "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_LS\nSET_X0_TO_0\n", "<=", relational_expr_stub);
  1343. arithmetic_recursion(additive_expr_c, "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_GE\nSET_X0_TO_0\n", "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_HS\nSET_X0_TO_0\n", ">=", relational_expr_stub);
  1344. arithmetic_recursion(additive_expr_c, "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_GT\nSET_X0_TO_0\n", "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_HI\nSET_X0_TO_0\n", ">", relational_expr_stub);
  1345. general_recursion(additive_expr_c, "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_EQ\nSET_X0_TO_0\n", "==", relational_expr_stub);
  1346. general_recursion(additive_expr_c, "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_NE\nSET_X0_TO_0\n", "!=", relational_expr_stub);
  1347. }
  1348. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  1349. {
  1350. arithmetic_recursion(additive_expr_c, "rd_a0 rs1_a1 rs2_a0 slt\n", "rd_a0 rs1_a1 rs2_a0 sltu\n", "<", relational_expr_stub);
  1351. arithmetic_recursion(additive_expr_c, "rd_a0 rs1_a0 rs2_a1 slt\nrd_a0 rs1_a0 !1 xori\n", "rd_a0 rs1_a0 rs2_a1 sltu\nrd_a0 rs1_a0 !1 xori\n", "<=", relational_expr_stub);
  1352. arithmetic_recursion(additive_expr_c, "rd_a0 rs1_a1 rs2_a0 slt\nrd_a0 rs1_a0 !1 xori\n", "rd_a0 rs1_a1 rs2_a0 sltu\nrd_a0 rs1_a0 !1 xori\n", ">=", relational_expr_stub);
  1353. arithmetic_recursion(additive_expr_c, "rd_a0 rs1_a0 rs2_a1 slt\n", "rd_a0 rs1_a0 rs2_a1 sltu\n", ">", relational_expr_stub);
  1354. general_recursion(additive_expr_c, "rd_a0 rs1_a0 rs2_a1 sub\nrd_a0 rs1_a0 !1 sltiu\n", "==", relational_expr_stub);
  1355. general_recursion(additive_expr_c, "rd_a0 rs1_a0 rs2_a1 sub\nrd_a0 rs2_a0 sltu\n", "!=", relational_expr_stub);
  1356. }
  1357. }
  1358. void relational_expr()
  1359. {
  1360. additive_expr_c();
  1361. relational_expr_stub();
  1362. }
/*
* bitwise-expr:
* relational-expr
* bitwise-expr & bitwise-expr
* bitwise-expr && bitwise-expr
* bitwise-expr | bitwise-expr
* bitwise-expr || bitwise-expr
* bitwise-expr ^ bitwise-expr
* Note: && and || compile to plain AND/OR; there is no short-circuit evaluation
*/
  1372. void bitwise_expr_stub()
  1373. {
  1374. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1375. {
  1376. general_recursion(relational_expr, "AND R0 R0 R1\n", "&", bitwise_expr_stub);
  1377. general_recursion(relational_expr, "AND R0 R0 R1\n", "&&", bitwise_expr_stub);
  1378. general_recursion(relational_expr, "OR R0 R0 R1\n", "|", bitwise_expr_stub);
  1379. general_recursion(relational_expr, "OR R0 R0 R1\n", "||", bitwise_expr_stub);
  1380. general_recursion(relational_expr, "XOR R0 R0 R1\n", "^", bitwise_expr_stub);
  1381. }
  1382. else if(X86 == Architecture)
  1383. {
  1384. general_recursion(relational_expr, "and_eax,ebx\n", "&", bitwise_expr_stub);
  1385. general_recursion(relational_expr, "and_eax,ebx\n", "&&", bitwise_expr_stub);
  1386. general_recursion(relational_expr, "or_eax,ebx\n", "|", bitwise_expr_stub);
  1387. general_recursion(relational_expr, "or_eax,ebx\n", "||", bitwise_expr_stub);
  1388. general_recursion(relational_expr, "xor_eax,ebx\n", "^", bitwise_expr_stub);
  1389. }
  1390. else if(AMD64 == Architecture)
  1391. {
  1392. general_recursion(relational_expr, "and_rax,rbx\n", "&", bitwise_expr_stub);
  1393. general_recursion(relational_expr, "and_rax,rbx\n", "&&", bitwise_expr_stub);
  1394. general_recursion(relational_expr, "or_rax,rbx\n", "|", bitwise_expr_stub);
  1395. general_recursion(relational_expr, "or_rax,rbx\n", "||", bitwise_expr_stub);
  1396. general_recursion(relational_expr, "xor_rax,rbx\n", "^", bitwise_expr_stub);
  1397. }
  1398. else if(ARMV7L == Architecture)
  1399. {
  1400. general_recursion(relational_expr, "NO_SHIFT R0 R0 AND R1 ARITH2_ALWAYS\n", "&", bitwise_expr_stub);
  1401. general_recursion(relational_expr, "NO_SHIFT R0 R0 AND R1 ARITH2_ALWAYS\n", "&&", bitwise_expr_stub);
  1402. general_recursion(relational_expr, "NO_SHIFT R0 R0 OR R1 AUX_ALWAYS\n", "|", bitwise_expr_stub);
  1403. general_recursion(relational_expr, "NO_SHIFT R0 R0 OR R1 AUX_ALWAYS\n", "||", bitwise_expr_stub);
  1404. general_recursion(relational_expr, "'0' R0 R0 XOR R1 ARITH2_ALWAYS\n", "^", bitwise_expr_stub);
  1405. }
  1406. else if(AARCH64 == Architecture)
  1407. {
  1408. general_recursion(relational_expr, "AND_X0_X1_X0\n", "&", bitwise_expr_stub);
  1409. general_recursion(relational_expr, "AND_X0_X1_X0\n", "&&", bitwise_expr_stub);
  1410. general_recursion(relational_expr, "OR_X0_X1_X0\n", "|", bitwise_expr_stub);
  1411. general_recursion(relational_expr, "OR_X0_X1_X0\n", "||", bitwise_expr_stub);
  1412. general_recursion(relational_expr, "XOR_X0_X1_X0\n", "^", bitwise_expr_stub);
  1413. }
  1414. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  1415. {
  1416. general_recursion(relational_expr, "rd_a0 rs1_a1 rs2_a0 and\n", "&", bitwise_expr_stub);
  1417. general_recursion(relational_expr, "rd_a0 rs1_a1 rs2_a0 and\n", "&&", bitwise_expr_stub);
  1418. general_recursion(relational_expr, "rd_a0 rs1_a1 rs2_a0 or\n", "|", bitwise_expr_stub);
  1419. general_recursion(relational_expr, "rd_a0 rs1_a1 rs2_a0 or\n", "||", bitwise_expr_stub);
  1420. general_recursion(relational_expr, "rd_a0 rs1_a1 rs2_a0 xor\n", "^", bitwise_expr_stub);
  1421. }
  1422. }
  1423. void bitwise_expr()
  1424. {
  1425. relational_expr();
  1426. bitwise_expr_stub();
  1427. }
  1428. /*
  1429. * expression:
  1430. * bitwise-or-expr
  1431. * bitwise-or-expr = expression
  1432. */
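/* primary_expr handles the leaves of an expression: the unary operators & - ! ~, sizeof, parenthesized expressions, character and string literals, variables and numeric constants */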
  1433. void primary_expr()
  1434. {
  1435. require(NULL != global_token, "Received EOF where primary expression expected\n");
  1436. if(match("&", global_token->s))
  1437. {
  1438. Address_of = TRUE;
  1439. global_token = global_token->next;
  1440. require(NULL != global_token, "Received EOF after & where primary expression expected\n");
  1441. }
  1442. else
  1443. {
  1444. Address_of = FALSE;
  1445. }
  1446. if(match("sizeof", global_token->s)) unary_expr_sizeof();
  1447. else if('-' == global_token->s[0])
  1448. {
  1449. if(X86 == Architecture) emit_out("mov_eax, %0\n");
  1450. else if(AMD64 == Architecture) emit_out("mov_rax, %0\n");
  1451. else if(ARMV7L == Architecture) emit_out("!0 R0 LOADI8_ALWAYS\n");
  1452. else if(AARCH64 == Architecture) emit_out("SET_X0_TO_0\n");
  1453. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 mv\n");
  1454. common_recursion(primary_expr);
  1455. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("NEG R0 R0\n");
  1456. else if(X86 == Architecture) emit_out("sub_ebx,eax\nmov_eax,ebx\n");
  1457. else if(AMD64 == Architecture) emit_out("sub_rbx,rax\nmov_rax,rbx\n");
  1458. else if(ARMV7L == Architecture) emit_out("'0' R0 R0 SUB R1 ARITH2_ALWAYS\n");
  1459. else if(AARCH64 == Architecture) emit_out("SUB_X0_X1_X0\n");
  1460. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 rs1_a1 rs2_a0 sub\n");
  1461. }
  1462. else if('!' == global_token->s[0])
  1463. {
  1464. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("LOADI R0 1\n");
  1465. else if(X86 == Architecture) emit_out("mov_eax, %1\n");
  1466. else if(AMD64 == Architecture) emit_out("mov_rax, %1\n");
  1467. else if(ARMV7L == Architecture) emit_out("!1 R0 LOADI8_ALWAYS\n");
  1468. else if(AARCH64 == Architecture) emit_out("SET_X0_TO_1\n");
  1469. common_recursion(postfix_expr);
  1470. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("CMPU R0 R1 R0\nSET.G R0 R0 1\n");
  1471. else if(X86 == Architecture) emit_out("cmp\nseta_al\nmovzx_eax,al\n");
  1472. else if(AMD64 == Architecture) emit_out("cmp_rbx,rax\nseta_al\nmovzx_rax,al\n");
  1473. else if(ARMV7L == Architecture) emit_out("'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_HI\n");
  1474. else if(AARCH64 == Architecture) emit_out("CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_HI\nSET_X0_TO_0\n");
  1475. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 rs1_a0 !1 sltiu\n");
  1476. }
  1477. else if('~' == global_token->s[0])
  1478. {
  1479. common_recursion(postfix_expr);
  1480. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("NOT R0 R0\n");
  1481. else if(X86 == Architecture) emit_out("not_eax\n");
  1482. else if(AMD64 == Architecture) emit_out("not_rax\n");
  1483. else if(ARMV7L == Architecture) emit_out("'0' R0 R0 MVN_ALWAYS\n");
  1484. else if(AARCH64 == Architecture) emit_out("MVN_X0\n");
  1485. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 rs1_a0 not\n");
  1486. }
  1487. else if(global_token->s[0] == '(')
  1488. {
  1489. global_token = global_token->next;
  1490. expression();
  1491. require_match("Error in Primary expression\nDidn't get )\n", ")");
  1492. }
  1493. else if(global_token->s[0] == '\'') primary_expr_char();
  1494. else if(global_token->s[0] == '"') primary_expr_string();
  1495. else if(in_set(global_token->s[0], "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_")) primary_expr_variable();
  1496. else if(global_token->s[0] == '*') primary_expr_variable();
  1497. else if(in_set(global_token->s[0], "0123456789"))
  1498. {
  1499. primary_expr_number(global_token->s);
  1500. global_token = global_token->next;
  1501. }
  1502. else primary_expr_failure();
  1503. }
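/* Return the instruction sequence that implements the given compound assignment operator for the selected architecture; e.g. compound_operation("+=", TRUE) returns "add_eax,ebx\n" when targeting X86 */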
  1504. char* compound_operation(char* operator, int is_signed)
  1505. {
  1506. char* operation = "";
  1507. if(match("+=", operator))
  1508. {
  1509. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1510. {
  1511. if(is_signed) operation = "ADD R0 R1 R0\n";
  1512. else operation = "ADDU R0 R1 R0\n";
  1513. }
  1514. else if(X86 == Architecture) operation = "add_eax,ebx\n";
  1515. else if(AMD64 == Architecture) operation = "add_rax,rbx\n";
  1516. else if(ARMV7L == Architecture) operation = "'0' R0 R0 ADD R1 ARITH2_ALWAYS\n";
  1517. else if(AARCH64 == Architecture) operation = "ADD_X0_X1_X0\n";
  1518. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) operation = "rd_a0 rs1_a1 rs2_a0 add\n";
  1519. }
  1520. else if(match("-=", operator))
  1521. {
  1522. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1523. {
  1524. if(is_signed) operation = "SUB R0 R1 R0\n";
  1525. else operation = "SUBU R0 R1 R0\n";
  1526. }
  1527. else if(X86 == Architecture) operation = "sub_ebx,eax\nmov_eax,ebx\n";
  1528. else if(AMD64 == Architecture) operation = "sub_rbx,rax\nmov_rax,rbx\n";
  1529. else if(ARMV7L == Architecture) operation = "'0' R0 R0 SUB R1 ARITH2_ALWAYS\n";
  1530. else if(AARCH64 == Architecture) operation = "SUB_X0_X1_X0\n";
  1531. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) operation = "rd_a0 rs1_a1 rs2_a0 sub\n";
  1532. }
  1533. else if(match("*=", operator))
  1534. {
  1535. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1536. {
  1537. if(is_signed) operation = "MUL R0 R1 R0\n";
  1538. else operation = "MULU R0 R1 R0\n";
  1539. }
  1540. else if(X86 == Architecture)
  1541. {
  1542. if(is_signed) operation = "imul_ebx\n";
  1543. else operation = "mul_ebx\n";
  1544. }
  1545. else if(AMD64 == Architecture)
  1546. {
  1547. if(is_signed) operation = "imul_rbx\n";
  1548. else operation = "mul_rbx\n";
  1549. }
  1550. else if(ARMV7L == Architecture) operation = "'9' R0 '0' R1 MULS R0 ARITH2_ALWAYS\n";
  1551. else if(AARCH64 == Architecture) operation = "MUL_X0_X1_X0\n";
  1552. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) operation = "rd_a0 rs1_a1 rs2_a0 mul\n";
  1553. }
  1554. else if(match("/=", operator))
  1555. {
  1556. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1557. {
  1558. if(is_signed) operation = "DIV R0 R1 R0\n";
  1559. else operation = "DIVU R0 R1 R0\n";
  1560. }
  1561. else if(X86 == Architecture)
  1562. {
  1563. if (is_signed) operation = "xchg_ebx,eax\ncdq\nidiv_ebx\n";
  1564. else operation = "xchg_ebx,eax\nmov_edx, %0\ndiv_ebx\n";
  1565. }
  1566. else if(AMD64 == Architecture)
  1567. {
  1568. if(is_signed) operation = "xchg_rbx,rax\ncqo\nidiv_rbx\n";
  1569. else operation = "xchg_rbx,rax\nmov_rdx, %0\ndiv_rbx\n";
  1570. }
  1571. else if(ARMV7L == Architecture)
  1572. {
  1573. if(is_signed) operation = "{LR} PUSH_ALWAYS\n^~divides CALL_ALWAYS\n{LR} POP_ALWAYS\n";
  1574. else operation = "{LR} PUSH_ALWAYS\n^~divide CALL_ALWAYS\n{LR} POP_ALWAYS\n";
  1575. }
  1576. else if(AARCH64 == Architecture)
  1577. {
  1578. if(is_signed) operation = "SDIV_X0_X1_X0\n";
  1579. else operation = "UDIV_X0_X1_X0\n";
  1580. }
  1581. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  1582. {
  1583. if(is_signed) operation = "rd_a0 rs1_a1 rs2_a0 div\n";
  1584. else operation = "rd_a0 rs1_a1 rs2_a0 divu\n";
  1585. }
  1586. }
  1587. else if(match("%=", operator))
  1588. {
  1589. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1590. {
  1591. if(is_signed) operation = "MOD R0 R1 R0\n";
  1592. else operation = "MODU R0 R1 R0\n";
  1593. }
  1594. else if(X86 == Architecture)
  1595. {
  1596. if(is_signed) operation = "xchg_ebx,eax\ncdq\nidiv_ebx\nmov_eax,edx\n";
  1597. else operation = "xchg_ebx,eax\nmov_edx, %0\ndiv_ebx\nmov_eax,edx\n";
  1598. }
  1599. else if(AMD64 == Architecture)
  1600. {
  1601. if(is_signed) operation = "xchg_rbx,rax\ncqo\nidiv_rbx\nmov_rax,rdx\n";
  1602. else operation = "xchg_rbx,rax\nmov_rdx, %0\ndiv_rbx\nmov_rax,rdx\n";
  1603. }
  1604. else if(ARMV7L == Architecture)
  1605. {
  1606. if(is_signed) operation = "{LR} PUSH_ALWAYS\n^~moduluss CALL_ALWAYS\n{LR} POP_ALWAYS\n";
  1607. else operation = "{LR} PUSH_ALWAYS\n^~modulus CALL_ALWAYS\n{LR} POP_ALWAYS\n";
  1608. }
  1609. else if(AARCH64 == Architecture)
  1610. {
  1611. if(is_signed) operation = "SDIV_X2_X1_X0\nMSUB_X0_X0_X2_X1\n";
  1612. else operation = "UDIV_X2_X1_X0\nMSUB_X0_X0_X2_X1\n";
  1613. }
  1614. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  1615. {
  1616. if(is_signed) operation = "rd_a0 rs1_a1 rs2_a0 rem\n";
  1617. else operation = "rd_a0 rs1_a1 rs2_a0 remu\n";
  1618. }
  1619. }
  1620. else if(match("<<=", operator))
  1621. {
  1622. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1623. {
  1624. if(is_signed) operation = "SAL R0 R1 R0\n";
  1625. else operation = "SL0 R0 R1 R0\n";
  1626. }
  1627. else if(X86 == Architecture)
  1628. {
  1629. if(is_signed) operation = "mov_ecx,eax\nmov_eax,ebx\nsal_eax,cl\n";
  1630. else operation = "mov_ecx,eax\nmov_eax,ebx\nshl_eax,cl\n";
  1631. }
  1632. else if(AMD64 == Architecture)
  1633. {
  1634. if(is_signed) operation = "mov_rcx,rax\nmov_rax,rbx\nsal_rax,cl\n";
  1635. else operation = "mov_rcx,rax\nmov_rax,rbx\nshl_rax,cl\n";
  1636. }
  1637. else if(ARMV7L == Architecture) operation = "LEFT R1 R0 R0 SHIFT AUX_ALWAYS\n";
  1638. else if(AARCH64 == Architecture) operation = "LSHIFT_X0_X1_X0\n";
  1639. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) operation = "rd_a0 rs1_a1 rs2_a0 sll\n";
  1640. }
  1641. else if(match(">>=", operator))
  1642. {
  1643. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1644. {
  1645. if(is_signed) operation = "SAR R0 R1 R0\n";
  1646. else operation = "SR0 R0 R1 R0\n";
  1647. }
  1648. else if(X86 == Architecture)
  1649. {
  1650. if(is_signed) operation = "mov_ecx,eax\nmov_eax,ebx\nsar_eax,cl\n";
  1651. else operation = "mov_ecx,eax\nmov_eax,ebx\nshr_eax,cl\n";
  1652. }
  1653. else if(AMD64 == Architecture)
  1654. {
  1655. if(is_signed) operation = "mov_rcx,rax\nmov_rax,rbx\nsar_rax,cl\n";
  1656. else operation = "mov_rcx,rax\nmov_rax,rbx\nshr_rax,cl\n";
  1657. }
  1658. else if(ARMV7L == Architecture)
  1659. {
  1660. if(is_signed) operation = "ARITH_RIGHT R1 R0 R0 SHIFT AUX_ALWAYS\n";
  1661. else operation = "RIGHT R1 R0 R0 SHIFT AUX_ALWAYS\n";
  1662. }
  1663. else if(AARCH64 == Architecture)
  1664. {
  1665. if(is_signed) operation = "ARITH_RSHIFT_X0_X1_X0\n";
  1666. else operation = "LOGICAL_RSHIFT_X0_X1_X0\n";
  1667. }
  1668. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  1669. {
  1670. if(is_signed) operation = "rd_a0 rs1_a1 rs2_a0 sra\n";
  1671. else operation = "rd_a0 rs1_a1 rs2_a0 srl\n";
  1672. }
  1673. }
  1674. else if(match("&=", operator))
  1675. {
  1676. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) operation = "AND R0 R0 R1\n";
  1677. else if(X86 == Architecture) operation = "and_eax,ebx\n";
  1678. else if(AMD64 == Architecture) operation = "and_rax,rbx\n";
  1679. else if(ARMV7L == Architecture) operation = "NO_SHIFT R0 R0 AND R1 ARITH2_ALWAYS\n";
  1680. else if(AARCH64 == Architecture) operation = "AND_X0_X1_X0\n";
  1681. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) operation = "rd_a0 rs1_a1 rs2_a0 and\n";
  1682. }
  1683. else if(match("^=", operator))
  1684. {
  1685. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) operation = "XOR R0 R0 R1\n";
  1686. else if(X86 == Architecture) operation = "xor_eax,ebx\n";
  1687. else if(AMD64 == Architecture) operation = "xor_rax,rbx\n";
  1688. else if(ARMV7L == Architecture) operation = "'0' R0 R0 XOR R1 ARITH2_ALWAYS\n";
  1689. else if(AARCH64 == Architecture) operation = "XOR_X0_X1_X0\n";
  1690. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) operation = "rd_a0 rs1_a1 rs2_a0 xor\n";
  1691. }
  1692. else if(match("|=", operator))
  1693. {
  1694. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) operation = "OR R0 R0 R1\n";
  1695. else if(X86 == Architecture) operation = "or_eax,ebx\n";
  1696. else if(AMD64 == Architecture) operation = "or_rax,rbx\n";
  1697. else if(ARMV7L == Architecture) operation = "NO_SHIFT R0 R0 OR R1 AUX_ALWAYS\n";
  1698. else if(AARCH64 == Architecture) operation = "OR_X0_X1_X0\n";
  1699. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) operation = "rd_a0 rs1_a1 rs2_a0 or\n";
  1700. }
  1701. else
  1702. {
  1703. fputs("Found illegal compound assignment operator: ", stderr);
  1704. fputs(operator, stderr);
  1705. fputc('\n', stderr);
  1706. exit(EXIT_FAILURE);
  1707. }
  1708. return operation;
  1709. }
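/* expression: parse a bitwise-expr, then handle plain assignment (=) and compound assignment (+=, -=, ...) */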
  1710. void expression()
  1711. {
  1712. bitwise_expr();
  1713. if(match("=", global_token->s))
  1714. {
  1715. char* store = "";
  1716. if(match("]", global_token->prev->s))
  1717. {
  1718. store = store_value(current_target->type->size);
  1719. }
  1720. else
  1721. {
  1722. store = store_value(current_target->size);
  1723. }
  1724. common_recursion(expression);
  1725. emit_out(store);
  1726. current_target = integer;
  1727. }
  1728. else if(is_compound_assignment(global_token->s))
  1729. {
  1730. maybe_bootstrap_error("compound operator");
  1731. char* push = "";
  1732. char* load = "";
  1733. char* operation = "";
  1734. char* pop = "";
  1735. char* store = "";
  1736. struct type* last_type = current_target;
  1737. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) push = "PUSHR R1 R15\n";
  1738. else if(X86 == Architecture) push = "push_ebx\n";
  1739. else if(AMD64 == Architecture) push = "push_rbx\n";
  1740. else if(ARMV7L == Architecture) push = "{R1} PUSH_ALWAYS\n";
  1741. else if(AARCH64 == Architecture) push = "PUSH_X1\n";
  1742. else if(RISCV32 == Architecture) push = "rs1_sp rs2_a1 @-4 sw\n";
  1743. else if(RISCV64 == Architecture) push = "rs1_sp rs2_a1 @-8 sd\n";
  1744. if(!match("]", global_token->prev->s) || !match("char*", current_target->name))
  1745. {
  1746. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) load = "LOAD R1 R1 0\n";
  1747. else if(X86 == Architecture) load = "mov_ebx,[ebx]\n";
  1748. else if(AMD64 == Architecture) load = "mov_rbx,[rbx]\n";
  1749. else if(ARMV7L == Architecture) load = "!0 R1 LOAD32 R1 MEMORY\n";
  1750. else if(AARCH64 == Architecture) load = "DEREF_X1\n";
  1751. else if(RISCV32 == Architecture) load = "rd_a1 rs1_a1 lw\n";
  1752. else if(RISCV64 == Architecture) load = "rd_a1 rs1_a1 ld\n";
  1753. }
  1754. else
  1755. {
  1756. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) load = "LOAD8 R1 R1 0\n";
  1757. else if(X86 == Architecture) load = "movsx_ebx,BYTE_PTR_[ebx]\n";
  1758. else if(AMD64 == Architecture) load = "movsx_rbx,BYTE_PTR_[rbx]\n";
  1759. else if(ARMV7L == Architecture) load = "LOADU8 R1 LOAD R1 MEMORY\n";
  1760. else if(AARCH64 == Architecture) load = "DEREF_X1_BYTE\n";
  1761. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) load = "rd_a1 rs1_a1 lbu\n";
  1762. }
  1763. char *operator = global_token->s;
  1764. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) pop = "POPR R1 R15\n";
  1765. else if(X86 == Architecture) pop = "pop_ebx\n";
  1766. else if(AMD64 == Architecture) pop = "pop_rbx\n";
  1767. else if(ARMV7L == Architecture) pop = "{R1} POP_ALWAYS\n";
  1768. else if(AARCH64 == Architecture) pop = "POP_X1\n";
  1769. else if(RISCV32 == Architecture) pop = "rd_a1 rs1_sp !-4 lw\n";
  1770. else if(RISCV64 == Architecture) pop = "rd_a1 rs1_sp !-8 ld\n";
  1771. if(match("]", global_token->prev->s))
  1772. {
  1773. store = store_value(current_target->type->size);
  1774. }
  1775. else
  1776. {
  1777. store = store_value(current_target->size);
  1778. }
  1779. common_recursion(expression);
  1780. current_target = promote_type(current_target, last_type);
  1781. emit_out(push);
  1782. emit_out(load);
  1783. operation = compound_operation(operator, current_target->is_signed);
  1784. emit_out(operation);
  1785. emit_out(pop);
  1786. emit_out(store);
  1787. current_target = integer;
  1788. }
  1789. }
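/* Return TRUE if s is a C keyword and thus can not be used as a variable name */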
  1790. int iskeywordp(char* s)
  1791. {
  1792. if(match("auto", s)) return TRUE;
  1793. if(match("break", s)) return TRUE;
  1794. if(match("case", s)) return TRUE;
  1795. if(match("char", s)) return TRUE;
  1796. if(match("const", s)) return TRUE;
  1797. if(match("continue", s)) return TRUE;
  1798. if(match("default", s)) return TRUE;
  1799. if(match("do", s)) return TRUE;
  1800. if(match("double", s)) return TRUE;
  1801. if(match("else", s)) return TRUE;
  1802. if(match("enum", s)) return TRUE;
  1803. if(match("extern", s)) return TRUE;
  1804. if(match("float", s)) return TRUE;
  1805. if(match("for", s)) return TRUE;
  1806. if(match("goto", s)) return TRUE;
  1807. if(match("if", s)) return TRUE;
  1808. if(match("int", s)) return TRUE;
  1809. if(match("long", s)) return TRUE;
  1810. if(match("register", s)) return TRUE;
  1811. if(match("return", s)) return TRUE;
  1812. if(match("short", s)) return TRUE;
  1813. if(match("signed", s)) return TRUE;
  1814. if(match("sizeof", s)) return TRUE;
  1815. if(match("static", s)) return TRUE;
  1816. if(match("struct", s)) return TRUE;
  1817. if(match("switch", s)) return TRUE;
  1818. if(match("typedef", s)) return TRUE;
  1819. if(match("union", s)) return TRUE;
  1820. if(match("unsigned", s)) return TRUE;
  1821. if(match("void", s)) return TRUE;
  1822. if(match("volatile", s)) return TRUE;
  1823. if(match("while", s)) return TRUE;
  1824. return FALSE;
  1825. }
  1826. /* Similar to integer division a / b but rounds up */
  1827. unsigned ceil_div(unsigned a, unsigned b)
  1828. {
  1829. return (a + b - 1) / b;
  1830. }
  1831. /* Process local variable */
  1832. void collect_local()
  1833. {
  1834. if(NULL != break_target_func)
  1835. {
  1836. fputs("Local variable initialized inside of loop in file: ", stderr);
  1837. line_error();
  1838. fputs("\nMove the variable outside of the loop to resolve\n", stderr);
  1839. fputs("Otherwise the binary will segfault while running\n", stderr);
  1840. exit(EXIT_FAILURE);
  1841. }
  1842. struct type* type_size = type_name();
  1843. require(NULL != global_token, "Received EOF while collecting locals\n");
  1844. require(!in_set(global_token->s[0], "[{(<=>)}]|&!^%;:'\""), "forbidden character in local variable name\n");
  1845. require(!iskeywordp(global_token->s), "You are not allowed to use a keyword as a local variable name\n");
  1846. require(NULL != type_size, "Must have non-null type\n");
  1847. struct token_list* a = sym_declare(global_token->s, type_size, function->locals);
  1848. if(match("main", function->s) && (NULL == function->locals))
  1849. {
  1850. if(KNIGHT_NATIVE == Architecture) a->depth = register_size;
  1851. else if(KNIGHT_POSIX == Architecture) a->depth = 20;
  1852. else if(X86 == Architecture) a->depth = -20;
  1853. else if(AMD64 == Architecture) a->depth = -40;
  1854. else if(ARMV7L == Architecture) a->depth = 16;
  1855. else if(AARCH64 == Architecture) a->depth = 32; /* argc, argv, envp and the local (8 bytes each) */
  1856. else if(RISCV32 == Architecture) a->depth = -16;
  1857. else if(RISCV64 == Architecture) a->depth = -32;
  1858. }
  1859. else if((NULL == function->arguments) && (NULL == function->locals))
  1860. {
  1861. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) a->depth = register_size;
  1862. else if(X86 == Architecture) a->depth = -8;
  1863. else if(AMD64 == Architecture) a->depth = -16;
  1864. else if(ARMV7L == Architecture) a->depth = 8;
  1865. else if(AARCH64 == Architecture) a->depth = register_size;
  1866. else if(RISCV32 == Architecture) a->depth = -4;
  1867. else if(RISCV64 == Architecture) a->depth = -8;
  1868. }
  1869. else if(NULL == function->locals)
  1870. {
  1871. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) a->depth = function->arguments->depth + 8;
  1872. else if(X86 == Architecture) a->depth = function->arguments->depth - 8;
  1873. else if(AMD64 == Architecture) a->depth = function->arguments->depth - 16;
  1874. else if(ARMV7L == Architecture) a->depth = function->arguments->depth + 8;
  1875. else if(AARCH64 == Architecture) a->depth = function->arguments->depth + register_size;
  1876. else if(RISCV32 == Architecture) a->depth = function->arguments->depth - 4;
  1877. else if(RISCV64 == Architecture) a->depth = function->arguments->depth - 8;
  1878. }
  1879. else
  1880. {
  1881. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) a->depth = function->locals->depth + register_size;
  1882. else if(X86 == Architecture) a->depth = function->locals->depth - register_size;
  1883. else if(AMD64 == Architecture) a->depth = function->locals->depth - register_size;
  1884. else if(ARMV7L == Architecture) a->depth = function->locals->depth + register_size;
  1885. else if(AARCH64 == Architecture) a->depth = function->locals->depth + register_size;
  1886. else if(RISCV32 == Architecture) a->depth = function->locals->depth - register_size;
  1887. else if(RISCV64 == Architecture) a->depth = function->locals->depth - register_size;
  1888. }
/* Adjust the depth of local structs. When the stack grows downwards, we want them to
start at the bottom of the allocated space. */
  1891. unsigned struct_depth_adjustment = (ceil_div(a->type->size, register_size) - 1) * register_size;
  1892. if(KNIGHT_POSIX == Architecture) a->depth = a->depth + struct_depth_adjustment;
  1893. else if(KNIGHT_NATIVE == Architecture) a->depth = a->depth + struct_depth_adjustment;
  1894. else if(X86 == Architecture) a->depth = a->depth - struct_depth_adjustment;
  1895. else if(AMD64 == Architecture) a->depth = a->depth - struct_depth_adjustment;
  1896. else if(ARMV7L == Architecture) a->depth = a->depth + struct_depth_adjustment;
  1897. else if(AARCH64 == Architecture) a->depth = a->depth + struct_depth_adjustment;
  1898. else if(RISCV32 == Architecture) a->depth = a->depth - struct_depth_adjustment;
  1899. else if(RISCV64 == Architecture) a->depth = a->depth - struct_depth_adjustment;
  1900. function->locals = a;
  1901. emit_out("# Defining local ");
  1902. emit_out(global_token->s);
  1903. emit_out("\n");
  1904. global_token = global_token->next;
  1905. require(NULL != global_token, "incomplete local missing name\n");
  1906. if(match("=", global_token->s))
  1907. {
  1908. global_token = global_token->next;
  1909. require(NULL != global_token, "incomplete local assignment\n");
  1910. expression();
  1911. }
  1912. require_match("ERROR in collect_local\nMissing ;\n", ";");
unsigned i = ceil_div(a->type->size, register_size);
  1914. while(i != 0)
  1915. {
  1916. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("PUSHR R0 R15\t#");
  1917. else if(X86 == Architecture) emit_out("push_eax\t#");
  1918. else if(AMD64 == Architecture) emit_out("push_rax\t#");
  1919. else if(ARMV7L == Architecture) emit_out("{R0} PUSH_ALWAYS\t#");
  1920. else if(AARCH64 == Architecture) emit_out("PUSH_X0\t#");
  1921. else if(RISCV32 == Architecture) emit_out("rd_sp rs1_sp !-4 addi\nrs1_sp rs2_a0 sw\t#");
  1922. else if(RISCV64 == Architecture) emit_out("rd_sp rs1_sp !-8 addi\nrs1_sp rs2_a0 sd\t#");
  1923. emit_out(a->s);
  1924. emit_out("\n");
  1925. i = i - 1;
  1926. }
  1927. }
  1928. void statement();
  1929. /* Evaluate if statements */
  1930. void process_if()
  1931. {
  1932. char* number_string = int2str(current_count, 10, TRUE);
  1933. current_count = current_count + 1;
  1934. emit_out("# IF_");
  1935. uniqueID_out(function->s, number_string);
  1936. global_token = global_token->next;
  1937. require_match("ERROR in process_if\nMISSING (\n", "(");
  1938. expression();
  1939. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP.Z R0 @ELSE_");
  1940. else if(X86 == Architecture) emit_out("test_eax,eax\nje %ELSE_");
  1941. else if(AMD64 == Architecture) emit_out("test_rax,rax\nje %ELSE_");
  1942. else if(ARMV7L == Architecture) emit_out("!0 CMPI8 R0 IMM_ALWAYS\n^~ELSE_");
  1943. else if(AARCH64 == Architecture) emit_out("CBNZ_X0_PAST_BR\nLOAD_W16_AHEAD\nSKIP_32_DATA\n&ELSE_");
  1944. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rs1_a0 @8 bnez\n$ELSE_");
  1945. uniqueID_out(function->s, number_string);
  1946. if(ARMV7L == Architecture) emit_out(" JUMP_EQUAL\n");
  1947. else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
  1948. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
  1949. require_match("ERROR in process_if\nMISSING )\n", ")");
  1950. statement();
  1951. require(NULL != global_token, "Reached EOF inside of function\n");
  1952. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @_END_IF_");
  1953. else if(X86 == Architecture) emit_out("jmp %_END_IF_");
  1954. else if(AMD64 == Architecture) emit_out("jmp %_END_IF_");
  1955. else if(ARMV7L == Architecture) emit_out("^~_END_IF_");
  1956. else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&_END_IF_");
  1957. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$_END_IF_");
  1958. uniqueID_out(function->s, number_string);
  1959. if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS\n");
  1960. else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
  1961. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
  1962. emit_out(":ELSE_");
  1963. uniqueID_out(function->s, number_string);
  1964. if(match("else", global_token->s))
  1965. {
  1966. global_token = global_token->next;
  1967. require(NULL != global_token, "Received EOF where an else statement expected\n");
  1968. statement();
  1969. require(NULL != global_token, "Reached EOF inside of function\n");
  1970. }
  1971. emit_out(":_END_IF_");
  1972. uniqueID_out(function->s, number_string);
  1973. }
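/* Collect the statements belonging to a single case label, stopping at the next case label, at :default, or after a break */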
  1974. void process_case()
  1975. {
  1976. process_case_iter:
  1977. if(match("case", global_token->s)) return;
  1978. if(match(":default", global_token->s)) return;
  1979. if(match("break", global_token->s))
  1980. {
  1981. statement();
  1982. }
  1983. else
  1984. {
  1985. statement();
  1986. goto process_case_iter;
  1987. }
  1988. }
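/* Process switch statements: emit the case bodies first, then a comparison table at the end that jumps to the matching case (falling through to :default when nothing matches) */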
  1989. void process_switch()
  1990. {
  1991. maybe_bootstrap_error("switch/case statements");
  1992. struct token_list* nested_locals = break_frame;
  1993. char* nested_break_head = break_target_head;
  1994. char* nested_break_func = break_target_func;
  1995. char* nested_break_num = break_target_num;
  1996. char* nested_continue_head = continue_target_head;
  1997. char* number_string = int2str(current_count, 10, TRUE);
  1998. current_count = current_count + 1;
  1999. break_target_head = "_SWITCH_END_";
  2000. continue_target_head = NULL; /* don't allow continue in switch statements */
  2001. break_target_num = number_string;
  2002. break_frame = function->locals;
  2003. break_target_func = function->s;
  2004. emit_out("# switch_");
  2005. uniqueID_out(function->s, number_string);
  2006. /* get what we are casing on */
  2007. global_token = global_token->next;
  2008. require_match("ERROR in process_switch\nMISSING (\n", "(");
  2009. expression();
  2010. require_match("ERROR in process_switch\nMISSING )\n", ")");
  2011. /* Put the value in R1 as it is currently in R0 */
  2012. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("MOVE R1 R0\n");
  2013. else if(X86 == Architecture) emit_out("mov_ebx,eax\n");
  2014. else if(AMD64 == Architecture) emit_out("push_rax\npop_rbx\n");
  2015. else if(ARMV7L == Architecture) emit_out("'0' R1 R0 NO_SHIFT MOVE_ALWAYS\n");
  2016. else if(AARCH64 == Architecture) emit_out("SET_X1_FROM_X0\n");
  2017. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a1 rs1_a0 mv\n");
  2018. /* Jump to the switch table */
  2019. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @_SWITCH_TABLE_");
  2020. else if(X86 == Architecture) emit_out("jmp %_SWITCH_TABLE_");
  2021. else if(AMD64 == Architecture) emit_out("jmp %_SWITCH_TABLE_");
  2022. else if(ARMV7L == Architecture) emit_out("^~_SWITCH_TABLE_");
  2023. else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&_SWITCH_TABLE_");
  2024. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$_SWITCH_TABLE_");
  2025. uniqueID_out(function->s, number_string);
  2026. if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS\n");
  2027. else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
  2028. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
  2029. /* must be switch (exp) {$STATEMENTS}; form */
  2030. require_match("ERROR in process_switch\nMISSING {\n", "{");
  2031. struct case_list* backtrack = NULL;
  2032. process_switch_iter:
  2033. if(match("case", global_token->s))
  2034. {
  2035. global_token = global_token->next;
  2036. if(':' == global_token->s[0])
  2037. {
  2038. struct case_list* c = calloc(1, sizeof(struct case_list));
  2039. c->next = backtrack;
  2040. c->value = global_token->s + 1;
  2041. backtrack = c;
  2042. emit_out(":_SWITCH_CASE_");
  2043. emit_out(c->value);
  2044. emit_out("_");
  2045. uniqueID_out(function->s, number_string);
  2046. global_token = global_token->next;
  2047. process_case();
  2048. }
  2049. else line_error();
  2050. goto process_switch_iter;
  2051. }
  2052. else if(match(":default", global_token->s))
  2053. { /* because of how M2-Planet treats labels */
  2054. global_token = global_token->next;
  2055. emit_out(":_SWITCH_DEFAULT_");
  2056. uniqueID_out(function->s, number_string);
  2057. /* collect statements until } */
  2058. while(!match("}", global_token->s))
  2059. {
  2060. statement();
  2061. }
  2062. /* jump over the switch table */
  2063. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @_SWITCH_END_");
  2064. else if(X86 == Architecture) emit_out("jmp %_SWITCH_END_");
  2065. else if(AMD64 == Architecture) emit_out("jmp %_SWITCH_END_");
  2066. else if(ARMV7L == Architecture) emit_out("^~_SWITCH_END_");
  2067. else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&_SWITCH_END_");
  2068. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$_SWITCH_END_");
  2069. uniqueID_out(function->s, number_string);
  2070. if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS\n");
  2071. else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
  2072. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
  2073. }
  2074. /* Switch statements must end with } */
  2075. require_match("ERROR in process_switch\nMISSING }\n", "}");
  2076. /* create the table */
  2077. emit_out(":_SWITCH_TABLE_");
  2078. uniqueID_out(function->s, number_string);
  2079. struct case_list* hold;
  2080. while(NULL != backtrack)
  2081. {
  2082. /* put case value in R0 as the switch (value) is in R1 */
  2083. primary_expr_number(backtrack->value);
  2084. hold = backtrack->next;
  2085. /* compare R0 and R1 and jump to case if equal */
  2086. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("CMPU R0 R0 R1\nJUMP.E R0 @_SWITCH_CASE_");
  2087. else if(X86 == Architecture) emit_out("cmp\nje %_SWITCH_CASE_");
  2088. else if(AMD64 == Architecture) emit_out("cmp_rbx,rax\nje %_SWITCH_CASE_");
  2089. else if(ARMV7L == Architecture) emit_out("'0' R0 CMP R1 AUX_ALWAYS\n^~_SWITCH_CASE_");
  2090. else if(AARCH64 == Architecture) emit_out("CMP_X1_X0\nSKIP_32_DATA\n&_SWITCH_CASE_");
  2091. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 rs1_a0 rs2_a1 sub\nrs1_a0 @8 bnez\n$_SWITCH_CASE_");
  2092. emit_out(backtrack->value);
  2093. emit_out("_");
  2094. uniqueID_out(function->s, number_string);
  2095. if(ARMV7L == Architecture) emit_out(" JUMP_EQUAL\n");
  2096. else if(AARCH64 == Architecture) emit_out("\nSKIP_INST_NE\nBR_X16\n");
  2097. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
  2098. free(backtrack);
  2099. backtrack = hold;
  2100. }
  2101. /* Default to :default */
  2102. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @_SWITCH_DEFAULT_");
  2103. else if(X86 == Architecture) emit_out("jmp %_SWITCH_DEFAULT_");
  2104. else if(AMD64 == Architecture) emit_out("jmp %_SWITCH_DEFAULT_");
  2105. else if(ARMV7L == Architecture) emit_out("^~_SWITCH_DEFAULT_");
  2106. else if(AARCH64 == Architecture) emit_out("SKIP_32_DATA\n&_SWITCH_DEFAULT_");
  2107. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$_SWITCH_DEFAULT_");
  2108. uniqueID_out(function->s, number_string);
  2109. if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS\n");
  2110. else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
  2111. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
  2112. /* put the exit of the switch */
  2113. emit_out(":_SWITCH_END_");
  2114. uniqueID_out(function->s, number_string);
  2115. break_target_head = nested_break_head;
  2116. break_target_func = nested_break_func;
  2117. break_target_num = nested_break_num;
  2118. continue_target_head = nested_continue_head;
  2119. break_frame = nested_locals;
  2120. }
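/* Process for loops */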
  2121. void process_for()
  2122. {
  2123. struct token_list* nested_locals = break_frame;
  2124. char* nested_break_head = break_target_head;
  2125. char* nested_break_func = break_target_func;
  2126. char* nested_break_num = break_target_num;
  2127. char* nested_continue_head = continue_target_head;
  2128. char* number_string = int2str(current_count, 10, TRUE);
  2129. current_count = current_count + 1;
  2130. break_target_head = "FOR_END_";
  2131. continue_target_head = "FOR_ITER_";
  2132. break_target_num = number_string;
  2133. break_frame = function->locals;
  2134. break_target_func = function->s;
  2135. emit_out("# FOR_initialization_");
  2136. uniqueID_out(function->s, number_string);
  2137. global_token = global_token->next;
  2138. require_match("ERROR in process_for\nMISSING (\n", "(");
  2139. if(!match(";",global_token->s))
  2140. {
  2141. expression();
  2142. }
  2143. emit_out(":FOR_");
  2144. uniqueID_out(function->s, number_string);
  2145. require_match("ERROR in process_for\nMISSING ;1\n", ";");
  2146. expression();
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP.Z R0 @FOR_END_");
	else if(X86 == Architecture) emit_out("test_eax,eax\nje %FOR_END_");
	else if(AMD64 == Architecture) emit_out("test_rax,rax\nje %FOR_END_");
	else if(ARMV7L == Architecture) emit_out("!0 CMPI8 R0 IMM_ALWAYS\n^~FOR_END_");
	else if(AARCH64 == Architecture) emit_out("CBNZ_X0_PAST_BR\nLOAD_W16_AHEAD\nSKIP_32_DATA\n&FOR_END_");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rs1_a0 @8 bnez\n$FOR_END_");
	uniqueID_out(function->s, number_string);
	if(ARMV7L == Architecture) emit_out(" JUMP_EQUAL\n");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @FOR_THEN_");
	else if(X86 == Architecture) emit_out("jmp %FOR_THEN_");
	else if(AMD64 == Architecture) emit_out("jmp %FOR_THEN_");
	else if(ARMV7L == Architecture) emit_out("^~FOR_THEN_");
	else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&FOR_THEN_");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$FOR_THEN_");
	uniqueID_out(function->s, number_string);
	if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS\n");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
	emit_out(":FOR_ITER_");
	uniqueID_out(function->s, number_string);
	require_match("ERROR in process_for\nMISSING ;2\n", ";");
	expression();
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @FOR_");
	else if(X86 == Architecture) emit_out("jmp %FOR_");
	else if(AMD64 == Architecture) emit_out("jmp %FOR_");
	else if(ARMV7L == Architecture) emit_out("^~FOR_");
	else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&FOR_");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$FOR_");
	uniqueID_out(function->s, number_string);
	if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS\n");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
	emit_out(":FOR_THEN_");
	uniqueID_out(function->s, number_string);
	require_match("ERROR in process_for\nMISSING )\n", ")");
	statement();
	require(NULL != global_token, "Reached EOF inside of function\n");
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @FOR_ITER_");
	else if(X86 == Architecture) emit_out("jmp %FOR_ITER_");
	else if(AMD64 == Architecture) emit_out("jmp %FOR_ITER_");
	else if(ARMV7L == Architecture) emit_out("^~FOR_ITER_");
	else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&FOR_ITER_");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$FOR_ITER_");
	uniqueID_out(function->s, number_string);
	if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS\n");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
	emit_out(":FOR_END_");
	uniqueID_out(function->s, number_string);
	break_target_head = nested_break_head;
	break_target_func = nested_break_func;
	break_target_num = nested_break_num;
	continue_target_head = nested_continue_head;
	break_frame = nested_locals;
}
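
/* Illustration (rough sketch of the emitted control flow; foo is a hypothetical
 * function name): "for(init; test; iter) body;" is laid out with four labels.
 * On x86 it looks roughly like:
 *     :FOR_foo_0        evaluate test into eax; test_eax,eax; je %FOR_END_foo_0; jmp %FOR_THEN_foo_0
 *     :FOR_ITER_foo_0   iter; jmp %FOR_foo_0
 *     :FOR_THEN_foo_0   body; jmp %FOR_ITER_foo_0
 *     :FOR_END_foo_0
 * so the iteration expression is emitted before the body but executed after it. */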

/* Process Assembly statements */
void process_asm()
{
	global_token = global_token->next;
	require_match("ERROR in process_asm\nMISSING (\n", "(");
	while('"' == global_token->s[0])
	{
		emit_out(global_token->s + 1);
		emit_out("\n");
		global_token = global_token->next;
		require(NULL != global_token, "Received EOF inside asm statement\n");
	}
	require_match("ERROR in process_asm\nMISSING )\n", ")");
	require_match("ERROR in process_asm\nMISSING ;\n", ";");
}
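
/* Illustration: a statement such as asm("NOP" "NOP"); (NOP is just a placeholder)
 * copies each string token, minus its leading quote character, straight into the
 * output followed by a newline. The strings are architecture-specific assembly for
 * the output format, not C. */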

/* Process do while loops */
void process_do()
{
	struct token_list* nested_locals = break_frame;
	char* nested_break_head = break_target_head;
	char* nested_break_func = break_target_func;
	char* nested_break_num = break_target_num;
	char* nested_continue_head = continue_target_head;
	char* number_string = int2str(current_count, 10, TRUE);
	current_count = current_count + 1;
	break_target_head = "DO_END_";
	continue_target_head = "DO_TEST_";
	break_target_num = number_string;
	break_frame = function->locals;
	break_target_func = function->s;
	emit_out(":DO_");
	uniqueID_out(function->s, number_string);
	global_token = global_token->next;
	require(NULL != global_token, "Received EOF where do statement is expected\n");
	statement();
	require(NULL != global_token, "Reached EOF inside of function\n");
	emit_out(":DO_TEST_");
	uniqueID_out(function->s, number_string);
	require_match("ERROR in process_do\nMISSING while\n", "while");
	require_match("ERROR in process_do\nMISSING (\n", "(");
	expression();
	require_match("ERROR in process_do\nMISSING )\n", ")");
	require_match("ERROR in process_do\nMISSING ;\n", ";");
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP.NZ R0 @DO_");
	else if(X86 == Architecture) emit_out("test_eax,eax\njne %DO_");
	else if(AMD64 == Architecture) emit_out("test_rax,rax\njne %DO_");
	else if(ARMV7L == Architecture) emit_out("!0 CMPI8 R0 IMM_ALWAYS\n^~DO_");
	else if(AARCH64 == Architecture) emit_out("CBZ_X0_PAST_BR\nLOAD_W16_AHEAD\nSKIP_32_DATA\n&DO_");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rs1_a0 @DO_END_");
	uniqueID_out(function->s, number_string);
	if(ARMV7L == Architecture) emit_out(" JUMP_NE\n");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
	{
		emit_out("beqz\n$DO_");
		uniqueID_out(function->s, number_string);
		emit_out("jal\n");
	}
	emit_out(":DO_END_");
	uniqueID_out(function->s, number_string);
	break_frame = nested_locals;
	break_target_head = nested_break_head;
	break_target_func = nested_break_func;
	break_target_num = nested_break_num;
	continue_target_head = nested_continue_head;
}
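
/* Illustration (rough sketch, hypothetical function foo): "do body; while(test);"
 * emits, on x86:
 *     :DO_foo_0       body
 *     :DO_TEST_foo_0  evaluate test into eax; test_eax,eax; jne %DO_foo_0
 *     :DO_END_foo_0
 * break jumps to DO_END_, continue jumps to DO_TEST_. */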

/* Process while loops */
void process_while()
{
	struct token_list* nested_locals = break_frame;
	char* nested_break_head = break_target_head;
	char* nested_break_func = break_target_func;
	char* nested_break_num = break_target_num;
	char* nested_continue_head = continue_target_head;
	char* number_string = int2str(current_count, 10, TRUE);
	current_count = current_count + 1;
	break_target_head = "END_WHILE_";
	continue_target_head = "WHILE_";
	break_target_num = number_string;
	break_frame = function->locals;
	break_target_func = function->s;
	emit_out(":WHILE_");
	uniqueID_out(function->s, number_string);
	global_token = global_token->next;
	require_match("ERROR in process_while\nMISSING (\n", "(");
	expression();
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP.Z R0 @END_WHILE_");
	else if(X86 == Architecture) emit_out("test_eax,eax\nje %END_WHILE_");
	else if(AMD64 == Architecture) emit_out("test_rax,rax\nje %END_WHILE_");
	else if(ARMV7L == Architecture) emit_out("!0 CMPI8 R0 IMM_ALWAYS\n^~END_WHILE_");
	else if(AARCH64 == Architecture) emit_out("CBNZ_X0_PAST_BR\nLOAD_W16_AHEAD\nSKIP_32_DATA\n&END_WHILE_");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rs1_a0 @8 bnez\n$END_WHILE_");
	uniqueID_out(function->s, number_string);
	if(ARMV7L == Architecture) emit_out(" JUMP_EQUAL\t");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
	emit_out("# THEN_while_");
	uniqueID_out(function->s, number_string);
	require_match("ERROR in process_while\nMISSING )\n", ")");
	statement();
	require(NULL != global_token, "Reached EOF inside of function\n");
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @WHILE_");
	else if(X86 == Architecture) emit_out("jmp %WHILE_");
	else if(AMD64 == Architecture) emit_out("jmp %WHILE_");
	else if(ARMV7L == Architecture) emit_out("^~WHILE_");
	else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&WHILE_");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$WHILE_");
	uniqueID_out(function->s, number_string);
	if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS\n");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
	emit_out(":END_WHILE_");
	uniqueID_out(function->s, number_string);
	break_target_head = nested_break_head;
	break_target_func = nested_break_func;
	break_target_num = nested_break_num;
	continue_target_head = nested_continue_head;
	break_frame = nested_locals;
}
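
/* Illustration (rough sketch, hypothetical function foo): "while(test) body;"
 * emits, on x86:
 *     :WHILE_foo_0       evaluate test into eax; test_eax,eax; je %END_WHILE_foo_0
 *     # THEN_while_foo_0 body; jmp %WHILE_foo_0
 *     :END_WHILE_foo_0
 * break jumps to END_WHILE_, continue jumps back to WHILE_. */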

/* Ensure that functions return */
void return_result()
{
	global_token = global_token->next;
	require(NULL != global_token, "Incomplete return statement received\n");
	if(global_token->s[0] != ';') expression();
	require_match("ERROR in return_result\nMISSING ;\n", ";");
	struct token_list* i;
	unsigned size_local_var;
	for(i = function->locals; NULL != i; i = i->next)
	{
		size_local_var = ceil_div(i->type->size, register_size);
		while(size_local_var != 0)
		{
			if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("POPR R1 R15\t# _return_result_locals\n");
			else if(X86 == Architecture) emit_out("pop_ebx\t# _return_result_locals\n");
			else if(AMD64 == Architecture) emit_out("pop_rbx\t# _return_result_locals\n");
			else if(ARMV7L == Architecture) emit_out("{R1} POP_ALWAYS\t# _return_result_locals\n");
			else if(AARCH64 == Architecture) emit_out("POP_X1\t# _return_result_locals\n");
			else if(RISCV32 == Architecture) emit_out("rd_a1 rs1_sp lw # _return_result_locals\nrd_sp rs1_sp !4 addi\n");
			else if(RISCV64 == Architecture) emit_out("rd_a1 rs1_sp ld # _return_result_locals\nrd_sp rs1_sp !8 addi\n");
			size_local_var = size_local_var - 1;
		}
	}
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("RET R15\n");
	else if(X86 == Architecture) emit_out("ret\n");
	else if(AMD64 == Architecture) emit_out("ret\n");
	else if(ARMV7L == Architecture) emit_out("'1' LR RETURN\n");
	else if(AARCH64 == Architecture) emit_out("RETURN\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("ret\n");
}
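
/* Illustration: before the actual return instruction every local is popped so the
 * stack pointer is back where the caller left it; ceil_div rounds each local's size
 * up to whole register_size slots. For a hypothetical x86 function with two int
 * locals the tail of the output is:
 *     pop_ebx # _return_result_locals
 *     pop_ebx # _return_result_locals
 *     ret
 */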

void process_break()
{
	if(NULL == break_target_head)
	{
		line_error();
		fputs("Not inside of a loop or case statement\n", stderr);
		exit(EXIT_FAILURE);
	}
	struct token_list* i = function->locals;
	while(i != break_frame)
	{
		if(NULL == i) break;
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("POPR R1 R15\t# break_cleanup_locals\n");
		else if(X86 == Architecture) emit_out("pop_ebx\t# break_cleanup_locals\n");
		else if(AMD64 == Architecture) emit_out("pop_rbx\t# break_cleanup_locals\n");
		else if(ARMV7L == Architecture) emit_out("{R1} POP_ALWAYS\t# break_cleanup_locals\n");
		else if(AARCH64 == Architecture) emit_out("POP_X1\t# break_cleanup_locals\n");
		else if(RISCV32 == Architecture) emit_out("rd_a1 rs1_sp lw\t# break_cleanup_locals\nrd_sp rs1_sp !4 addi\n");
		else if(RISCV64 == Architecture) emit_out("rd_a1 rs1_sp ld\t# break_cleanup_locals\nrd_sp rs1_sp !8 addi\n");
		i = i->next;
	}
	global_token = global_token->next;
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @");
	else if(X86 == Architecture) emit_out("jmp %");
	else if(AMD64 == Architecture) emit_out("jmp %");
	else if(ARMV7L == Architecture) emit_out("^~");
	else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$");
	emit_out(break_target_head);
	emit_out(break_target_func);
	emit_out("_");
	emit_out(break_target_num);
	if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out(" jal");
	emit_out("\n");
	require_match("ERROR in break statement\nMissing ;\n", ";");
}
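
/* Illustration: inside the while loop sketched above, a "break;" in hypothetical
 * function foo first pops any locals declared since the loop's break_frame was
 * recorded, then emits (x86):
 *     jmp %END_WHILE_foo_0
 * i.e. break_target_head + break_target_func + "_" + break_target_num. */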

void process_continue()
{
	if(NULL == continue_target_head)
	{
		line_error();
		fputs("Not inside of a loop\n", stderr);
		exit(EXIT_FAILURE);
	}
	global_token = global_token->next;
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @");
	else if(X86 == Architecture) emit_out("jmp %");
	else if(AMD64 == Architecture) emit_out("jmp %");
	else if(ARMV7L == Architecture) emit_out("^~");
	else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$");
	emit_out(continue_target_head);
	emit_out(break_target_func);
	emit_out("_");
	emit_out(break_target_num);
	if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out(" jal");
	emit_out("\n");
	require_match("ERROR in continue statement\nMissing ;\n", ";");
}
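
/* Illustration: "continue;" emits the same style of jump but to the continue label,
 * e.g. jmp %WHILE_foo_0 in a while loop or jmp %DO_TEST_foo_0 in a do loop (foo
 * hypothetical). Unlike break, it does not pop locals before jumping. */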

void recursive_statement()
{
	global_token = global_token->next;
	require(NULL != global_token, "Received EOF in recursive statement\n");
	struct token_list* frame = function->locals;
	while(!match("}", global_token->s))
	{
		statement();
		require(NULL != global_token, "Received EOF in recursive statement prior to }\n");
	}
	global_token = global_token->next;
	/* Clean up any locals added */
	if(((X86 == Architecture) && !match("ret\n", output_list->s)) ||
	   ((AMD64 == Architecture) && !match("ret\n", output_list->s)) ||
	   (((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) && !match("RET R15\n", output_list->s)) ||
	   ((ARMV7L == Architecture) && !match("'1' LR RETURN\n", output_list->s)) ||
	   ((AARCH64 == Architecture) && !match("RETURN\n", output_list->s)) ||
	   (((RISCV32 == Architecture) || (RISCV64 == Architecture)) && !match("ret\n", output_list->s)))
	{
		struct token_list* i;
		for(i = function->locals; frame != i; i = i->next)
		{
			if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("POPR R1 R15\t# _recursive_statement_locals\n");
			else if(X86 == Architecture) emit_out("pop_ebx\t# _recursive_statement_locals\n");
			else if(AMD64 == Architecture) emit_out("pop_rbx\t# _recursive_statement_locals\n");
			else if(ARMV7L == Architecture) emit_out("{R1} POP_ALWAYS\t# _recursive_statement_locals\n");
			else if(AARCH64 == Architecture) emit_out("POP_X1\t# _recursive_statement_locals\n");
			else if(RISCV32 == Architecture) emit_out("rd_a1 rs1_sp lw\t# _recursive_statement_locals\nrd_sp rs1_sp !4 addi\n");
			else if(RISCV64 == Architecture) emit_out("rd_a1 rs1_sp ld\t# _recursive_statement_locals\nrd_sp rs1_sp !8 addi\n");
		}
	}
	function->locals = frame;
}
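
/* Illustration: a block such as { int a; int b; ... } pops its two locals when the
 * closing } is reached, unless the block's last emitted instruction is already the
 * architecture's return (output_list->s is the most recently emitted string); in
 * that case return_result has already done the cleanup and any extra pops would be
 * unreachable, so they are skipped. */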

/*
 * statement:
 *     { statement-list-opt }
 *     type-name identifier ;
 *     type-name identifier = expression;
 *     if ( expression ) statement
 *     if ( expression ) statement else statement
 *     do statement while ( expression ) ;
 *     while ( expression ) statement
 *     for ( expression ; expression ; expression ) statement
 *     asm ( "assembly" ... "assembly" ) ;
 *     goto label ;
 *     label:
 *     return ;
 *     break ;
 *     expr ;
 */
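
/* Illustration of statements this grammar accepts (hypothetical code):
 *     int x = 5;
 *     if(x) { x = x - 1; } else { x = 0; }
 *     while(x) x = x - 1;
 *     restart:
 *     goto restart;
 *     return x;
 */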

struct type* lookup_type(char* s, struct type* start);
void statement()
{
	require(NULL != global_token, "expected a C statement but received EOF\n");
	/* Always an integer until told otherwise */
	current_target = integer;
	if(global_token->s[0] == '{')
	{
		recursive_statement();
	}
	else if(':' == global_token->s[0])
	{
		emit_out(global_token->s);
		emit_out("\t#C goto label\n");
		global_token = global_token->next;
	}
	else if((NULL != lookup_type(global_token->s, prim_types)) ||
	        match("struct", global_token->s))
	{
		collect_local();
	}
	else if(match("if", global_token->s))
	{
		process_if();
	}
	else if(match("switch", global_token->s))
	{
		process_switch();
	}
	else if(match("do", global_token->s))
	{
		process_do();
	}
	else if(match("while", global_token->s))
	{
		process_while();
	}
	else if(match("for", global_token->s))
	{
		process_for();
	}
	else if(match("asm", global_token->s))
	{
		process_asm();
	}
	else if(match("goto", global_token->s))
	{
		global_token = global_token->next;
		require(NULL != global_token, "naked goto is not supported\n");
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @");
		else if(X86 == Architecture) emit_out("jmp %");
		else if(AMD64 == Architecture) emit_out("jmp %");
		else if(ARMV7L == Architecture) emit_out("^~");
		else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&");
		else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$");
		emit_out(global_token->s);
		if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS");
		else if(AARCH64 == Architecture) emit_out("\nBR_X16");
		else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out(" jal");
		emit_out("\n");
		global_token = global_token->next;
		require_match("ERROR in statement\nMissing ;\n", ";");
	}
	else if(match("return", global_token->s))
	{
		return_result();
	}
	else if(match("break", global_token->s))
	{
		process_break();
	}
	else if(match("continue", global_token->s))
	{
		process_continue();
	}
	else
	{
		expression();
		require_match("ERROR in statement\nMISSING ;\n", ";");
	}
}

/* Collect function arguments */
void collect_arguments()
{
	global_token = global_token->next;
	require(NULL != global_token, "Received EOF when attempting to collect arguments\n");
	struct type* type_size;
	struct token_list* a;
	while(!match(")", global_token->s))
	{
		type_size = type_name();
		require(NULL != global_token, "Received EOF when attempting to collect arguments\n");
		require(NULL != type_size, "Must have non-null type\n");
		if(global_token->s[0] == ')')
		{
			/* foo(int,char,void) doesn't need anything done */
			continue;
		}
		else if(global_token->s[0] != ',')
		{
			/* deal with foo(int a, char b) */
			require(!in_set(global_token->s[0], "[{(<=>)}]|&!^%;:'\""), "forbidden character in argument variable name\n");
			require(!iskeywordp(global_token->s), "You are not allowed to use a keyword as an argument variable name\n");
			a = sym_declare(global_token->s, type_size, function->arguments);
			if(NULL == function->arguments)
			{
				if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) a->depth = 0;
				else if(X86 == Architecture) a->depth = -4;
				else if(AMD64 == Architecture) a->depth = -8;
				else if(ARMV7L == Architecture) a->depth = 4;
				else if(AARCH64 == Architecture) a->depth = register_size;
				else if(RISCV32 == Architecture) a->depth = -4;
				else if(RISCV64 == Architecture) a->depth = -8;
			}
			else
			{
				if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) a->depth = function->arguments->depth + register_size;
				else if(X86 == Architecture) a->depth = function->arguments->depth - register_size;
				else if(AMD64 == Architecture) a->depth = function->arguments->depth - register_size;
				else if(ARMV7L == Architecture) a->depth = function->arguments->depth + register_size;
				else if(AARCH64 == Architecture) a->depth = function->arguments->depth + register_size;
				else if(RISCV32 == Architecture) a->depth = function->arguments->depth - register_size;
				else if(RISCV64 == Architecture) a->depth = function->arguments->depth - register_size;
			}
			global_token = global_token->next;
			require(NULL != global_token, "Incomplete argument list\n");
			function->arguments = a;
		}
		/* ignore trailing comma (needed for foo(bar(), 1); expressions) */
		if(global_token->s[0] == ',')
		{
			global_token = global_token->next;
			require(NULL != global_token, "naked comma in collect arguments\n");
		}
		require(NULL != global_token, "Argument list never completed\n");
	}
	global_token = global_token->next;
}
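
/* Illustration: the depth field records where each argument lives relative to the
 * frame, one register_size slot per argument. For a hypothetical int foo(int a, int b),
 * a gets depth -4 and b depth -8 on x86, while on AArch64 the offsets grow upward
 * instead (register_size, then 2 * register_size). */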

void declare_function()
{
	current_count = 0;
	function = sym_declare(global_token->prev->s, NULL, global_function_list);
	/* allow previously defined functions to be looked up */
	global_function_list = function;
	if((KNIGHT_NATIVE == Architecture) && match("main", function->s))
	{
		require_match("Impossible error ( vanished\n", "(");
		require_match("Reality ERROR (USING KNIGHT-NATIVE)\nHardware does not support arguments\nthus neither can main on this architecture\ntry tape_01 and tape_02 instead\n", ")");
	}
	else collect_arguments();
	require(NULL != global_token, "Function definitions either need to be prototypes or full\n");
	/* If just a prototype don't waste time */
	if(global_token->s[0] == ';') global_token = global_token->next;
	else
	{
		emit_out("# Defining function ");
		emit_out(function->s);
		emit_out("\n");
		emit_out(":FUNCTION_");
		emit_out(function->s);
		emit_out("\n");
		statement();
		/* Prevent duplicate RETURNS */
		if(((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) && !match("RET R15\n", output_list->s)) emit_out("RET R15\n");
		else if((X86 == Architecture) && !match("ret\n", output_list->s)) emit_out("ret\n");
		else if((AMD64 == Architecture) && !match("ret\n", output_list->s)) emit_out("ret\n");
		else if((ARMV7L == Architecture) && !match("'1' LR RETURN\n", output_list->s)) emit_out("'1' LR RETURN\n");
		else if((AARCH64 == Architecture) && !match("RETURN\n", output_list->s)) emit_out("RETURN\n");
		else if((RISCV32 == Architecture) && !match("ret\n", output_list->s)) emit_out("ret\n");
		else if((RISCV64 == Architecture) && !match("ret\n", output_list->s)) emit_out("ret\n");
	}
}
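
/* Illustration: a hypothetical definition int foo(int a) { ... } starts its output with
 *     # Defining function foo
 *     :FUNCTION_foo
 * followed by the body; a trailing ret/RETURN is appended only if the body did not
 * already end with one. */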

void global_constant()
{
	global_token = global_token->next;
	require(NULL != global_token, "CONSTANT lacks a name\n");
	global_constant_list = sym_declare(global_token->s, NULL, global_constant_list);
	require(NULL != global_token->next, "CONSTANT lacks a value\n");
	if(match("sizeof", global_token->next->s))
	{
		global_token = global_token->next->next;
		require_match("ERROR in CONSTANT with sizeof\nMissing (\n", "(");
		struct type* a = type_name();
		require_match("ERROR in CONSTANT with sizeof\nMissing )\n", ")");
		global_token->prev->s = int2str(a->size, 10, TRUE);
		global_constant_list->arguments = global_token->prev;
	}
	else
	{
		global_constant_list->arguments = global_token->next;
		global_token = global_token->next->next;
	}
}
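
/* Illustration (names hypothetical): "CONSTANT TRUE 1" records the token 1 as the
 * constant's value, while "CONSTANT PTR_SIZE sizeof(int)" overwrites the text of the
 * token after the closing ) with the computed size string (e.g. "4") and records
 * that instead. */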

struct type* global_typedef()
{
	struct type* type_size;
	/* typedef $TYPE $NAME; */
	global_token = global_token->next;
	type_size = type_name();
	require(NULL != global_token, "Received EOF while reading typedef\n");
	type_size = mirror_type(type_size, global_token->s);
	add_primitive(type_size);
	global_token = global_token->next;
	require_match("ERROR in typedef statement\nMissing ;\n", ";");
	return type_size;
}
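
/* Illustration: a hypothetical "typedef int SCM;" mirrors the existing int type under
 * the new name SCM and registers it as a primitive, so SCM can then appear anywhere a
 * type name is expected. */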

void global_static_array(struct type* type_size, struct token_list* name)
{
	int size;
	maybe_bootstrap_error("global array definitions");
	globals_list = emit(":GLOBAL_", globals_list);
	globals_list = emit(name->s, globals_list);
	globals_list = emit("\n&GLOBAL_STORAGE_", globals_list);
	globals_list = emit(name->s, globals_list);
	if(AARCH64 == Architecture || AMD64 == Architecture || RISCV64 == Architecture)
	{
		globals_list = emit(" %0", globals_list);
	}
	globals_list = emit("\n:GLOBAL_STORAGE_", globals_list);
	globals_list = emit(name->s, globals_list);
	require(NULL != global_token->next, "Unterminated global\n");
	global_token = global_token->next;
	/* Make sure not negative */
	if(match("-", global_token->s))
	{
		line_error();
		fputs("Negative values are not supported for allocated arrays\n", stderr);
		exit(EXIT_FAILURE);
	}
	/* length */
	size = strtoint(global_token->s) * type_size->size;
	/* Stop bad states */
	if((size < 0) || (size > 0x100000))
	{
		line_error();
		fputs("M2-Planet is very inefficient so you probably don't want to allocate over 1MB into your binary for NULLs\n", stderr);
		exit(EXIT_FAILURE);
	}
	/* Ensure properly closed */
	global_token = global_token->next;
	require_match("missing close bracket\n", "]");
	require_match("missing ;\n", ";");
	globals_list = emit("\n'", globals_list);
	while(0 != size)
	{
		globals_list = emit(" 00", globals_list);
		size = size - 1;
	}
	globals_list = emit("'\n", globals_list);
}
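
/* Illustration: a hypothetical "int table[4];" on a 32-bit target (int size 4) emits
 *     :GLOBAL_table
 *     &GLOBAL_STORAGE_table
 *     :GLOBAL_STORAGE_table
 *     ' 00 00 ... 00'   (16 zero bytes)
 * so GLOBAL_table holds a pointer to the zero-filled storage; 64-bit targets pad that
 * pointer with an extra %0 word. */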

void global_assignment()
{
	/* Store the global's value */
	globals_list = emit(":GLOBAL_", globals_list);
	globals_list = emit(global_token->prev->s, globals_list);
	globals_list = emit("\n", globals_list);
	global_token = global_token->next;
	require(NULL != global_token, "Global lacks a value in assignment\n");
	unsigned padding_zeroes;
	if(in_set(global_token->s[0], "0123456789"))
	{ /* Assume Int */
		globals_list = emit("%", globals_list);
		globals_list = emit(global_token->s, globals_list);
		/* broken for big endian architectures */
		padding_zeroes = (register_size / 4) - 1;
		while(padding_zeroes > 0)
		{
			/* Assume positive Int */
			globals_list = emit(" %0", globals_list);
			padding_zeroes = padding_zeroes - 1;
		}
		globals_list = emit("\n", globals_list);
	}
	else if('"' == global_token->s[0])
	{ /* Assume a string */
		globals_list = emit("&GLOBAL_", globals_list);
		globals_list = emit(global_token->prev->prev->s, globals_list);
		globals_list = emit("_contents\n", globals_list);
		globals_list = emit(":GLOBAL_", globals_list);
		globals_list = emit(global_token->prev->prev->s, globals_list);
		globals_list = emit("_contents\n", globals_list);
		globals_list = emit(parse_string(global_token->s), globals_list);
	}
	else
	{
		line_error();
		fputs("Received ", stderr);
		fputs(global_token->s, stderr);
		fputs(" in program\n", stderr);
		exit(EXIT_FAILURE);
	}
	global_token = global_token->next;
	require_match("ERROR in Program\nMissing ;\n", ";");
}
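
/* Illustration (names hypothetical): "int answer = 42;" emits
 *     :GLOBAL_answer
 *     %42
 * plus a trailing " %0" padding word on 64-bit targets, while "char* msg = "hi";"
 * emits a pointer word &GLOBAL_msg_contents under :GLOBAL_msg, followed by the
 * :GLOBAL_msg_contents label and the encoded string bytes. */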

/*
 * program:
 *     declaration
 *     declaration program
 *
 * declaration:
 *     CONSTANT identifier value
 *     typedef type identifier;
 *     type-name identifier ;
 *     type-name identifier = value ;
 *     type-name identifier [ value ];
 *     type-name identifier ( parameter-list ) ;
 *     type-name identifier ( parameter-list ) statement
 *
 * parameter-list:
 *     parameter-declaration
 *     parameter-list, parameter-declaration
 *
 * parameter-declaration:
 *     type-name identifier-opt
 */
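
/* Illustration of top-level declarations this grammar accepts (hypothetical code):
 *     CONSTANT MAX 42
 *     typedef int SCM;
 *     int counter;
 *     int counter2 = 7;
 *     char buffer[16];
 *     int add(int a, int b);
 *     int add(int a, int b) { return a + b; }
 */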

void program()
{
	unsigned i;
	function = NULL;
	Address_of = FALSE;
	struct type* type_size;

new_type:
	/* Deal with garbage input */
	if(NULL == global_token) return;
	require('#' != global_token->s[0], "unhandled macro directive\n");
	require(!match("\n", global_token->s), "unexpected newline token\n");
	/* Handle cc_* CONSTANT statements */
	if(match("CONSTANT", global_token->s))
	{
		global_constant();
		goto new_type;
	}
	/* Handle C typedef statements */
	if(match("typedef", global_token->s))
	{
		type_size = global_typedef();
		goto new_type;
	}
	type_size = type_name();
	/* Deal with case of struct definitions */
	if(NULL == type_size) goto new_type;
	require(NULL != global_token->next, "Unterminated global\n");
	/* Add to global symbol table */
	global_symbol_list = sym_declare(global_token->s, type_size, global_symbol_list);
	global_token = global_token->next;
	/* Deal with global variables */
	if(match(";", global_token->s))
	{
		/* Ensure enough bytes are allocated to store the global variable.
		 * In some cases it allocates too much but that is harmless. */
		globals_list = emit(":GLOBAL_", globals_list);
		globals_list = emit(global_token->prev->s, globals_list);
		/* round up division */
		i = ceil_div(type_size->size, register_size);
		globals_list = emit("\n", globals_list);
		while(i != 0)
		{
			globals_list = emit("NULL\n", globals_list);
			i = i - 1;
		}
		global_token = global_token->next;
		goto new_type;
	}
	/* Deal with global functions */
	if(match("(", global_token->s))
	{
		declare_function();
		goto new_type;
	}
	/* Deal with assignment to a global variable */
	if(match("=", global_token->s))
	{
		global_assignment();
		goto new_type;
	}
	/* Deal with global static arrays */
	if(match("[", global_token->s))
	{
		global_static_array(type_size, global_token->prev);
		goto new_type;
	}
	/* Everything else is just an error */
	line_error();
	fputs("Received ", stderr);
	fputs(global_token->s, stderr);
	fputs(" in program\n", stderr);
	exit(EXIT_FAILURE);
}
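
/* Output helpers: the emitted lists keep the most recently emitted string at their
 * head (which is why code above can peek at output_list->s to see the last
 * instruction), so recursive_output reverses the list before printing to get the
 * text back into program order. */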

void recursive_output(struct token_list* head, FILE* out)
{
	struct token_list* i = reverse_list(head);
	while(NULL != i)
	{
		fputs(i->s, out);
		i = i->next;
	}
}

void output_tokens(struct token_list* i, FILE* out)
{
	while(NULL != i)
	{
		fputs(i->s, out);
		fputs(" ", out);
		i = i->next;
	}
}