expr.c

  1. /* Convert tree expression to rtl instructions, for GNU compiler.
  2. Copyright (C) 1987 Free Software Foundation, Inc.
  3. This file is part of GNU CC.
  4. GNU CC is distributed in the hope that it will be useful,
  5. but WITHOUT ANY WARRANTY. No author or distributor
  6. accepts responsibility to anyone for the consequences of using it
  7. or for whether it serves any particular purpose or works at all,
  8. unless he says so in writing. Refer to the GNU CC General Public
  9. License for full details.
  10. Everyone is granted permission to copy, modify and redistribute
  11. GNU CC, but only under the conditions described in the
  12. GNU CC General Public License. A copy of this license is
  13. supposed to have been given to you along with GNU CC so you
  14. can know your rights and responsibilities. It should be in a
  15. file named COPYING. Among other things, the copyright notice
  16. and this notice must be preserved on all copies. */
  17. #include "config.h"
  18. #include "rtl.h"
  19. #include "tree.h"
  20. #include "insn-flags.h"
  21. #include "insn-codes.h"
  22. #include "expr.h"
  23. /* If this is nonzero, we do not bother generating VOLATILE
  24. around volatile memory references, and we are willing to
  25. output indirect addresses. If cse is to follow, we reject
  26. indirect addresses so a useful potential cse is generated;
  27. if it is used only once, instruction combination will produce
  28. the same indirect address eventually. */
  29. int cse_not_expected;
  30. /* Nonzero to generate code for all the subroutines within an
  31. expression before generating the upper levels of the expression.
  32. Nowadays this is never zero. */
  33. int do_preexpand_calls = 1;
  34. /* Number of units that we should eventually pop off the stack.
  35. These are the arguments to function calls that have already returned. */
  36. int pending_stack_adjust;
  37. /* Total size of arguments already pushed for function calls that
  38. have not happened yet. Also counts 1 for each level of conditional
  39. expression that we are inside. When this is nonzero,
  40. args passed to function calls must be popped right away
  41. to ensure contiguity of argument lists for future calls. */
  42. int current_args_size;
  43. static rtx store_expr ();
  44. static rtx expand_call ();
  45. static void gen_call_1 ();
  46. static rtx compare ();
  47. static rtx compare1 ();
  48. static rtx do_store_flag ();
  49. static void preexpand_calls ();
  50. /* MOVE_RATIO is the number of move instructions that is better than
  51. a block move. */
  52. #if defined (HAVE_movstrhi) || defined (HAVE_movstrsi)
  53. #define MOVE_RATIO 2
  54. #else
  55. #define MOVE_RATIO 6
  56. #endif
  57. /* Table indexed by tree code giving 1 if the code is for a
  58. comparison operation, or anything that is most easily
  59. computed with a conditional branch.
  60. We include tree.def to give it the proper length.
  61. The contents thus created are irrelevant.
  62. The real contents are initialized in init_comparisons. */
  63. #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
  64. static char comparison_code[] = {
  65. #include "tree.def"
  66. };
  67. #undef DEFTREECODE
  68. init_comparisons ()
  69. {
  70. bzero (comparison_code, sizeof comparison_code);
  71. comparison_code[(int) EQ_EXPR] = 1;
  72. comparison_code[(int) NE_EXPR] = 1;
  73. comparison_code[(int) LT_EXPR] = 1;
  74. comparison_code[(int) GT_EXPR] = 1;
  75. comparison_code[(int) LE_EXPR] = 1;
  76. comparison_code[(int) GE_EXPR] = 1;
  77. }
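/* Illustrative note (not part of the original source): the #include of
   tree.def above serves only to give the table one element per tree code;
   as the comment says, the initial contents are discarded.  With a
   hypothetical tree.def entry such as

       DEFTREECODE (EQ_EXPR, "eq_expr", "<", 2)

   the DEFTREECODE macro defined above turns that line into just "2," so
   the array length comes out right, and init_comparisons then zeroes the
   table and marks only the comparison codes with 1.  */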
  78. /* Manage the queue of increment instructions to be output
  79. for POSTINCREMENT_EXPR expressions, etc. */
  80. static rtx pending_chain;
  81. /* Queue up to increment (or change) VAR later. BODY says how:
  82. BODY should be the same thing you would pass to emit_insn
  83. to increment right away. It will go to emit_insn later on.
  84. The value is a QUEUED expression to be used in place of VAR
  85. where you want to guarantee the pre-incrementation value of VAR.
  86. When constructing BODY, you should pass VAR through copy_rtx
  87. each time it is used. If VAR is a MEM, this prevents BODY from
  88. sharing structure incorrectly with itself or with places that
  89. explicitly use VAR. */
  90. static rtx
  91. enqueue_insn (var, body)
  92. rtx var, body;
  93. {
  94. pending_chain = gen_rtx (QUEUED, GET_MODE (var),
  95. var, 0, 0, body, pending_chain);
  96. return pending_chain;
  97. }
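/* Illustrative sketch (not part of the original source): how an in-file
   caller might queue a post-increment of VAR by 1.  VAR is a hypothetical
   SImode rtx and the SET/PLUS body is made up, but it follows the rules
   stated above: VAR is passed through copy_rtx each time it appears in
   BODY.  */
#if 0
  {
    rtx body = gen_rtx (SET, VOIDmode, copy_rtx (var),
                        gen_rtx (PLUS, SImode, copy_rtx (var),
                                 gen_rtx (CONST_INT, VOIDmode, 1)));
    /* QUEUED stands for the value VAR had before the increment.  */
    rtx queued = enqueue_insn (var, body);
  }
#endif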
  98. /* Use protect_from_queue to convert a QUEUED expression
  99. into something that you can put immediately into an instruction.
  100. If the queued incrementation has not happened yet,
  101. protect_from_queue returns the variable itself.
  102. If the incrementation has happened, protect_from_queue returns a temp
  103. that contains a copy of the old value of the variable.
  104. Any time an rtx which might possibly be a QUEUED is to be put
  105. into an instruction, it must be passed through protect_from_queue first.
  106. QUEUED expressions are not meaningful in instructions.
  107. Do not pass a value through protect_from_queue and then hold
  108. on to it for a while before putting it in an instruction!
  109. If the queue is flushed in between, incorrect code will result. */
  110. rtx
  111. protect_from_queue (x, modify)
  112. register rtx x;
  113. int modify;
  114. {
  115. register RTX_CODE code = GET_CODE (x);
  116. if (code != QUEUED)
  117. {
  118. /* A special hack for read access to (MEM (QUEUED ...))
  119. to facilitate use of autoincrement.
  120. Make a copy of the contents of the memory location
  121. rather than a copy of the address. */
  122. if (code == MEM && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
  123. {
  124. register rtx y = XEXP (x, 0);
  125. XEXP (x, 0) = QUEUED_VAR (y);
  126. if (QUEUED_INSN (y))
  127. {
  128. register rtx temp = gen_reg_rtx (GET_MODE (x));
  129. emit_insn_before (gen_move_insn (temp, x),
  130. QUEUED_INSN (y));
  131. return temp;
  132. }
  133. return x;
  134. }
  135. /* Otherwise, recursively protect the subexpressions of all
  136. the kinds of rtx's that can contain a QUEUED. */
  137. if (code == MEM)
  138. XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
  139. else if (code == PLUS || code == MULT)
  140. {
  141. XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
  142. XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
  143. }
  144. return x;
  145. }
  146. /* If the increment has not happened, use the variable itself. */
  147. if (QUEUED_INSN (x) == 0)
  148. return QUEUED_VAR (x);
  149. /* If the increment has happened and a pre-increment copy exists,
  150. use that copy. */
  151. if (QUEUED_COPY (x) != 0)
  152. return QUEUED_COPY (x);
  153. /* The increment has happened but we haven't set up a pre-increment copy.
  154. Set one up now, and use it. */
  155. QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  156. emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
  157. QUEUED_INSN (x));
  158. return QUEUED_COPY (x);
  159. }
  160. /* Perform all the pending incrementations. */
  161. void
  162. emit_queue ()
  163. {
  164. register rtx p;
  165. while (p = pending_chain)
  166. {
  167. QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
  168. pending_chain = QUEUED_NEXT (p);
  169. }
  170. }
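/* Illustrative sketch (not part of the original source): the intended
   life cycle of the queue, per the comments above.  X is a hypothetical
   rtx that may contain a QUEUED.  */
#if 0
  {
    /* Strip any QUEUED before X goes into an instruction; do not hold
       the result across a queue flush.  */
    rtx safe = protect_from_queue (x, 0);
    emit_insn (gen_move_insn (gen_reg_rtx (GET_MODE (safe)), safe));
    /* At a suitable sequence point, emit all queued increments.  */
    emit_queue ();
  }
#endif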
  171. void
  172. init_queue ()
  173. {
  174. if (pending_chain)
  175. abort ();
  176. }
  177. /* Copy data from FROM to TO, where the machine modes are not the same.
  178. Both modes may be integer, or both may be floating.
  179. UNSIGNEDP should be nonzero if FROM is an unsigned type.
  180. This causes zero-extension instead of sign-extension. */
  181. void
  182. convert_move (to, from, unsignedp)
  183. register rtx to, from;
  184. int unsignedp;
  185. {
  186. enum machine_mode to_mode = GET_MODE (to);
  187. enum machine_mode from_mode = GET_MODE (from);
  188. int to_real = to_mode == SFmode || to_mode == DFmode;
  189. int from_real = from_mode == SFmode || from_mode == DFmode;
  190. int extending = (int) to_mode > (int) from_mode;
  191. to = protect_from_queue (to, 1);
  192. from = protect_from_queue (from, 0);
  193. if (to_real != from_real)
  194. abort ();
  195. if (to_mode == from_mode || GET_CODE (from) == CONST_INT)
  196. {
  197. emit_move_insn (to, from);
  198. return;
  199. }
  200. if (to_real)
  201. {
  202. #ifdef HAVE_extendsfdf2
  203. if (HAVE_extendsfdf2 && extending)
  204. {
  205. emit_insn (gen_extendsfdf2 (to, from));
  206. return;
  207. }
  208. #endif
  209. #ifdef HAVE_truncdfsf2
  210. if (HAVE_truncdfsf2 && ! extending)
  211. {
  212. emit_insn (gen_truncdfsf2 (to, from));
  213. return;
  214. }
  215. #endif
  216. emit_library_call (gen_rtx (SYMBOL_REF, Pmode, (extending
  217. ? "extendsfdf2"
  218. : "truncdfsf2")),
  219. 1, from, (extending ? SFmode : DFmode));
  220. copy_function_value (to);
  221. return;
  222. }
  223. if (to_mode == DImode)
  224. {
  225. emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
  226. if (unsignedp)
  227. {
  228. convert_move (gen_lowpart (SImode, to), from, unsignedp);
  229. emit_clr_insn (gen_highpart (SImode, to));
  230. }
  231. #ifdef HAVE_sltsi
  232. else if (HAVE_sltsi)
  233. {
  234. convert_move (gen_lowpart (SImode, to), from, unsignedp);
  235. emit_insn (gen_sltsi (gen_highpart (SImode, to)));
  236. }
  237. #endif
  238. else
  239. {
  240. register rtx label = gen_label_rtx ();
  241. emit_clr_insn (gen_highpart (SImode, to));
  242. convert_move (gen_lowpart (SImode, to), from, unsignedp);
  243. emit_cmp_insn (gen_lowpart (SImode, to),
  244. gen_rtx (CONST_INT, VOIDmode, 0),
  245. 0, 0);
  246. emit_jump_insn (gen_bge (label));
  247. expand_unop (SImode, one_cmpl_optab,
  248. gen_highpart (SImode, to), gen_highpart (SImode, to),
  249. 1);
  250. emit_label (label);
  251. }
  252. return;
  253. }
  254. if (from_mode == DImode)
  255. {
  256. convert_move (to, gen_lowpart (SImode, from), 0);
  257. return;
  258. }
  259. /* Now follow all the conversions between integers
  260. no more than a word long. */
  261. if (to_mode == SImode && from_mode == HImode)
  262. {
  263. if (unsignedp)
  264. {
  265. #ifdef HAVE_zero_extendhisi2
  266. if (HAVE_zero_extendhisi2)
  267. emit_insn (gen_zero_extendhisi2 (to, from));
  268. else
  269. #endif
  270. abort ();
  271. }
  272. else
  273. {
  274. #ifdef HAVE_extendhisi2
  275. if (HAVE_extendhisi2)
  276. emit_insn (gen_extendhisi2 (to, from));
  277. else
  278. #endif
  279. abort ();
  280. }
  281. return;
  282. }
  283. if (to_mode == SImode && from_mode == QImode)
  284. {
  285. if (unsignedp)
  286. {
  287. #ifdef HAVE_zero_extendqisi2
  288. if (HAVE_zero_extendqisi2)
  289. {
  290. emit_insn (gen_zero_extendqisi2 (to, from));
  291. return;
  292. }
  293. #endif
  294. #if defined (HAVE_zero_extendqihi2) && defined (HAVE_extendhisi2)
  295. if (HAVE_zero_extendqihi2 && HAVE_extendhisi2)
  296. {
  297. register rtx temp = gen_reg_rtx (HImode);
  298. emit_insn (gen_zero_extendqihi2 (temp, from));
  299. emit_insn (gen_extendhisi2 (to, temp));
  300. return;
  301. }
  302. #endif
  303. }
  304. else
  305. {
  306. #ifdef HAVE_extendqisi2
  307. if (HAVE_extendqisi2)
  308. {
  309. emit_insn (gen_extendqisi2 (to, from));
  310. return;
  311. }
  312. #endif
  313. #if defined (HAVE_extendqihi2) && defined (HAVE_extendhisi2)
  314. if (HAVE_extendqihi2 && HAVE_extendhisi2)
  315. {
  316. register rtx temp = gen_reg_rtx (HImode);
  317. emit_insn (gen_extendqihi2 (temp, from));
  318. emit_insn (gen_extendhisi2 (to, temp));
  319. return;
  320. }
  321. #endif
  322. }
  323. abort ();
  324. }
  325. if (to_mode == HImode && from_mode == QImode)
  326. {
  327. if (unsignedp)
  328. {
  329. #ifdef HAVE_zero_extendqihi2
  330. if (HAVE_zero_extendqihi2)
  331. {
  332. emit_insn (gen_zero_extendqihi2 (to, from));
  333. return;
  334. }
  335. #endif
  336. }
  337. else
  338. {
  339. #ifdef HAVE_extendqihi2
  340. if (HAVE_extendqihi2)
  341. {
  342. emit_insn (gen_extendqihi2 (to, from));
  343. return;
  344. }
  345. #endif
  346. }
  347. abort ();
  348. }
  349. /* Now we are truncating an integer to a smaller one.
  350. If the result is a temporary, we might as well just copy it,
  351. since only the low-order part of the result needs to be valid
  352. and it is valid with no change. */
  353. if (GET_CODE (to) == REG)
  354. {
  355. if (GET_CODE (from) == REG)
  356. {
  357. emit_move_insn (to, gen_lowpart (GET_MODE (to), from));
  358. return;
  359. }
  360. #ifndef BYTES_BIG_ENDIAN
  361. else if (GET_CODE (from) == MEM)
  362. {
  363. register rtx addr = XEXP (from, 0);
  364. GO_IF_LEGITIMATE_ADDRESS (GET_MODE (to), addr, win);
  365. if (0)
  366. {
  367. win:
  368. emit_move_insn (to, gen_rtx (MEM, GET_MODE (to), addr));
  369. return;
  370. }
  371. }
  372. #endif /* not BYTES_BIG_ENDIAN */
  373. }
  374. if (from_mode == SImode && to_mode == HImode)
  375. {
  376. #ifdef HAVE_truncsihi2
  377. if (HAVE_truncsihi2)
  378. {
  379. emit_insn (gen_truncsihi2 (to, from));
  380. return;
  381. }
  382. #endif
  383. abort ();
  384. }
  385. if (from_mode == SImode && to_mode == QImode)
  386. {
  387. #ifdef HAVE_truncsiqi2
  388. if (HAVE_truncsiqi2)
  389. {
  390. emit_insn (gen_truncsiqi2 (to, from));
  391. return;
  392. }
  393. #endif
  394. abort ();
  395. }
  396. if (from_mode == HImode && to_mode == QImode)
  397. {
  398. #ifdef HAVE_trunchiqi2
  399. if (HAVE_trunchiqi2)
  400. {
  401. emit_insn (gen_trunchiqi2 (to, from));
  402. return;
  403. }
  404. #endif
  405. abort ();
  406. }
  407. }
  408. /* Return an rtx for a value that would result
  409. from converting X to mode MODE.
  410. Both X and MODE may be floating, or both integer.
  411. UNSIGNEDP is nonzero if X is an unsigned value.
  412. This can be done by referring to a part of X in place
  413. or by copying to a new temporary with conversion. */
  414. rtx
  415. convert_to_mode (mode, x, unsignedp)
  416. enum machine_mode mode;
  417. rtx x;
  418. int unsignedp;
  419. {
  420. register rtx temp;
  421. if (mode == GET_MODE (x))
  422. return x;
  423. if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x)))
  424. return gen_lowpart (mode, x);
  425. temp = gen_reg_rtx (mode);
  426. convert_move (temp, x, unsignedp);
  427. return temp;
  428. }
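/* Illustrative sketch (not part of the original source): widening a
   hypothetical unsigned HImode value HIVAL to SImode.  convert_to_mode
   allocates the pseudo and calls convert_move; the final argument 1
   requests zero-extension, as described above.  */
#if 0
  {
    rtx wide = convert_to_mode (SImode, hival, 1);
    /* For the widening case this is roughly equivalent to: */
    rtx temp = gen_reg_rtx (SImode);
    convert_move (temp, hival, 1);
  }
#endif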
  429. /* Generate several move instructions to copy LEN bytes
  430. from address FROM to address TO. The caller must pass FROM and TO
  431. through protect_from_queue before calling.
  432. FROM_VOL and TO_VOL are nonzero if references to
  433. FROM and TO, respectively, should be marked VOLATILE.
  434. ALIGN (in bytes) is maximum alignment we can assume. */
  435. struct move_by_pieces
  436. {
  437. rtx to;
  438. int autinc_to;
  439. int explicit_inc_to;
  440. int to_vol;
  441. rtx from;
  442. int autinc_from;
  443. int explicit_inc_from;
  444. int from_vol;
  445. int len;
  446. int offset;
  447. int reverse;
  448. };
  449. static void
  450. move_by_pieces (to, from, len, align, to_vol, from_vol)
  451. rtx to, from;
  452. int len, align;
  453. int to_vol, from_vol;
  454. {
  455. struct move_by_pieces data;
  456. data.offset = 0;
  457. data.to = to;
  458. data.from = from;
  459. data.to_vol = to_vol;
  460. data.from_vol = from_vol;
  461. data.autinc_to = (GET_CODE (to) == PRE_INC || GET_CODE (to) == PRE_DEC
  462. || GET_CODE (to) == POST_INC || GET_CODE (to) == POST_DEC);
  463. data.autinc_from = (GET_CODE (from) == PRE_INC || GET_CODE (from) == PRE_DEC
  464. || GET_CODE (from) == POST_INC
  465. || GET_CODE (from) == POST_DEC);
  466. data.explicit_inc_from = 0;
  467. data.explicit_inc_to = 0;
  468. data.reverse = (GET_CODE (to) == PRE_DEC || GET_CODE (to) == POST_DEC);
  469. if (data.reverse) data.offset = len;
  470. data.len = len;
  471. /* If copying requires more than two move insns,
  472. copy addresses to registers (to make displacements shorter)
  473. and use post-increment if available. */
  474. if (!(data.autinc_from && data.autinc_to)
  475. && move_by_pieces_ninsns (len, align) > 2)
  476. {
  477. #ifdef HAVE_PRE_DECREMENT
  478. if (data.reverse && ! data.autinc_from)
  479. {
  480. data.from = copy_to_reg (plus_constant (from, len));
  481. data.autinc_from = 1;
  482. data.explicit_inc_from = -1;
  483. }
  484. #endif
  485. #ifdef HAVE_POST_INCREMENT
  486. if (! data.autinc_from)
  487. {
  488. data.from = copy_to_reg (from);
  489. data.autinc_from = 1;
  490. data.explicit_inc_from = 1;
  491. }
  492. #endif
  493. if (!data.autinc_from && CONSTANT_ADDRESS_P (from))
  494. data.from = copy_to_reg (from);
  495. #ifdef HAVE_PRE_DECREMENT
  496. if (data.reverse && ! data.autinc_to)
  497. {
  498. data.to = copy_to_reg (plus_constant (to, len));
  499. data.autinc_to = 1;
  500. data.explicit_inc_to = -1;
  501. }
  502. #endif
  503. #ifdef HAVE_POST_INCREMENT
  504. if (! data.reverse && ! data.autinc_to)
  505. {
  506. data.to = copy_to_reg (to);
  507. data.autinc_to = 1;
  508. data.explicit_inc_to = 1;
  509. }
  510. #endif
  511. if (!data.autinc_to && CONSTANT_ADDRESS_P (to))
  512. data.to = copy_to_reg (to);
  513. }
  514. #ifdef STRICT_ALIGNMENT
  515. if (align > MOVE_MAX)
  516. align = MOVE_MAX;
  517. #else
  518. align = MOVE_MAX;
  519. #endif
  520. #ifdef HAVE_movti
  521. if (HAVE_movti && align >= GET_MODE_SIZE (TImode))
  522. move_by_pieces_1 (gen_movti, TImode, &data);
  523. #endif
  524. #ifdef HAVE_movdi
  525. if (HAVE_movdi && align >= GET_MODE_SIZE (DImode))
  526. move_by_pieces_1 (gen_movdi, DImode, &data);
  527. #endif
  528. if (align >= GET_MODE_SIZE (SImode))
  529. move_by_pieces_1 (gen_movsi, SImode, &data);
  530. if (align >= GET_MODE_SIZE (HImode))
  531. move_by_pieces_1 (gen_movhi, HImode, &data);
  532. move_by_pieces_1 (gen_movqi, QImode, &data);
  533. }
  534. /* Return number of insns required to move L bytes by pieces.
  535. ALIGN (in bytes) is maximum alignment we can assume. */
  536. int
  537. move_by_pieces_ninsns (l, align)
  538. unsigned int l;
  539. int align;
  540. {
  541. register int n_insns = 0;
  542. #ifdef STRICT_ALIGNMENT
  543. if (align > MOVE_MAX)
  544. align = MOVE_MAX;
  545. #else
  546. align = MOVE_MAX;
  547. #endif
  548. #ifdef HAVE_movti
  549. if (HAVE_movti && align >= GET_MODE_SIZE (TImode))
  550. n_insns += l / GET_MODE_SIZE (TImode), l %= GET_MODE_SIZE (TImode);
  551. #endif
  552. #ifdef HAVE_movdi
  553. if (HAVE_movdi && align >= GET_MODE_SIZE (DImode))
  554. n_insns += l / GET_MODE_SIZE (DImode), l %= GET_MODE_SIZE (DImode);
  555. #endif
  556. if (HAVE_movsi && align >= GET_MODE_SIZE (SImode))
  557. n_insns += l / GET_MODE_SIZE (SImode), l %= GET_MODE_SIZE (SImode);
  558. if (HAVE_movhi && align >= GET_MODE_SIZE (HImode))
  559. n_insns += l / GET_MODE_SIZE (HImode), l %= GET_MODE_SIZE (HImode);
  560. n_insns += l;
  561. return n_insns;
  562. }
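/* Illustrative example (not part of the original source): assuming SImode
   and HImode move patterns exist and there are no movti/movdi patterns,
   copying 11 bytes at alignment 4 takes 2 SImode moves + 1 HImode move
   + 1 QImode move, so move_by_pieces_ninsns (11, 4) returns 4.  That count
   is what gets compared against MOVE_RATIO to choose between
   move_by_pieces and a block move.  */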
  563. /* Subroutine of move_by_pieces. Move as many bytes as appropriate
  564. with move instructions for mode MODE. GENFUN is the gen_... function
  565. to make a move insn for that mode. DATA has all the other info. */
  566. move_by_pieces_1 (genfun, mode, data)
  567. rtx (*genfun) ();
  568. enum machine_mode mode;
  569. struct move_by_pieces *data;
  570. {
  571. register int size = GET_MODE_SIZE (mode);
  572. register rtx to1, from1;
  573. #define add_offset(FLAG,X) (FLAG ? (X) : plus_constant (X, data->offset))
  574. while (data->len >= size)
  575. {
  576. to1 = gen_rtx (MEM, mode, add_offset (data->autinc_to, data->to));
  577. from1 = gen_rtx (MEM, mode, add_offset (data->autinc_from, data->from));
  578. if (data->to_vol) to1 = gen_rtx (VOLATILE, mode, to1);
  579. if (data->from_vol) from1 = gen_rtx (VOLATILE, mode, from1);
  580. if (data->reverse) data->offset -= size;
  581. #ifdef HAVE_PRE_DECREMENT
  582. if (data->explicit_inc_to < 0)
  583. emit_insn (gen_sub2_insn (data->to,
  584. gen_rtx (CONST_INT, VOIDmode, size)));
  585. if (data->explicit_inc_from < 0)
  586. emit_insn (gen_sub2_insn (data->from,
  587. gen_rtx (CONST_INT, VOIDmode, size)));
  588. #endif
  589. emit_insn (genfun (to1, from1));
  590. #ifdef HAVE_POST_INCREMENT
  591. if (data->explicit_inc_to > 0)
  592. emit_insn (gen_add2_insn (data->to,
  593. gen_rtx (CONST_INT, VOIDmode, size)));
  594. if (data->explicit_inc_from > 0)
  595. emit_insn (gen_add2_insn (data->from,
  596. gen_rtx (CONST_INT, VOIDmode, size)));
  597. #endif
  598. if (! data->reverse) data->offset += size;
  599. data->len -= size;
  600. }
  601. }
  602. /* Emit code to move a block Y to a block X.
  603. This may be done with string-move instructions,
  604. with multiple scalar move instructions, or with a library call.
  605. Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
  606. with mode BLKmode.
  607. SIZE is an rtx that says how long they are.
  608. ALIGN is the maximum alignment we can assume they have,
  609. measured in bytes. */
  610. static void
  611. emit_block_move (x, y, size, align)
  612. rtx x, y;
  613. rtx size;
  614. int align;
  615. {
  616. register int max_step;
  617. rtx xinner, yinner;
  618. int xvolatile = 0, yvolatile = 0;
  619. if (GET_MODE (x) != BLKmode)
  620. abort ();
  621. if (GET_MODE (y) != BLKmode)
  622. abort ();
  623. x = protect_from_queue (x, 1);
  624. y = protect_from_queue (y, 0);
  625. xinner = x, yinner = y;
  626. if (GET_CODE (x) == VOLATILE)
  627. xvolatile = 1, xinner = XEXP (x, 0);
  628. if (GET_CODE (y) == VOLATILE)
  629. yvolatile = 1, yinner = XEXP (y, 0);
  630. if (GET_CODE (xinner) != MEM)
  631. abort ();
  632. if (GET_CODE (yinner) != MEM)
  633. abort ();
  634. if (size == 0)
  635. abort ();
  636. if (GET_CODE (size) == CONST_INT
  637. && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
  638. < MOVE_RATIO))
  639. move_by_pieces (XEXP (xinner, 0), XEXP (yinner, 0),
  640. INTVAL (size), align,
  641. xvolatile, yvolatile);
  642. else
  643. {
  644. #ifdef HAVE_movstrsi
  645. if (HAVE_movstrsi)
  646. {
  647. emit_insn (gen_movstrsi (x, y, size));
  648. return;
  649. }
  650. #endif
  651. #ifdef HAVE_movstrhi
  652. if (HAVE_movstrhi
  653. && GET_CODE (size) == CONST_INT
  654. && ((unsigned) INTVAL (size)
  655. < (1 << (GET_MODE_SIZE (HImode) * BITS_PER_UNIT - 1))))
  656. {
  657. emit_insn (gen_movstrhi (x, y, size));
  658. return;
  659. }
  660. #endif
  661. emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "bcopy"),
  662. 3, XEXP (yinner, 0), Pmode,
  663. XEXP (xinner, 0), Pmode,
  664. size, Pmode);
  665. }
  666. }
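/* Illustrative sketch (not part of the original source): an in-file caller
   copying a hypothetical 32-byte BLKmode object.  DST_ADDR and SRC_ADDR
   are assumed to be register rtx's already holding valid addresses.  With
   a constant size, the insn count from move_by_pieces_ninsns is compared
   against MOVE_RATIO; otherwise a movstr pattern or the bcopy library
   call is used, as above.  */
#if 0
  {
    rtx dst = gen_rtx (MEM, BLKmode, dst_addr);
    rtx src = gen_rtx (MEM, BLKmode, src_addr);
    emit_block_move (dst, src, gen_rtx (CONST_INT, VOIDmode, 32), 4);
  }
#endif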
  667. /* Generate code to copy Y into X.
  668. Both Y and X must have the same mode, except that
  669. Y can be a constant with VOIDmode.
  670. This mode cannot be BLKmode; use emit_block_move for that. */
  671. emit_move_insn (x, y)
  672. rtx x, y;
  673. {
  674. enum machine_mode mode = GET_MODE (x);
  675. x = protect_from_queue (x, 1);
  676. y = protect_from_queue (y, 0);
  677. if (mode == BLKmode)
  678. abort ();
  679. if (mov_optab[(int) mode].insn_code != CODE_FOR_nothing)
  680. emit_insn (GEN_FCN (mov_optab[(int) mode].insn_code) (x, y));
  681. else if (GET_MODE_SIZE (mode) >= GET_MODE_SIZE (SImode))
  682. {
  683. register int count = GET_MODE_SIZE (mode) / GET_MODE_SIZE (SImode);
  684. register int i;
  685. for (i = 0; i < count; i++)
  686. {
  687. rtx x1, y1;
  688. if (GET_CODE (x) == REG)
  689. x1 = gen_rtx (SUBREG, SImode, x, i);
  690. else
  691. x1 = gen_rtx (MEM, SImode,
  692. memory_address (SImode,
  693. plus_constant (XEXP (x, 0),
  694. i * GET_MODE_SIZE (SImode))));
  695. if (GET_CODE (y) == REG)
  696. y1 = gen_rtx (SUBREG, SImode, y, i);
  697. else
  698. y1 = gen_rtx (MEM, SImode,
  699. memory_address (SImode,
  700. plus_constant (XEXP (y, 0),
  701. i * GET_MODE_SIZE (SImode))));
  702. emit_insn (gen_movsi (protect_from_queue (x1, 1), protect_from_queue (y1, 0)));
  703. }
  704. }
  705. else
  706. abort ();
  707. }
  708. /* Pushing data onto the stack. */
  709. /* Push a block of length SIZE (perhaps variable)
  710. and return an rtx to address the beginning of the block.
  711. Note that it is not possible for the value returned to be a QUEUED. */
  712. static rtx
  713. push_block (size)
  714. rtx size;
  715. {
  716. register rtx temp;
  717. anti_adjust_stack (size);
  718. #ifdef STACK_GROWS_DOWNWARD
  719. temp = gen_rtx (REG, Pmode, STACK_POINTER_REGNUM);
  720. #else
  721. temp = gen_rtx (PLUS, Pmode,
  722. gen_rtx (REG, Pmode, STACK_POINTER_REGNUM),
  723. size);
  724. if (GET_CODE (size) != CONST_INT)
  725. temp = force_operand (temp, 0);
  726. #endif
  727. return memory_address (QImode, temp);
  728. }
  729. static rtx
  730. gen_push_operand ()
  731. {
  732. return gen_rtx (
  733. #ifdef STACK_GROWS_DOWNWARD
  734. PRE_DEC,
  735. #else
  736. PRE_INC,
  737. #endif
  738. Pmode,
  739. gen_rtx (REG, Pmode, STACK_POINTER_REGNUM));
  740. }
  741. /* Generate code to push X onto the stack, assuming it has mode MODE.
  742. MODE is redundant except when X is a CONST_INT (since they don't
  743. carry mode info).
  744. SIZE is an rtx for the size of data to be copied (in bytes),
  745. needed only if X is BLKmode.
  746. ALIGN (in bytes) is maximum alignment we can assume. */
  747. static void
  748. emit_push_insn (x, mode, size, align)
  749. register rtx x;
  750. enum machine_mode mode;
  751. rtx size;
  752. int align;
  753. {
  754. rtx xinner;
  755. xinner = x = protect_from_queue (x, 0);
  756. if (GET_CODE (x) == VOLATILE)
  757. xinner = XEXP (x, 0);
  758. if (mode == BLKmode)
  759. {
  760. register rtx temp;
  761. if (size == 0)
  762. abort ();
  763. if (GET_CODE (size) == CONST_INT
  764. && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
  765. < MOVE_RATIO))
  766. move_by_pieces (gen_push_operand (),
  767. XEXP (xinner, 0),
  768. INTVAL (size), align,
  769. 0, GET_CODE (x) == VOLATILE);
  770. else
  771. {
  772. temp = push_block (size);
  773. #ifdef HAVE_movstrsi
  774. if (HAVE_movstrsi)
  775. {
  776. emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp), x, size));
  777. return;
  778. }
  779. #endif
  780. #ifdef HAVE_movstrhi
  781. if (HAVE_movstrhi
  782. && GET_CODE (size) == CONST_INT
  783. && ((unsigned) INTVAL (size)
  784. < (1 << (GET_MODE_SIZE (HImode) * BITS_PER_UNIT - 1))))
  785. {
  786. emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
  787. x, size));
  788. return;
  789. }
  790. #endif
  791. /* Correct TEMP so it holds what will be a description of
  792. the address to copy to, valid after one arg is pushed. */
  793. #ifdef STACK_GROWS_DOWNWARD
  794. temp = plus_constant (temp, GET_MODE_SIZE (Pmode));
  795. #else
  796. temp = plus_constant (temp, - GET_MODE_SIZE (Pmode));
  797. #endif
  798. emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "bcopy"),
  799. 3, XEXP (xinner, 0), Pmode,
  800. temp, Pmode,
  801. size, Pmode);
  802. }
  803. }
  804. else if (mov_optab[(int) mode].insn_code != CODE_FOR_nothing)
  805. {
  806. register rtx push = gen_rtx (MEM, mode, gen_push_operand ());
  807. emit_insn (GEN_FCN (mov_optab[(int) mode].insn_code) (push, x));
  808. }
  809. else
  810. abort ();
  811. }
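/* Illustrative sketch (not part of the original source): pushing a
   hypothetical SImode value VAL as an outgoing argument.  SIZE is needed
   only for BLKmode pushes, so 0 is passed; the caller is expected to
   account for the pushed bytes, as emit_library_call does below with
   current_args_size.  */
#if 0
  emit_push_insn (val, SImode, 0, 0);
#endif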
  812. /* Output a library call to function FUN (a SYMBOL_REF rtx)
  813. with NARGS different arguments, passed as alternating rtx values
  814. and machine_modes to convert them to.
  815. The rtx values should have been passed through protect_from_queue already. */
  816. /*VARARGS2*/
  817. void
  818. emit_library_call (fun, nargs, a1)
  819. rtx fun;
  820. int nargs;
  821. struct { rtx value; enum machine_mode mode; } a1;
  822. {
  823. register int args_size = 0;
  824. register int argnum;
  825. #ifndef STACK_GROWS_DOWNWARD
  826. for (argnum = 0; argnum < nargs; argnum++)
  827. #else
  828. for (argnum = nargs - 1; argnum >= 0; argnum--)
  829. #endif
  830. {
  831. register enum machine_mode mode = (&a1)[argnum].mode;
  832. register rtx val = (&a1)[argnum].value;
  833. /* Convert the arg value to the mode the library wants. */
  834. /* ??? It is wrong to do it here; must do it earlier
  835. where we know the signedness of the arg. */
  836. if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
  837. {
  838. val = gen_reg_rtx (mode);
  839. convert_move (val, (&a1)[argnum].value, 0);
  840. }
  841. emit_push_insn (val, mode, 0, 0);
  842. args_size += GET_MODE_SIZE (mode);
  843. current_args_size += GET_MODE_SIZE (mode);
  844. }
  845. emit_queue ();
  846. gen_call_1 (fun, 0, args_size / GET_MODE_SIZE (SImode), args_size);
  847. }
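/* Illustrative sketch (not part of the original source): the alternating
   value/mode argument convention described above, for a two-operand call.
   The routine name "mulsi3" and the operands OP0/OP1 are hypothetical;
   the real callers in this file pass "bcopy", "extendsfdf2" and
   "truncdfsf2" the same way.  */
#if 0
  emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "mulsi3"),
                     2, op0, SImode, op1, SImode);
  copy_function_value (target);   /* Fetch the returned value, as
                                     convert_move does above.  */
#endif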
  848. /* Expand an assignment that stores the value of FROM into TO.
  849. Return an rtx for the value of TO. This may contain a QUEUED rtx. */
  850. rtx
  851. expand_assignment (to, from)
  852. tree to, from;
  853. {
  854. register rtx to_rtx = 0;
  855. /* Don't crash if the lhs of the assignment was erroneous. */
  856. if (TREE_CODE (to) == ERROR_MARK)
  857. return expand_expr (from, 0, VOIDmode, 0);
  858. /* Assignment of a structure component needs special treatment
  859. if the structure component's rtx is not simply a MEM. */
  860. if (TREE_CODE (to) == COMPONENT_REF)
  861. {
  862. register enum machine_mode mode1 = DECL_MODE (TREE_OPERAND (to, 1));
  863. int volstruct = 0;
  864. /* Get the structure as an rtx. */
  865. to_rtx = expand_expr (TREE_OPERAND (to, 0), 0, VOIDmode, 0);
  866. /* If the structure is in a register or if the component
  867. is a bit field, we cannot use addressing to access it.
  868. Use bit-field techniques or SUBREG to store in it. */
  869. if (mode1 == BImode || GET_CODE (to_rtx) == REG
  870. || GET_CODE (to_rtx) == SUBREG)
  871. {
  872. tree field = TREE_OPERAND (to, 1);
  873. int bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) * DECL_SIZE_UNIT (field);
  874. return store_bit_field (to_rtx, bitsize, DECL_OFFSET (field),
  875. DECL_MODE (field),
  876. expand_expr (from, 0, VOIDmode, 0));
  877. }
  878. /* Get the address of the structure the component is in.
  879. Record if structure is volatile. */
  880. if (GET_CODE (to_rtx) == VOLATILE)
  881. {
  882. to_rtx = XEXP (to_rtx, 0);
  883. volstruct = 1;
  884. }
  885. if (GET_CODE (to_rtx) != MEM)
  886. abort ();
  887. to_rtx = XEXP (to_rtx, 0);
  888. /* Now build a reference to just the desired component. */
  889. to_rtx = gen_rtx (MEM, mode1,
  890. memory_address (mode1,
  891. plus_constant (to_rtx,
  892. (DECL_OFFSET
  893. (TREE_OPERAND (to, 1))
  894. / BITS_PER_UNIT))));
  895. to_rtx->in_struct = 1;
  896. /* Make component volatile if structure is. */
  897. if (! cse_not_expected && volstruct)
  898. to_rtx = gen_rtx (VOLATILE, mode1, to_rtx);
  899. }
  900. /* Arrays in registers also need special treatment. */
  901. if (TREE_CODE (to) == ARRAY_REF)
  902. {
  903. /* Check to see whether the array is in a register. */
  904. tree array = TREE_OPERAND (TREE_OPERAND (to, 0), 0);
  905. register tree temexp;
  906. /* Look through any COMPONENT_REFS to the containing struct.
  907. Start by taking the array out of the ADDR_EXPR that's operand 0. */
  908. for (temexp = array;
  909. TREE_CODE (temexp) == COMPONENT_REF;
  910. temexp = TREE_OPERAND (temexp, 0));
  911. if (TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
  912. && TREE_CODE (temexp) == VAR_DECL
  913. && DECL_RTL (temexp) != 0
  914. && (GET_CODE (DECL_RTL (temexp)) == REG
  915. || GET_CODE (DECL_RTL (temexp)) == SUBREG))
  916. {
  917. /* The array or containing struct is a variable in a register
  918. and the index is constant. */
  919. int bitsize = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (to)));
  920. to_rtx = expand_expr (array, 0, VOIDmode, 0);
  921. return store_bit_field (to_rtx, bitsize,
  922. TREE_INT_CST_LOW (TREE_OPERAND (to, 1)) * bitsize,
  923. TYPE_MODE (TREE_TYPE (to)),
  924. expand_expr (from, 0, VOIDmode, 0));
  925. }
  926. /* The array is in memory. Generate the tree for *(array+index)
  927. and store into that instead. */
  928. to = build_indirect_ref (build_binary_op (PLUS_EXPR,
  929. TREE_OPERAND (to, 0),
  930. TREE_OPERAND (to, 1)));
  931. }
  932. /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
  933. Don't re-expand if it was expanded already (in COMPONENT_REF case). */
  934. if (to_rtx == 0)
  935. to_rtx = expand_expr (to, 0, VOIDmode, 0);
  936. /* Compute FROM and store the value in the rtx we got. */
  937. store_expr (from, to_rtx);
  938. return to_rtx;
  939. }
  940. /* Generate code for computing expression EXP,
  941. and storing the value into TARGET. Returns TARGET.
  942. TARGET may contain a QUEUED rtx. */
  943. static rtx
  944. store_expr (exp, target)
  945. register tree exp;
  946. register rtx target;
  947. {
  948. register rtx temp = expand_expr (exp, target, GET_MODE (target), 0);
  949. if (temp != target && TREE_CODE (exp) != ERROR_MARK)
  950. {
  951. target = protect_from_queue (target, 1);
  952. if (GET_MODE (temp) != GET_MODE (target)
  953. && GET_MODE (temp) != VOIDmode)
  954. convert_move (target, temp, type_unsigned_p (TREE_TYPE (exp)));
  955. else if (GET_MODE (temp) == BLKmode)
  956. emit_block_move (target, temp, expr_size (exp),
  957. TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
  958. else
  959. emit_move_insn (target, temp);
  960. }
  961. return target;
  962. }
  963. /* Given an rtx VALUE that may contain additions and multiplications,
  964. return an equivalent value that just refers to a register or memory.
  965. This is done by generating instructions to perform the arithmetic
  966. and returning a pseudo-register containing the value. */
  967. rtx
  968. force_operand (value, target)
  969. rtx value, target;
  970. {
  971. register struct optab *binoptab = 0;
  972. register rtx op2 = XEXP (value, 1);
  973. /* Use subtarget as the target for operand 0 of a binary operation. */
  974. register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  975. if (GET_CODE (value) == PLUS)
  976. binoptab = add_optab;
  977. else if (GET_CODE (value) == MINUS)
  978. binoptab = sub_optab;
  979. else if (GET_CODE (value) == MULT)
  980. {
  981. if (!CONSTANT_ADDRESS_P (op2)
  982. && !(GET_CODE (op2) == REG && op2 != subtarget))
  983. subtarget = 0;
  984. return expand_mult (GET_MODE (value),
  985. force_operand (XEXP (value, 0), subtarget),
  986. force_operand (op2, 0),
  987. target, 0);
  988. }
  989. if (binoptab)
  990. {
  991. if (!CONSTANT_ADDRESS_P (op2)
  992. && !(GET_CODE (op2) == REG && op2 != subtarget))
  993. subtarget = 0;
  994. return expand_binop (GET_MODE (value), binoptab,
  995. force_operand (XEXP (value, 0), subtarget),
  996. force_operand (op2, 0),
  997. target, 0, OPTAB_LIB_WIDEN);
  998. /* We give UNSIGNEDP = 0 to expand_binop
  999. because the only operations we are expanding here are signed ones. */
  1000. }
  1001. return value;
  1002. }
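/* Illustrative sketch (not part of the original source): flattening an
   address-arithmetic rtx.  REGX is a hypothetical SImode pseudo.  Given
   (plus (mult REGX 4) 12), force_operand emits the multiply and add and
   returns a pseudo holding the result, so the value can be used where
   only a register or memory operand is allowed.  */
#if 0
  {
    rtx value = gen_rtx (PLUS, SImode,
                         gen_rtx (MULT, SImode, regx,
                                  gen_rtx (CONST_INT, VOIDmode, 4)),
                         gen_rtx (CONST_INT, VOIDmode, 12));
    rtx flat = force_operand (value, 0);
  }
#endif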
  1003. /* expand_expr: generate code for computing expression EXP.
  1004. An rtx for the computed value is returned.
  1005. The value may be stored in TARGET if TARGET is nonzero.
  1006. TARGET is just a suggestion; callers must assume that
  1007. the rtx returned may not be the same as TARGET.
  1008. If TMODE is not VOIDmode, it suggests generating the
  1009. result in mode TMODE. But this is done only when convenient.
  1010. Otherwise, TMODE is ignored and the value is generated in its natural mode.
  1011. TMODE is just a suggestion; callers must assume that
  1012. the rtx returned may not have mode TMODE.
  1013. If SUM_OK is nonzero then when EXP is an addition
  1014. we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
  1015. or a nest of (PLUS ...) and (MINUS ...) where the terms are
  1016. products as above, or REG or MEM, or constant.
  1017. If SUM_OK is zero, in such cases we would output mul or add instructions
  1018. and then return a pseudo reg containing the sum. */
  1019. /* Subroutine of expand_expr:
  1020. return the target to use when recursively expanding
  1021. the first operand of an arithmetic operation. */
  1022. static rtx
  1023. validate_subtarget (subtarget, otherop)
  1024. rtx subtarget;
  1025. tree otherop;
  1026. {
  1027. if (TREE_LITERAL (otherop))
  1028. return subtarget;
  1029. if (TREE_CODE (otherop) == VAR_DECL
  1030. && DECL_RTL (otherop) != subtarget)
  1031. return subtarget;
  1032. return 0;
  1033. }
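/* Illustrative sketch (not part of the original source): the SUM_OK
   convention described above, for a hypothetical tree ADDR_TREE whose
   value is an address.  With SUM_OK nonzero the result may still be a
   (PLUS ...) / (MULT ...) nest, which is acceptable for addressing; a
   caller that needs a real operand runs it through memory_address or
   force_operand, as the INDIRECT_REF case below does.  */
#if 0
  {
    rtx addr = expand_expr (addr_tree, 0, VOIDmode, 1);  /* SUM_OK = 1 */
    rtx mem = gen_rtx (MEM, SImode, memory_address (SImode, addr));
  }
#endif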
  1034. rtx
  1035. expand_expr (exp, target, tmode, sum_ok)
  1036. register tree exp;
  1037. rtx target;
  1038. enum machine_mode tmode;
  1039. int sum_ok;
  1040. {
  1041. register rtx op0, op1, temp;
  1042. tree type = TREE_TYPE (exp);
  1043. register enum machine_mode mode = TYPE_MODE (type);
  1044. register enum tree_code code = TREE_CODE (exp);
  1045. struct optab *this_optab;
  1046. int negate_1;
  1047. /* Use subtarget as the target for operand 0 of a binary operation. */
  1048. rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  1049. static tree dbg2;
  1050. dbg2 = exp;
  1051. /* If we will do cse, generate all results into registers
  1052. since 1) that allows cse to find more things
  1053. and 2) otherwise cse could produce an insn the machine
  1054. cannot support. */
  1055. if (! cse_not_expected && mode != BLKmode)
  1056. target = subtarget;
  1057. switch (code)
  1058. {
  1059. case FUNCTION_DECL:
  1060. case VAR_DECL:
  1061. temp = DECL_RTL (exp);
  1062. if (! cse_not_expected && TREE_VOLATILE (exp))
  1063. return gen_rtx (VOLATILE, DECL_MODE (exp), temp);
  1064. else
  1065. return temp;
  1066. case PARM_DECL:
  1067. case RESULT_DECL:
  1068. if (DECL_RTL (exp) == 0)
  1069. abort ();
  1070. if (GET_CODE (DECL_RTL (exp)) == SYMBOL_REF)
  1071. abort ();
  1072. return DECL_RTL (exp);
  1073. case INTEGER_CST:
  1074. return gen_rtx (CONST_INT, VOIDmode, TREE_INT_CST_LOW (exp));
  1075. case CONST_DECL:
  1076. return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
  1077. case REAL_CST:
  1078. if (TREE_CST_RTL (exp))
  1079. return TREE_CST_RTL (exp);
  1080. /* If optimized, generate immediate float
  1081. which will be turned into memory float if necessary. */
  1082. if (!cse_not_expected)
  1083. return immed_real_const (exp);
  1084. output_constant_def (exp);
  1085. return TREE_CST_RTL (exp);
  1086. case COMPLEX_CST:
  1087. case STRING_CST:
  1088. if (TREE_CST_RTL (exp))
  1089. return TREE_CST_RTL (exp);
  1090. output_constant_def (exp);
  1091. return TREE_CST_RTL (exp);
  1092. case SAVE_EXPR:
  1093. if (SAVE_EXPR_RTL (exp) == 0)
  1094. {
  1095. SAVE_EXPR_RTL (exp) = gen_reg_rtx (mode);
  1096. store_expr (TREE_OPERAND (exp, 0), SAVE_EXPR_RTL (exp));
  1097. }
  1098. return SAVE_EXPR_RTL (exp);
  1099. case INDIRECT_REF:
  1100. {
  1101. tree exp1 = TREE_OPERAND (exp, 0);
  1102. tree exp2;
  1103. /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
  1104. for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
  1105. This code has the same general effect as simply doing
  1106. expand_expr on the save expr, except that the expression PTR
  1107. is computed for use as a memory address. This means different
  1108. code, suitable for indexing, may be generated. */
  1109. if (TREE_CODE (exp1) == SAVE_EXPR
  1110. && SAVE_EXPR_RTL (exp1) == 0
  1111. && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
  1112. && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
  1113. && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
  1114. {
  1115. temp = expand_expr (TREE_OPERAND (exp1, 0), 0, VOIDmode, 1);
  1116. op0 = memory_address (mode, temp);
  1117. op0 = copy_all_regs (op0);
  1118. SAVE_EXPR_RTL (exp1) = op0;
  1119. }
  1120. else
  1121. {
  1122. op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 1);
  1123. op0 = memory_address (mode, op0);
  1124. }
  1125. }
  1126. temp = gen_rtx (MEM, mode, op0);
  1127. if (! cse_not_expected && TREE_THIS_VOLATILE (exp))
  1128. return gen_rtx (VOLATILE, mode, temp);
  1129. else
  1130. return temp;
  1131. case COMPONENT_REF:
  1132. {
  1133. register enum machine_mode mode1 = DECL_MODE (TREE_OPERAND (exp, 1));
  1134. int volstruct = 0;
  1135. tree dbg1 = TREE_OPERAND (exp, 0); /* For debugging */
  1136. op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
  1137. if (mode1 == BImode || GET_CODE (op0) == REG
  1138. || GET_CODE (op0) == SUBREG)
  1139. {
  1140. tree field = TREE_OPERAND (exp, 1);
  1141. int bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) * DECL_SIZE_UNIT (field);
  1142. return extract_bit_field (op0, bitsize, DECL_OFFSET (field),
  1143. type_unsigned_p (TREE_TYPE (field)),
  1144. target, mode, tmode);
  1145. }
  1146. if (tmode != VOIDmode)
  1147. mode = tmode;
  1148. /* Get the address of the structure the component is in. */
  1149. if (GET_CODE (op0) == VOLATILE)
  1150. {
  1151. op0 = XEXP (op0, 0);
  1152. volstruct = 1;
  1153. }
  1154. if (GET_CODE (op0) != MEM)
  1155. abort ();
  1156. op0 = XEXP (op0, 0);
  1157. op0 = gen_rtx (MEM, mode1,
  1158. memory_address (mode1,
  1159. plus_constant (op0,
  1160. (DECL_OFFSET
  1161. (TREE_OPERAND (exp, 1))
  1162. / BITS_PER_UNIT))));
  1163. op0->in_struct = 1;
  1164. if (! cse_not_expected && volstruct)
  1165. op0 = gen_rtx (VOLATILE, mode1, op0);
  1166. if (mode == mode1 || mode == BLKmode)
  1167. return op0;
  1168. if (target == 0)
  1169. target = gen_reg_rtx (mode);
  1170. convert_move (target, op0, type_unsigned_p (TREE_TYPE (TREE_OPERAND (exp, 1))));
  1171. return target;
  1172. }
  1173. /* ARRAY_REF is used in C for an actual array (not just a pointer)
  1174. indexed by a constant index. It enables us to avoid taking the
  1175. address of the array, which may allow a short array (or a struct
  1176. or union containing one) to go in a register. */
  1177. case ARRAY_REF:
  1178. {
  1179. /* Check to see whether the array is in a register. */
  1180. tree array = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  1181. register tree temexp;
  1182. /* Look through any COMPONENT_REFS to the containing struct.
  1183. Start by taking the array out of the ADDR_EXPR that's operand 0. */
  1184. for (temexp = array;
  1185. TREE_CODE (temexp) == COMPONENT_REF;
  1186. temexp = TREE_OPERAND (temexp, 0));
  1187. if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
  1188. && TREE_CODE (temexp) == VAR_DECL
  1189. && DECL_RTL (temexp) != 0
  1190. && (GET_CODE (DECL_RTL (temexp)) == REG
  1191. || GET_CODE (DECL_RTL (temexp)) == SUBREG))
  1192. {
  1193. /* The array or containing struct is a variable in a register
  1194. and the index is constant. */
  1195. int bitsize = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
  1196. op0 = expand_expr (array, 0, VOIDmode, 0);
  1197. return extract_bit_field (op0, bitsize,
  1198. TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * bitsize,
  1199. type_unsigned_p (TREE_TYPE (exp)),
  1200. target, mode, tmode);
  1201. }
  1202. /* The array is in memory. Generate the tree for *(array+index)
  1203. and expand that. */
  1204. temexp = build_indirect_ref (build_binary_op (PLUS_EXPR,
  1205. TREE_OPERAND (exp, 0),
  1206. TREE_OPERAND (exp, 1)));
  1207. return expand_expr (temexp, 0, VOIDmode, 0);
  1208. }
  1209. /* Intended for a reference to a buffer of a file-object in Pascal.
  1210. But it's not certain that a special tree code will really be
  1211. necessary for these. INDIRECT_REF might work for them. */
  1212. case BUFFER_REF:
  1213. abort ();
  1214. case CALL_EXPR:
  1215. /* If this call was expanded already by preexpand_calls,
  1216. just return the result we got. */
  1217. if (CALL_EXPR_RTL (exp) != 0)
  1218. return CALL_EXPR_RTL (exp);
  1219. return expand_call (exp, target);
  1220. case NOP_EXPR:
  1221. case CONVERT_EXPR:
  1222. if (TREE_CODE (type) == VOID_TYPE)
  1223. {
  1224. expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, sum_ok);
  1225. return const0_rtx;
  1226. }
  1227. if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
  1228. return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, sum_ok);
  1229. op0 = expand_expr (TREE_OPERAND (exp, 0), 0, mode, 0);
  1230. if (GET_MODE (op0) == mode)
  1231. return op0;
  1232. if (target == 0)
  1233. target = gen_reg_rtx (mode);
  1234. convert_move (target, op0, type_unsigned_p (TREE_TYPE (TREE_OPERAND (exp, 0))));
  1235. return target;
  1236. case PLUS_EXPR:
  1237. preexpand_calls (exp);
  1238. if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
  1239. {
  1240. op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode, 1);
  1241. op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
  1242. if (sum_ok)
  1243. return op1;
  1244. return force_operand (op1, target);
  1245. }
  1246. negate_1 = 1;
  1247. plus_minus:
  1248. if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
  1249. {
  1250. op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 1);
  1251. op0 = plus_constant (op0,
  1252. negate_1 * TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
  1253. if (sum_ok)
  1254. return op0;
  1255. return force_operand (op0, target);
  1256. }
  1257. this_optab = add_optab;
  1258. if (!sum_ok) goto binop;
  1259. subtarget = validate_subtarget (subtarget, TREE_OPERAND (exp, 1));
  1260. op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 1);
  1261. op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 1);
  1262. /* Put a sum last, to simplify what follows. */
  1263. #ifdef OLD_INDEXING
  1264. if (GET_CODE (op1) == MULT)
  1265. {
  1266. temp = op0;
  1267. op0 = op1;
  1268. op1 = temp;
  1269. }
  1270. #endif
  1271. #ifndef OLD_INDEXING
  1272. /* Make sure any term that's a sum with a constant comes last. */
  1273. if (GET_CODE (op0) == PLUS
  1274. && CONSTANT_ADDRESS_P (XEXP (op0, 1)))
  1275. {
  1276. temp = op0;
  1277. op0 = op1;
  1278. op1 = temp;
  1279. }
  1280. /* If adding to a sum including a constant,
  1281. associate it to put the constant outside. */
  1282. if (GET_CODE (op1) == PLUS
  1283. && CONSTANT_ADDRESS_P (XEXP (op1, 1)))
  1284. {
  1285. op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
  1286. if (GET_CODE (XEXP (op1, 1)) == CONST_INT)
  1287. return plus_constant (op0, INTVAL (XEXP (op1, 1)));
  1288. else
  1289. return gen_rtx (PLUS, mode, op0, XEXP (op1, 1));
  1290. }
  1291. #endif
  1292. return gen_rtx (PLUS, mode, op0, op1);
  1293. case MINUS_EXPR:
  1294. preexpand_calls (exp);
  1295. if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
  1296. {
  1297. negate_1 = -1;
  1298. goto plus_minus;
  1299. }
  1300. this_optab = sub_optab;
  1301. goto binop;
  1302. case MULT_EXPR:
  1303. preexpand_calls (exp);
  1304. /* If first operand is constant, swap them.
  1305. Thus the following special case checks need only
  1306. check the second operand. */
  1307. if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
  1308. {
  1309. register tree t1 = TREE_OPERAND (exp, 0);
  1310. TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
  1311. TREE_OPERAND (exp, 1) = t1;
  1312. }
  1313. /* Attempt to return something suitable for generating an
  1314. indexed address, for machines that support that. */
  1315. if (sum_ok && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
  1316. {
  1317. op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  1318. if (GET_CODE (op0) != REG)
  1319. {
  1320. temp = gen_reg_rtx (GET_MODE (op0));
  1321. emit_move_insn (temp, op0);
  1322. op0 = temp;
  1323. }
  1324. return gen_rtx (MULT, mode, op0,
  1325. gen_rtx (CONST_INT, VOIDmode,
  1326. TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
  1327. }
  1328. subtarget = validate_subtarget (subtarget, TREE_OPERAND (exp, 1));
  1329. /* Check for multiplying things that have been extended
  1330. from a narrower type. If this machine supports multiplying
  1331. in that narrower type with a result in the desired type,
  1332. do it that way, and avoid the explicit type-conversion. */
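/* For example, with `short a, b;' the multiplication in
`(int) a * (int) b' can be done with a single widening multiply,
assuming the machine provides an insn for the umul_widen_optab
or smul_widen_optab entry for this mode, instead of widening
both operands and doing a full-width multiply. */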
  1333. if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
  1334. && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE
  1335. && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
  1336. < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
  1337. && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
  1338. && int_fits_type_p (TREE_OPERAND (exp, 1),
  1339. TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
  1340. ||
  1341. (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
  1342. && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
  1343. ==
  1344. TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
  1345. {
  1346. enum machine_mode innermode
  1347. = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
  1348. this_optab = (type_unsigned_p (TREE_TYPE (exp))
  1349. ? umul_widen_optab : smul_widen_optab);
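/* The test below assumes machine modes are numbered in order of
increasing width, so `(int) innermode + 1 == (int) mode' means
MODE is the next wider mode than INNERMODE. */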
  1350. if ((int) innermode + 1 == (int) mode
  1351. && this_optab[(int) mode].insn_code != CODE_FOR_nothing)
  1352. {
  1353. op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
  1354. 0, VOIDmode, 0);
  1355. if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
  1356. op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
  1357. else
  1358. op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
  1359. 0, VOIDmode, 0);
  1360. goto binop2;
  1361. }
  1362. }
  1363. op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  1364. op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
  1365. return expand_mult (mode, op0, op1, target, type_unsigned_p (type));
  1366. case TRUNC_DIV_EXPR:
  1367. case FLOOR_DIV_EXPR:
  1368. case CEIL_DIV_EXPR:
  1369. case ROUND_DIV_EXPR:
  1370. preexpand_calls (exp);
  1371. subtarget = validate_subtarget (subtarget, TREE_OPERAND (exp, 1));
  1372. /* Possible optimization: compute the dividend with SUM_OK
then if the divisor is constant we can optimize the case
where some terms of the dividend have coefficients divisible by it. */
  1375. op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  1376. op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
  1377. return expand_divmod (0, code, mode, op0, op1, target,
  1378. type_unsigned_p (type));
  1379. case RDIV_EXPR:
  1380. preexpand_calls (exp);
  1381. this_optab = flodiv_optab;
  1382. goto binop;
  1383. case TRUNC_MOD_EXPR:
  1384. case FLOOR_MOD_EXPR:
  1385. case CEIL_MOD_EXPR:
  1386. case ROUND_MOD_EXPR:
  1387. preexpand_calls (exp);
  1388. subtarget = validate_subtarget (subtarget, TREE_OPERAND (exp, 1));
  1389. op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  1390. op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
  1391. return expand_divmod (1, code, mode, op0, op1, target,
  1392. type_unsigned_p (type));
  1393. #if 0
  1394. #ifdef HAVE_divmoddisi4
  1395. if (GET_MODE (op0) != DImode)
  1396. {
  1397. temp = gen_reg_rtx (DImode);
  1398. convert_move (temp, op0, 0);
  1399. op0 = temp;
  1400. if (GET_MODE (op1) != SImode && GET_CODE (op1) != CONST_INT)
  1401. {
  1402. temp = gen_reg_rtx (SImode);
  1403. convert_move (temp, op1, 0);
  1404. op1 = temp;
  1405. }
  1406. temp = gen_reg_rtx (SImode);
  1407. if (target == 0)
  1408. target = gen_reg_rtx (SImode);
  1409. emit_insn (gen_divmoddisi4 (temp, protect_from_queue (op0, 0),
  1410. protect_from_queue (op1, 0),
  1411. protect_from_queue (target, 1)));
  1412. return target;
  1413. }
  1414. #endif
  1415. #endif
  1416. case FIX_ROUND_EXPR:
  1417. case FIX_FLOOR_EXPR:
  1418. case FIX_CEIL_EXPR:
  1419. abort (); /* Not used for C. */
  1420. case FIX_TRUNC_EXPR:
  1421. op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
  1422. if (target == 0)
  1423. target = gen_reg_rtx (mode);
  1424. if (mode == HImode || mode == QImode)
  1425. {
  1426. register rtx temp = gen_reg_rtx (SImode);
  1427. expand_fix (temp, op0);
  1428. convert_move (target, temp, 0);
  1429. }
  1430. else
  1431. expand_fix (target, op0);
  1432. return target;
  1433. case FLOAT_EXPR:
  1434. op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
  1435. if (target == 0)
  1436. target = gen_reg_rtx (mode);
  1437. if (GET_MODE (op0) == HImode
  1438. || GET_MODE (op0) == QImode)
  1439. {
  1440. register rtx temp = gen_reg_rtx (SImode);
  1441. convert_move (temp, op0, 0);
  1442. expand_float (target, temp);
  1443. }
  1444. else
  1445. expand_float (target, op0);
  1446. return target;
  1447. case NEGATE_EXPR:
  1448. op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
  1449. temp = expand_unop (mode, neg_optab, op0, target, 0);
  1450. if (temp == 0)
  1451. abort ();
  1452. return temp;
  1453. case ABS_EXPR:
  1454. /* First try to do it with a special abs instruction.
  1455. If that does not win, use conditional jump and negate. */
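/* The fallback sequence emitted below is roughly:
copy OP0 into TARGET; test TARGET; branch past the negation
if it is >= 0; otherwise negate TARGET in place. */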
  1456. op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
  1457. temp = expand_unop (mode, abs_optab, op0, target, 0);
  1458. if (temp != 0)
  1459. return temp;
  1460. temp = gen_label_rtx ();
  1461. if (target == 0 || GET_CODE (target) != REG)
  1462. target = gen_reg_rtx (GET_MODE (op0));
  1463. emit_move_insn (target, op0);
  1464. emit_tst_insn (target);
  1465. emit_jump_insn (gen_bge (temp));
  1466. op0 = expand_unop (mode, neg_optab, target, target, 0);
  1467. if (op0 != target)
  1468. emit_move_insn (target, op0);
  1469. emit_label (temp);
  1470. return target;
  1471. case MAX_EXPR:
  1472. case MIN_EXPR:
  1473. op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
  1474. if (target == 0 || GET_CODE (target) != REG || target == op1)
target = gen_reg_rtx (mode);
  1476. op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
  1477. if (target != op0)
  1478. emit_move_insn (target, op0);
  1479. op0 = gen_label_rtx ();
  1480. if (code == MAX_EXPR)
  1481. temp = (type_unsigned_p (TREE_TYPE (TREE_OPERAND (exp, 1)))
  1482. ? compare1 (target, op1, GEU, LEU, 1)
  1483. : compare1 (target, op1, GE, LE, 0));
  1484. else
  1485. temp = (type_unsigned_p (TREE_TYPE (TREE_OPERAND (exp, 1)))
  1486. ? compare1 (target, op1, LEU, GEU, 1)
  1487. : compare1 (target, op1, LE, GE, 0));
  1488. emit_jump_insn (gen_rtx (SET, VOIDmode, pc_rtx,
  1489. gen_rtx (IF_THEN_ELSE, VOIDmode,
  1490. temp,
  1491. gen_rtx (LABEL_REF, VOIDmode, op0),
  1492. pc_rtx)));
  1493. emit_move_insn (target, op1);
emit_label (op0);
  1495. return target;
  1496. /* ??? Can optimize when the operand of this is a bitwise operation,
  1497. by using a different bitwise operation. */
  1498. case BIT_NOT_EXPR:
  1499. op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  1500. temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
  1501. if (temp == 0)
  1502. abort ();
  1503. return temp;
  1504. /* ??? Can optimize bitwise operations with one arg constant.
  1505. Pastel optimizes (a bitwise1 n) bitwise2 (a bitwise3 b)
  1506. and (a bitwise1 b) bitwise2 b (etc)
  1507. but that is probably not worth while. */
  1508. /* AND_EXPR is for bitwise anding.
  1509. TRUTH_AND_EXPR is for anding two boolean values
  1510. when we want in all cases to compute both of them.
  1511. In general it is fastest to do TRUTH_AND_EXPR by
  1512. computing both operands as actual zero-or-1 values
  1513. and then bitwise anding. In cases where there cannot
  1514. be any side effects, better code would be made by
  1515. treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
  1516. but the question is how to recognize those cases. */
  1517. case TRUTH_AND_EXPR:
  1518. case BIT_AND_EXPR:
  1519. preexpand_calls (exp);
  1520. subtarget = validate_subtarget (subtarget, TREE_OPERAND (exp, 1));
  1521. op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  1522. op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
  1523. return expand_bit_and (mode, op0, op1, target);
  1524. /* See comment above about TRUTH_AND_EXPR; it applies here too. */
  1525. case TRUTH_OR_EXPR:
  1526. case BIT_IOR_EXPR:
  1527. preexpand_calls (exp);
  1528. this_optab = ior_optab;
  1529. goto binop;
  1530. case BIT_XOR_EXPR:
  1531. preexpand_calls (exp);
  1532. this_optab = xor_optab;
  1533. goto binop;
  1534. case LSHIFT_EXPR:
  1535. case RSHIFT_EXPR:
  1536. case LROTATE_EXPR:
  1537. case RROTATE_EXPR:
  1538. preexpand_calls (exp);
  1539. subtarget = validate_subtarget (subtarget, TREE_OPERAND (exp, 1));
  1540. op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  1541. return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
  1542. type_unsigned_p (type));
  1543. /* ??? cv's were used to effect here to combine additive constants
  1544. and to determine the answer when only additive constants differ.
  1545. Also, the addition of one can be handled by changing the condition. */
  1546. case LT_EXPR:
  1547. case LE_EXPR:
  1548. case GT_EXPR:
  1549. case GE_EXPR:
  1550. case EQ_EXPR:
  1551. case NE_EXPR:
  1552. preexpand_calls (exp);
  1553. temp = do_store_flag (exp, target);
  1554. if (temp != 0)
  1555. return temp;
  1556. if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1)))
  1557. {
  1558. /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
  1559. temp = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  1560. if (temp != subtarget)
  1561. temp = copy_to_reg (temp);
  1562. op1 = gen_label_rtx ();
  1563. emit_cmp_insn (temp, const0_rtx, 0, type_unsigned_p (type));
  1564. emit_jump_insn (gen_beq (op1));
  1565. emit_move_insn (temp, const1_rtx);
  1566. emit_label (op1);
  1567. return temp;
  1568. }
  1569. /* If no set-flag instruction, must generate a conditional
  1570. store into a temporary variable. Drop through
  1571. and handle this like && and ||. */
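/* The sequence emitted below is roughly: clear TEMP; jump past
the next insn if EXP is false; set TEMP to 1; emit the label.
The result left in TEMP is a plain 0-or-1 value. */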
  1572. case TRUTH_ANDIF_EXPR:
  1573. case TRUTH_ORIF_EXPR:
  1574. temp = gen_reg_rtx (mode);
  1575. emit_clr_insn (temp);
  1576. op1 = gen_label_rtx ();
  1577. jumpifnot (exp, op1);
  1578. emit_0_to_1_insn (temp);
  1579. emit_label (op1);
  1580. return temp;
  1581. case TRUTH_NOT_EXPR:
  1582. op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
  1583. /* The parser is careful to generate TRUTH_NOT_EXPR
  1584. only with operands that are always zero or one. */
  1585. temp = expand_binop (mode, xor_optab, op0,
  1586. gen_rtx (CONST_INT, mode, 1),
  1587. target, 1, OPTAB_LIB_WIDEN);
  1588. if (temp == 0)
  1589. abort ();
  1590. return temp;
  1591. case COMPOUND_EXPR:
  1592. expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
  1593. emit_queue ();
  1594. return expand_expr (TREE_OPERAND (exp, 1), target, VOIDmode, 0);
  1595. case COND_EXPR:
  1596. /* Note that COND_EXPRs whose type is a structure or union
  1597. are required to be constructed to contain assignments of
  1598. a temporary variable, so that we can evaluate them here
  1599. for side effect only. If type is void, we must do likewise. */
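/* The layout emitted for `c ? a : b' is roughly:
jumpifnot C to L0; store A in TEMP; jump to L1;
L0: store B in TEMP; L1: ... with TEMP omitted (side effects
only) when MODE is BLKmode or VOIDmode. */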
  1600. op0 = gen_label_rtx ();
  1601. op1 = gen_label_rtx ();
  1602. if (mode == BLKmode || mode == VOIDmode)
  1603. temp = 0;
  1604. else if (target)
  1605. temp = target;
  1606. else
  1607. temp = gen_reg_rtx (mode);
  1608. jumpifnot (TREE_OPERAND (exp, 0), op0);
  1609. current_args_size += 1;
  1610. if (temp != 0)
  1611. store_expr (TREE_OPERAND (exp, 1), temp);
  1612. else
  1613. expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
  1614. emit_queue ();
  1615. emit_jump_insn (gen_jump (op1));
  1616. emit_barrier ();
  1617. emit_label (op0);
  1618. if (temp != 0)
  1619. store_expr (TREE_OPERAND (exp, 2), temp);
  1620. else
  1621. expand_expr (TREE_OPERAND (exp, 2), 0, VOIDmode, 0);
  1622. emit_queue ();
  1623. emit_label (op1);
  1624. current_args_size -= 1;
  1625. return temp;
  1626. case MODIFY_EXPR:
  1627. /* If lhs is complex, expand calls in rhs before computing it.
  1628. That's so we don't compute a pointer and save it over a call.
  1629. If lhs is simple, compute it first so we can give it as a
  1630. target if the rhs is just a call. This avoids an extra temp and copy
  1631. and that prevents a partial-subsumption which makes bad code.
  1632. Actually we could treat component_ref's of vars like vars. */
  1633. if (TREE_CODE (TREE_OPERAND (exp, 0)) != VAR_DECL)
  1634. preexpand_calls (exp);
  1635. temp = expand_assignment (TREE_OPERAND (exp, 0),
  1636. TREE_OPERAND (exp, 1));
  1637. return temp;
  1638. case PREINCREMENT_EXPR:
  1639. op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
  1640. op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
  1641. expand_binop (mode, add_optab, copy_rtx (op0), op1, copy_rtx (op0),
  1642. 0, OPTAB_LIB_WIDEN);
  1643. return op0;
  1644. case PREDECREMENT_EXPR:
  1645. op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
  1646. op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
  1647. expand_binop (mode, sub_optab, copy_rtx (op0), op1, copy_rtx (op0),
  1648. 0, OPTAB_LIB_WIDEN);
  1649. return op0;
  1650. case POSTINCREMENT_EXPR:
  1651. op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
  1652. op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
  1653. op0 = stabilize (op0);
  1654. return enqueue_insn (op0, gen_add2_insn (copy_rtx (op0), op1));
  1655. case POSTDECREMENT_EXPR:
  1656. op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
  1657. op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
  1658. op0 = stabilize (op0);
  1659. return enqueue_insn (op0, gen_sub2_insn (copy_rtx (op0), op1));
  1660. case ADDR_EXPR:
  1661. op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
  1662. if (GET_CODE (op0) == VOLATILE)
  1663. op0 = XEXP (op0, 0);
  1664. if (GET_CODE (op0) != MEM)
  1665. abort ();
  1666. if (sum_ok)
  1667. return XEXP (op0, 0);
  1668. return force_operand (XEXP (op0, 0), target);
  1669. case ENTRY_VALUE_EXPR:
  1670. abort ();
  1671. case ERROR_MARK:
  1672. return gen_rtx (CONST_INT, (mode != VOIDmode) ? mode : SImode, 0);
  1673. default:
  1674. abort ();
  1675. }
  1676. /* Here to do an ordinary binary operator, generating an instruction
  1677. from the optab already placed in `this_optab'. */
  1678. binop:
  1679. /* Detect things like x = y | (a == b)
  1680. and do them as (x = y), (a == b ? x |= 1 : 0), x. */
  1681. /* First, get the comparison or conditional into the second arg. */
  1682. if (comparison_code[(int) TREE_CODE (TREE_OPERAND (exp, 0))]
  1683. || (TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR
  1684. && (integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
  1685. || integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 2)))))
  1686. {
  1687. if (this_optab == ior_optab || this_optab == add_optab
  1688. || this_optab == xor_optab)
  1689. {
  1690. tree exch = TREE_OPERAND (exp, 1);
  1691. TREE_OPERAND (exp, 1) = TREE_OPERAND (exp, 0);
  1692. TREE_OPERAND (exp, 0) = exch;
  1693. }
  1694. }
  1695. if (comparison_code[(int) TREE_CODE (TREE_OPERAND (exp, 1))]
  1696. || (TREE_CODE (TREE_OPERAND (exp, 1)) == COND_EXPR
  1697. && (integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 1), 1))
  1698. || integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 1), 2)))))
  1699. {
  1700. if (this_optab == ior_optab || this_optab == add_optab
  1701. || this_optab == xor_optab || this_optab == sub_optab
  1702. || this_optab == lshl_optab || this_optab == ashl_optab
  1703. || this_optab == lshr_optab || this_optab == ashr_optab
  1704. || this_optab == rotl_optab || this_optab == rotr_optab)
  1705. {
  1706. tree thenexp, condexp;
  1707. rtx thenv = 0;
  1708. if (target == 0) target = gen_reg_rtx (mode);
  1709. store_expr (TREE_OPERAND (exp, 0), target);
  1710. op0 = gen_label_rtx ();
  1711. if (TREE_CODE (TREE_OPERAND (exp, 1)) != COND_EXPR)
  1712. {
  1713. do_jump (TREE_OPERAND (exp, 1), op0, 0);
  1714. thenv = const1_rtx;
  1715. }
  1716. else if (integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 1), 2)))
  1717. {
  1718. do_jump (TREE_OPERAND (TREE_OPERAND (exp, 1), 0), op0, 0);
  1719. thenexp = TREE_OPERAND (TREE_OPERAND (exp, 1), 1);
  1720. }
  1721. else
  1722. {
  1723. do_jump (TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0, op0);
  1724. thenexp = TREE_OPERAND (TREE_OPERAND (exp, 1), 2);
  1725. }
  1726. if (thenv == 0)
  1727. thenv = expand_expr (thenexp, 0, VOIDmode, 0);
  1728. if (this_optab == rotl_optab || this_optab == rotr_optab)
  1729. temp = expand_binop (mode, this_optab, target, thenv, target,
  1730. -1, OPTAB_LIB);
  1731. else if (this_optab == lshl_optab || this_optab == lshr_optab)
  1732. temp = expand_binop (mode, this_optab, target, thenv, target,
  1733. 1, OPTAB_LIB_WIDEN);
  1734. else
  1735. temp = expand_binop (mode, this_optab, target, thenv, target,
  1736. 0, OPTAB_LIB_WIDEN);
  1737. if (target != temp)
  1738. emit_move_insn (target, temp);
  1739. emit_label (op0);
  1740. return target;
  1741. }
  1742. }
  1743. subtarget = validate_subtarget (subtarget, TREE_OPERAND (exp, 1));
  1744. op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  1745. op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
  1746. binop2:
  1747. temp = expand_binop (mode, this_optab, op0, op1, target,
  1748. type_unsigned_p (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  1749. binop1:
  1750. if (temp == 0)
  1751. abort ();
  1752. return temp;
  1753. }
  1754. /* Expand all function calls contained within EXP, innermost ones first.
  1755. But don't look within expressions that have sequence points.
  1756. For each CALL_EXPR, record the rtx for its value
in the CALL_EXPR_RTL field. */
  1758. static void
  1759. preexpand_calls (exp)
  1760. tree exp;
  1761. {
  1762. register int nops, i;
  1763. if (! do_preexpand_calls)
  1764. return;
  1765. switch (TREE_CODE (exp))
  1766. {
  1767. case CALL_EXPR:
  1768. if (CALL_EXPR_RTL (exp) == 0)
  1769. CALL_EXPR_RTL (exp) = expand_call (exp, 0);
  1770. return;
  1771. case COMPOUND_EXPR:
  1772. case COND_EXPR:
  1773. case TRUTH_ANDIF_EXPR:
  1774. case TRUTH_ORIF_EXPR:
  1775. /* If we find one of these, then we can be sure
  1776. the adjust will be done for it (since it makes jumps).
  1777. Do it now, so that if this is inside an argument
  1778. of a function, we don't get the stack adjustment
  1779. after some other args have already been pushed. */
  1780. do_pending_stack_adjust ();
  1781. return;
  1782. case SAVE_EXPR:
  1783. if (SAVE_EXPR_RTL (exp) != 0)
  1784. return;
  1785. }
  1786. nops = tree_code_length[(int) TREE_CODE (exp)];
  1787. for (i = 0; i < nops; i++)
  1788. if (TREE_OPERAND (exp, i) != 0)
  1789. {
  1790. register int type = *tree_code_type[(int) TREE_CODE (TREE_OPERAND (exp, i))];
  1791. if (type == 'e' || type == 'r')
  1792. preexpand_calls (TREE_OPERAND (exp, i));
  1793. }
  1794. }
  1795. /* Generate instructions to call function FUNEXP and pass
  1796. it the static chain. NARGS is the "number of args",
  1797. to put in the call instruction on machines that require this.
  1798. Also generate the code to pop the args after returning,
  1799. (ARGS_SIZE is size of stuff to pop, in bytes). */
  1800. static void
  1801. gen_call_1 (funexp, context, nargs, args_size)
  1802. rtx funexp;
  1803. rtx context;
  1804. int nargs;
  1805. int args_size;
  1806. {
  1807. funexp = protect_from_queue (funexp, 0);
  1808. if (context)
  1809. context = protect_from_queue (context, 0);
  1810. /* Function variable in language with nested functions. */
  1811. if (GET_MODE (funexp) == EPmode)
  1812. {
  1813. register rtx reg = gen_rtx (REG, Pmode, STATIC_CHAIN_REGNUM);
  1814. emit_insn (gen_movsi (reg, gen_highpart (Pmode, funexp)));
  1815. emit_insn (gen_rtx (USE, VOIDmode, reg));
  1816. funexp = memory_address (QImode, gen_lowpart (Pmode, funexp));
  1817. emit_call_insn (gen_call (gen_rtx (MEM, QImode, funexp),
  1818. gen_rtx (CONST_INT, VOIDmode, nargs)));
  1819. }
  1820. else
  1821. {
  1822. if (context != 0)
  1823. {
  1824. /* Unless function variable in C, or top level function constant */
  1825. register rtx reg = gen_rtx (REG, Pmode, STATIC_CHAIN_REGNUM);
  1826. emit_insn (gen_movsi (reg, lookup_static_chain (context)));
  1827. emit_insn (gen_rtx (USE, VOIDmode, reg));
  1828. }
  1829. emit_call_insn (gen_call (gen_rtx (MEM, QImode,
  1830. memory_address (QImode, funexp)),
  1831. gen_rtx (CONST_INT, VOIDmode, nargs)));
  1832. }
  1833. /* If returning from the subroutine does not automatically pop the args,
  1834. we need an instruction to pop them sooner or later.
  1835. Perhaps do it now; perhaps just record how much space to pop later. */
  1836. current_args_size -= args_size;
  1837. #ifndef RETURN_POPS_ARGS
  1838. if (args_size != 0)
  1839. {
  1840. if (TARGET_DEFER_POP && current_args_size == 0)
  1841. pending_stack_adjust += args_size;
  1842. else
  1843. adjust_stack (gen_rtx (CONST_INT, VOIDmode, args_size));
  1844. }
  1845. #endif
  1846. }
  1847. /* At the start of a function, record that we have no previously-pushed
  1848. arguments waiting to be popped. */
  1849. clear_pending_stack_adjust ()
  1850. {
  1851. pending_stack_adjust = 0;
  1852. }
  1853. /* At start of function, initialize. */
  1854. clear_current_args_size ()
  1855. {
  1856. current_args_size = 0;
  1857. }
  1858. /* Pop any previously-pushed arguments that have not been popped yet. */
  1859. do_pending_stack_adjust ()
  1860. {
  1861. if (current_args_size == 0)
  1862. {
  1863. if (pending_stack_adjust != 0)
  1864. adjust_stack (gen_rtx (CONST_INT, VOIDmode, pending_stack_adjust));
  1865. pending_stack_adjust = 0;
  1866. }
  1867. }
  1868. /* Generate all the code for a function call
  1869. and return an rtx for its value.
  1870. Store the value in TARGET (specified as an rtx) if convenient.
  1871. If the value is stored in TARGET then TARGET is returned. */
  1872. static rtx
  1873. expand_call (exp, target)
  1874. tree exp;
  1875. rtx target;
  1876. {
  1877. tree actparms = TREE_OPERAND (exp, 1);
  1878. register tree p;
  1879. int args_size = 0;
  1880. register int i;
  1881. register tree *argvec;
  1882. int num_actuals;
  1883. rtx structure_value_addr = 0;
  1884. /* Don't let pending stack adjusts add up to too much.
  1885. Also, do all pending adjustments now
  1886. if there is any chance this might be a call to alloca. */
  1887. if (pending_stack_adjust >= 32
  1888. || (pending_stack_adjust > 0
  1889. &&
  1890. /* Unless it's a call to a specific function that isn't alloca,
  1891. we must assume it might be alloca. */
  1892. !(p = TREE_OPERAND (exp, 0),
  1893. TREE_CODE (p) == ADDR_EXPR
  1894. && TREE_CODE (TREE_OPERAND (p, 0)) == FUNCTION_DECL
  1895. && strcmp (IDENTIFIER_POINTER (DECL_NAME (TREE_OPERAND (p, 0))),
  1896. "alloca"))))
  1897. do_pending_stack_adjust ();
  1898. if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
  1899. {
  1900. /* This call returns a big structure. */
  1901. if (target)
  1902. structure_value_addr = XEXP (target, 0);
  1903. else
  1904. /* Make room on the stack to hold the value. */
  1905. structure_value_addr = get_structure_value_addr (expr_size (exp));
  1906. }
  1907. for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
  1908. num_actuals = i;
  1909. argvec = (tree *) alloca (i * sizeof (tree));
  1910. #ifdef STACK_GROWS_DOWNWARD
  1911. /* In this case, must reverse order of args
so that we compute and push the last arg first. */
  1913. for (p = actparms, i = num_actuals - 1; p; p = TREE_CHAIN (p), i--)
  1914. argvec[i] = p;
  1915. #else
  1916. for (p = actparms, i = 0; p; p = TREE_CHAIN (p), i++)
  1917. argvec[i] = p;
  1918. #endif
  1919. for (i = 0; i < num_actuals; i++)
  1920. {
  1921. register tree p = argvec[i];
  1922. register tree pval = TREE_VALUE (p);
  1923. /* Push the next argument. Note that it has already been converted
  1924. if necessary to the type that the called function expects. */
  1925. if (TREE_CODE (pval) == ERROR_MARK)
  1926. ;
  1927. else if (TYPE_MODE (TREE_TYPE (pval)) != BLKmode)
  1928. {
  1929. register int size, used;
  1930. /* Argument is a scalar.
  1931. Push it, and if its size is less than the
  1932. amount of space allocated to it,
  1933. also bump stack pointer by the additional space.
  1934. Note that in C the default argument promotions
  1935. will prevent such mismatches. */
  1936. used = size = GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (pval)));
  1937. /* Compute how much space the push instruction will push.
  1938. On many machines, pushing a byte will advance the stack
  1939. pointer by a halfword. */
  1940. size = PUSH_ROUNDING (size);
  1941. /* Compute how much space the argument should get:
  1942. round up to a multiple of the alignment for arguments. */
  1943. used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
  1944. / (PARM_BOUNDARY / BITS_PER_UNIT))
  1945. * (PARM_BOUNDARY / BITS_PER_UNIT));
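/* For example, assuming PARM_BOUNDARY is 32 and BITS_PER_UNIT is 8,
a one-byte argument on a machine whose push advances the stack
pointer by a halfword gives SIZE == 2 and USED == 4, so the stack
is adjusted by the remaining 2 bytes around the push. */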
  1946. #ifdef STACK_GROWS_DOWNWARD
  1947. if (size != used)
  1948. anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode,
  1949. used - size));
  1950. #endif
  1951. emit_push_insn (expand_expr (pval, 0, VOIDmode, 0),
  1952. TYPE_MODE (TREE_TYPE (pval)), 0, 0);
  1953. #ifndef STACK_GROWS_DOWNWARD
  1954. if (size != used)
  1955. anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode,
  1956. used - size));
  1957. #endif
  1958. /* Account for the space thus used. */
  1959. args_size += used;
  1960. current_args_size += used;
  1961. }
  1962. else
  1963. {
  1964. register rtx tem = expand_expr (pval, 0, VOIDmode, 0);
  1965. register tree size = size_in_bytes (TREE_TYPE (pval));
  1966. register tree used;
  1967. register int excess;
  1968. /* Pushing a nonscalar. Round its size up to a multiple
  1969. of the allocation unit for arguments. This part works
on variable-size objects since SIZE and USED are trees. */
  1971. used = convert_units (convert_units (size, BITS_PER_UNIT, PARM_BOUNDARY),
  1972. PARM_BOUNDARY, BITS_PER_UNIT);
  1973. if (!TREE_LITERAL (used))
  1974. abort ();
  1975. excess = TREE_INT_CST_LOW (used) - PUSH_ROUNDING (TREE_INT_CST_LOW (size));
  1976. #ifdef STACK_GROWS_DOWNWARD
  1977. if (excess != 0)
  1978. anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, excess));
  1979. #endif
  1980. emit_push_insn (tem, TYPE_MODE (TREE_TYPE (pval)),
  1981. expand_expr (size, 0, VOIDmode, 0),
  1982. (TYPE_ALIGN (TREE_TYPE (pval))
  1983. / BITS_PER_UNIT));
  1984. #ifndef STACK_GROWS_DOWNWARD
  1985. if (excess != 0)
  1986. anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, excess));
  1987. #endif
  1988. args_size += TREE_INT_CST_LOW (used);
  1989. current_args_size += TREE_INT_CST_LOW (used);
  1990. }
  1991. }
  1992. /* Perform postincrements before actually calling the function. */
  1993. emit_queue ();
  1994. /* Pass the function the address in which to return a structure value. */
  1995. if (structure_value_addr)
  1996. {
  1997. register rtx reg = gen_rtx (REG, Pmode, STRUCT_VALUE_REGNUM);
  1998. emit_move_insn (reg, structure_value_addr);
  1999. emit_insn (gen_rtx (USE, VOIDmode, reg));
  2000. }
  2001. gen_call_1 (expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0),
  2002. /* ??? For Pascal, this must pass a context to get the static chain from
  2003. in certain cases. */
  2004. 0,
  2005. args_size / GET_MODE_SIZE (SImode), args_size);
  2006. /* ??? Nothing has been done here to record control flow
  2007. when contained functions can do nonlocal gotos. */
  2008. /* If value type not void, return an rtx for the value. */
  2009. if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
  2010. return 0;
  2011. if (structure_value_addr)
  2012. {
  2013. if (target)
  2014. return target;
  2015. return gen_rtx (MEM, BLKmode, structure_value_addr);
  2016. }
  2017. if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp)))
  2018. {
  2019. copy_function_value (target);
  2020. return target;
  2021. }
  2022. return function_value (TYPE_MODE (TREE_TYPE (exp)));
  2023. }
  2024. /* Expand conditional expressions. */
  2025. /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
  2026. LABEL is an rtx of code CODE_LABEL, in this function and all the
  2027. functions here. */
  2028. jumpifnot (exp, label)
  2029. tree exp;
  2030. rtx label;
  2031. {
  2032. do_jump (exp, label, 0);
  2033. }
  2034. /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
  2035. jumpif (exp, label)
  2036. tree exp;
  2037. rtx label;
  2038. {
  2039. do_jump (exp, 0, label);
  2040. }
  2041. /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
  2042. the result is zero, or IF_TRUE_LABEL if the result is one.
  2043. Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
  2044. meaning fall through in that case.
  2045. This function is responsible for optimizing cases such as
  2046. &&, || and comparison operators in EXP. */
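/* For example, for `a && b' (TRUTH_ANDIF_EXPR) this jumps to
IF_FALSE_LABEL as soon as A is found to be zero, and tests B
only when A was nonzero, so B is never evaluated needlessly. */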
  2047. do_jump (exp, if_false_label, if_true_label)
  2048. tree exp;
  2049. rtx if_false_label, if_true_label;
  2050. {
  2051. register enum tree_code code = TREE_CODE (exp);
  2052. /* Some cases need to create a label to jump to
  2053. in order to properly fall through.
  2054. These cases set DROP_THROUGH_LABEL nonzero. */
  2055. rtx drop_through_label = 0;
  2056. rtx temp;
  2057. rtx comparison = 0;
  2058. emit_queue ();
  2059. switch (code)
  2060. {
  2061. case ERROR_MARK:
  2062. break;
  2063. case INTEGER_CST:
  2064. temp = integer_zerop (exp) ? if_false_label : if_true_label;
  2065. if (temp)
  2066. emit_jump (temp);
  2067. break;
  2068. case ADDR_EXPR:
  2069. /* The address of something can never be zero. */
  2070. if (if_true_label)
  2071. emit_jump (if_true_label);
  2072. break;
  2073. case NOP_EXPR:
  2074. do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
  2075. break;
  2076. case TRUTH_NOT_EXPR:
  2077. do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
  2078. break;
  2079. case TRUTH_ANDIF_EXPR:
  2080. if (if_false_label == 0)
  2081. if_false_label = drop_through_label = gen_label_rtx ();
  2082. do_jump (TREE_OPERAND (exp, 0), if_false_label, 0);
  2083. do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
  2084. break;
  2085. case TRUTH_ORIF_EXPR:
  2086. if (if_true_label == 0)
  2087. if_true_label = drop_through_label = gen_label_rtx ();
  2088. do_jump (TREE_OPERAND (exp, 0), 0, if_true_label);
  2089. do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
  2090. break;
  2091. case COMPOUND_EXPR:
  2092. expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
  2093. emit_queue ();
  2094. do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
  2095. break;
  2096. case COND_EXPR:
  2097. {
  2098. register rtx label1 = gen_label_rtx ();
  2099. drop_through_label = gen_label_rtx ();
  2100. do_jump (TREE_OPERAND (exp, 0), label1, 0);
  2101. /* Now the THEN-expression. */
  2102. do_jump (TREE_OPERAND (exp, 1),
  2103. if_false_label ? if_false_label : drop_through_label,
  2104. if_true_label ? if_true_label : drop_through_label);
  2105. emit_label (label1);
  2106. /* Now the ELSE-expression. */
  2107. do_jump (TREE_OPERAND (exp, 2),
  2108. if_false_label ? if_false_label : drop_through_label,
  2109. if_true_label ? if_true_label : drop_through_label);
  2110. }
  2111. break;
  2112. case EQ_EXPR:
  2113. comparison = compare (exp, EQ, EQ, EQ, EQ);
  2114. break;
  2115. case NE_EXPR:
  2116. comparison = compare (exp, NE, NE, NE, NE);
  2117. break;
  2118. case LT_EXPR:
  2119. comparison = compare (exp, LT, LTU, GT, GTU);
  2120. break;
  2121. case LE_EXPR:
  2122. comparison = compare (exp, LE, LEU, GE, GEU);
  2123. break;
  2124. case GT_EXPR:
  2125. comparison = compare (exp, GT, GTU, LT, LTU);
  2126. break;
  2127. case GE_EXPR:
  2128. comparison = compare (exp, GE, GEU, LE, LEU);
  2129. break;
  2130. default:
  2131. temp = expand_expr (exp, 0, VOIDmode, 0);
  2132. do_pending_stack_adjust ();
  2133. emit_cmp_insn (temp, gen_rtx (CONST_INT, GET_MODE (temp), 0),
  2134. 0, 0);
  2135. if (if_true_label)
  2136. emit_jump_insn (gen_bne (if_true_label));
  2137. if (if_false_label)
  2138. {
  2139. if (if_true_label)
  2140. emit_jump (if_false_label);
  2141. else
  2142. emit_jump_insn (gen_beq (if_false_label));
  2143. }
  2144. }
  2145. /* If COMPARISON is nonzero here, it is an rtx that can be substituted
  2146. straight into a conditional jump instruction as the jump condition.
  2147. Otherwise, all the work has been done already. */
  2148. if (comparison)
  2149. if (if_true_label)
  2150. {
  2151. emit_jump_insn (gen_rtx (SET, VOIDmode, pc_rtx,
  2152. gen_rtx (IF_THEN_ELSE, VOIDmode, comparison,
  2153. gen_rtx (LABEL_REF, VOIDmode,
  2154. if_true_label),
  2155. pc_rtx)));
  2156. if (if_false_label)
  2157. emit_jump (if_false_label);
  2158. }
  2159. else if (if_false_label)
  2160. {
  2161. emit_jump_insn (gen_rtx (SET, VOIDmode, pc_rtx,
  2162. gen_rtx (IF_THEN_ELSE, VOIDmode, comparison,
  2163. pc_rtx,
  2164. gen_rtx (LABEL_REF, VOIDmode,
  2165. if_false_label))));
  2166. }
  2167. if (drop_through_label)
  2168. emit_label (drop_through_label);
  2169. }
  2170. /* Generate code for a comparison expression EXP
  2171. (including code to compute the values to be compared)
  2172. and set (CC0) according to the result.
  2173. SIGNED_FORWARD should be the rtx operation for this comparison for
  2174. signed data; UNSIGNED_FORWARD, likewise for use if data is unsigned.
  2175. SIGNED_REVERSE and UNSIGNED_REVERSE are used if it is desirable
  2176. to interchange the operands for the compare instruction.
  2177. We force a stack adjustment unless there are currently
  2178. things pushed on the stack that aren't yet used. */
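/* For example, do_jump passes (LT, LTU, GT, GTU) for an LT_EXPR:
the unsigned codes are chosen when either operand has an unsigned
type, and the reverse codes are used when a constant zero first
operand is moved into second position. */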
  2179. static rtx
  2180. compare (exp, signed_forward, unsigned_forward,
  2181. signed_reverse, unsigned_reverse)
  2182. register tree exp;
  2183. enum rtx_code signed_forward, unsigned_forward;
  2184. enum rtx_code signed_reverse, unsigned_reverse;
  2185. {
  2186. register rtx op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
  2187. register rtx op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
  2188. register enum machine_mode mode = GET_MODE (op0);
  2189. int unsignedp;
  2190. /* If one operand is 0, make it the second one. */
  2191. if (op0 == const0_rtx || op0 == fconst0_rtx || op0 == dconst0_rtx)
  2192. {
  2193. rtx tem = op0;
  2194. op0 = op1;
  2195. op1 = tem;
  2196. signed_forward = signed_reverse;
  2197. unsigned_forward = unsigned_reverse;
  2198. }
  2199. if (force_mem)
  2200. {
  2201. op0 = force_not_mem (op0);
  2202. op1 = force_not_mem (op1);
  2203. }
  2204. do_pending_stack_adjust ();
  2205. unsignedp = (type_unsigned_p (TREE_TYPE (TREE_OPERAND (exp, 0)))
  2206. || type_unsigned_p (TREE_TYPE (TREE_OPERAND (exp, 1))));
  2207. emit_cmp_insn (op0, op1,
  2208. (mode == BLKmode) ? expr_size (TREE_OPERAND (exp, 0)) : 0,
  2209. unsignedp);
  2210. return gen_rtx ((unsignedp ? unsigned_forward : signed_forward),
  2211. VOIDmode, cc0_rtx, const0_rtx);
  2212. }
  2213. /* Like compare but expects the values to compare as two rtx's.
  2214. The decision as to signed or unsigned comparison must be made by the caller.
  2215. BLKmode is not allowed. */
  2216. static rtx
  2217. compare1 (op0, op1, forward_op, reverse_op, unsignedp)
  2218. register rtx op0, op1;
  2219. enum rtx_code forward_op, reverse_op;
  2220. int unsignedp;
  2221. {
  2222. register enum machine_mode mode = GET_MODE (op0);
  2223. /* If one operand is 0, make it the second one. */
  2224. if (op0 == const0_rtx || op0 == fconst0_rtx || op0 == dconst0_rtx)
  2225. {
  2226. rtx tem = op0;
  2227. op0 = op1;
  2228. op1 = tem;
  2229. forward_op = reverse_op;
  2230. }
  2231. if (force_mem)
  2232. {
  2233. op0 = force_not_mem (op0);
  2234. op1 = force_not_mem (op1);
  2235. }
  2236. do_pending_stack_adjust ();
  2237. emit_cmp_insn (op0, op1, 0, unsignedp);
  2238. return gen_rtx (forward_op, VOIDmode, cc0_rtx, const0_rtx);
  2239. }
  2240. /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
  2241. void
  2242. do_jump_if_equal (op1, op2, label)
  2243. rtx op1, op2, label;
  2244. {
  2245. emit_cmp_insn (op1, op2, 0);
  2246. emit_jump_insn (gen_beq (label));
  2247. }
  2248. /* Generate code to calculate EXP using a store-flag instruction
  2249. and return an rtx for the result.
  2250. If TARGET is nonzero, store the result there if convenient.
  2251. Return zero if there is no suitable set-flag instruction
  2252. available on this machine. */
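/* When the machine has a suitable set-flag pattern, the flag value
is stored into TARGET and then masked with 1 (the expand_bit_and
call below) so the value returned is exactly 0 or 1. */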
  2253. static rtx
  2254. do_store_flag (exp, target)
  2255. tree exp;
  2256. rtx target;
  2257. {
  2258. register enum tree_code code = TREE_CODE (exp);
  2259. register rtx comparison = 0;
  2260. if (target == 0 || GET_MODE (target) != SImode)
  2261. target = gen_reg_rtx (SImode);
  2262. switch (code)
  2263. {
  2264. #ifdef HAVE_seqsi
  2265. case EQ_EXPR:
  2266. if (HAVE_seqsi)
  2267. comparison = compare (exp, EQ, EQ, EQ, EQ);
  2268. break;
  2269. #endif
  2270. #ifdef HAVE_snesi
  2271. case NE_EXPR:
  2272. if (HAVE_snesi)
  2273. comparison = compare (exp, NE, NE, NE, NE);
  2274. break;
  2275. #endif
  2276. #if defined (HAVE_sltsi) && defined (HAVE_sltusi) && defined (HAVE_sgtsi) && defined (HAVE_sgtusi)
  2277. case LT_EXPR:
  2278. if (HAVE_sltsi && HAVE_sltusi && HAVE_sgtsi && HAVE_sgtusi)
  2279. comparison = compare (exp, LT, LTU, GT, GTU);
  2280. break;
  2281. case GT_EXPR:
  2282. if (HAVE_sltsi && HAVE_sltusi && HAVE_sgtsi && HAVE_sgtusi)
  2283. comparison = compare (exp, GT, GTU, LT, LTU);
  2284. break;
  2285. #endif
  2286. #if defined (HAVE_slesi) && defined (HAVE_sleusi) && defined (HAVE_sgesi) && defined (HAVE_sgeusi)
  2287. case LE_EXPR:
  2288. if (HAVE_slesi && HAVE_sleusi && HAVE_sgesi && HAVE_sgeusi)
  2289. comparison = compare (exp, LE, LEU, GE, GEU);
  2290. break;
  2291. case GE_EXPR:
  2292. if (HAVE_slesi && HAVE_sleusi && HAVE_sgesi && HAVE_sgeusi)
  2293. comparison = compare (exp, GE, GEU, LE, LEU);
  2294. break;
  2295. #endif
  2296. }
  2297. if (comparison == 0)
  2298. return 0;
  2299. emit_insn (gen_rtx (SET, VOIDmode, target, comparison));
  2300. expand_bit_and (GET_MODE (target), target, const1_rtx, target);
  2301. return target;
  2302. }
  2303. /* Generate a tablejump instruction (used for switch statements). */
  2304. #ifdef HAVE_tablejump
  2305. /* INDEX is the value being switched on, with the lowest value
  2306. in the table already subtracted.
  2307. RANGE is the length of the jump table.
  2308. TABLE_LABEL is a CODE_LABEL rtx for the table itself.
  2309. DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
  2310. index value is out of range. */
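/* The code below jumps to DEFAULT_LABEL unless 0 <= INDEX <= RANGE,
then fetches the chosen label from the address
TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE)
and jumps through it. */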
  2311. void
  2312. do_tablejump (index, range, table_label, default_label)
  2313. rtx index, range, table_label, default_label;
  2314. {
  2315. register rtx temp;
  2316. emit_cmp_insn (index, const0_rtx, 0);
  2317. emit_jump_insn (gen_blt (default_label));
  2318. emit_cmp_insn (range, index, 0);
  2319. emit_jump_insn (gen_blt (default_label));
  2320. index = memory_address (CASE_VECTOR_MODE,
  2321. gen_rtx (PLUS, Pmode,
  2322. gen_rtx (LABEL_REF, VOIDmode, table_label),
  2323. gen_rtx (MULT, Pmode, index,
  2324. gen_rtx (CONST_INT, VOIDmode,
  2325. GET_MODE_SIZE (CASE_VECTOR_MODE)))));
  2326. temp = gen_reg_rtx (CASE_VECTOR_MODE);
  2327. convert_move (temp, gen_rtx (MEM, CASE_VECTOR_MODE, index), 0);
  2328. emit_jump_insn (gen_tablejump (temp));
  2329. }
#endif /* HAVE_tablejump */