jump.c

/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1988 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY.  No author or distributor
accepts responsibility to anyone for the consequences of using it
or for whether it serves any particular purpose or works at all,
unless he says so in writing.  Refer to the GNU CC General Public
License for full details.

Everyone is granted permission to copy, modify and redistribute
GNU CC, but only under the conditions described in the
GNU CC General Public License.  A copy of this license is
supposed to have been given to you along with GNU CC so you
can know your rights and responsibilities.  It should be in a
file named COPYING.  Among other things, the copyright notice
and this notice must be preserved on all copies.  */

/* This is the jump-optimization pass of the compiler.
   It is run two or three times: once before cse, sometimes once after cse,
   and once after reload (before final).

   jump_optimize deletes unreachable code and labels that are not used.
   It also deletes jumps that jump to the following insn,
   and simplifies jumps around unconditional jumps and jumps
   to unconditional jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   Optionally, cross-jumping can be done.  Currently it is done
   only the last time (when after reload and before final).
   In fact, the code for cross-jumping now assumes that register
   allocation has been done, since it uses `rtx_renumbered_equal_p'.

   Jump optimization is done after cse when cse's constant-propagation
   causes jumps to become unconditional or to be deleted.

   Unreachable loops are not detected here, because the labels
   have references and the insns appear reachable from the labels.
   find_basic_blocks in flow.c finds and deletes such loops.

   The subroutines delete_insn, redirect_jump, invert_jump, next_real_insn
   and prev_real_insn are used from other passes as well.  */

#include "config.h"
#include "rtl.h"
#include "flags.h"
#include "regs.h"

/* ??? Eventually must record somehow the labels used by jumps
   from nested functions.  */
/* Pre-record the next or previous real insn for each label?
   No, this pass is very fast anyway.  */
/* Condense consecutive labels?
   This would make life analysis faster, maybe.  */
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

/* Vector indexed by uid.
   For each CODE_LABEL, index by its uid to get first unconditional jump
   that jumps to the label.
   For each JUMP_INSN, index by its uid to get the next unconditional jump
   that jumps to the same label.
   Element 0 is the start of a chain of all return insns.
   (It is safe to use element 0 because insn uid 0 is not used.)  */

rtx *jump_chain;
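
/* For example, if the insns with uids 12 and 30 are both unconditional
   jumps to the label whose uid is 7, the chain for that label might read
       jump_chain[7]  == the jump with uid 30,
       jump_chain[30] == the jump with uid 12,
       jump_chain[12] == 0.
   (Illustrative only; the actual uids depend on the function compiled.)  */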

rtx delete_insn ();
void redirect_jump ();
void invert_jump ();
rtx next_real_insn ();
rtx prev_real_insn ();
rtx next_label ();

static void mark_jump_label ();
static void delete_jump ();
static void invert_exp ();
static void redirect_exp ();
static rtx follow_jumps ();
static int tension_vector_labels ();
static void find_cross_jump ();
static void do_cross_jump ();
static enum rtx_code reverse_condition ();
static int jump_back_p ();

/* Delete no-op jumps and optimize jumps to jumps
   and jumps around jumps.
   Delete unused labels and unreachable code.

   If CROSS_JUMP is nonzero, detect matching code
   before a jump and its destination and unify them.

   If NOOP_MOVES is nonzero, also delete no-op move insns
   and perform machine-specific peephole optimizations
   (but flag_no_peephole inhibits the latter).

   If `optimize' is zero, don't change any code,
   just determine whether control drops off the end of the function.
   This case occurs when we have -W and not -O.
   It works because `delete_insn' checks the value of `optimize'
   and refrains from actually deleting when that is 0.  */

void
jump_optimize (f, cross_jump, noop_moves)
     rtx f;
     int cross_jump;
     int noop_moves;
{
  register rtx insn;
  int changed;
  int first = 1;
  int max_uid = 0;
  rtx last_insn;

  /* Initialize LABEL_NUSES and JUMP_LABEL fields.  */

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
        LABEL_NUSES (insn) = 0;
      if (GET_CODE (insn) == JUMP_INSN)
        JUMP_LABEL (insn) = 0;
      if (INSN_UID (insn) > max_uid)
        max_uid = INSN_UID (insn);
    }

  max_uid++;
  jump_chain = (rtx *) alloca (max_uid * sizeof (rtx));
  bzero (jump_chain, max_uid * sizeof (rtx));

  /* Delete insns following barriers, up to next label.  */

  for (insn = f; insn;)
    {
      if (GET_CODE (insn) == BARRIER)
        {
          insn = NEXT_INSN (insn);
          while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
            {
              if (GET_CODE (insn) == NOTE
                  && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
                insn = NEXT_INSN (insn);
              else
                insn = delete_insn (insn);
            }
          /* INSN is now the code_label.  */
        }
      else
        insn = NEXT_INSN (insn);
    }

  /* Mark the label each jump jumps to.
     Combine consecutive labels, and count uses of labels.
     For each label, make a chain (using `jump_chain')
     of all the *unconditional* jumps that jump to it;
     also make a chain of all returns.  */

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == JUMP_INSN && !insn->volatil)
      {
        mark_jump_label (PATTERN (insn), insn, cross_jump);
        if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
          {
            jump_chain[INSN_UID (insn)]
              = jump_chain[INSN_UID (JUMP_LABEL (insn))];
            jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
          }
        if (GET_CODE (PATTERN (insn)) == RETURN)
          {
            jump_chain[INSN_UID (insn)] = jump_chain[0];
            jump_chain[0] = insn;
          }
      }

  /* Delete all labels already not referenced.
     Also find the last insn.  */

  last_insn = 0;
  for (insn = f; insn; )
    {
      if (GET_CODE (insn) == CODE_LABEL && LABEL_NUSES (insn) == 0)
        insn = delete_insn (insn);
      else
        {
          last_insn = insn;
          insn = NEXT_INSN (insn);
        }
    }

  if (!optimize)
    {
      /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
         If so record that this function can drop off the end.  */

      insn = last_insn;
      while (insn && GET_CODE (insn) == CODE_LABEL)
        insn = PREV_INSN (insn);
      if (insn != 0 && GET_CODE (insn) == NOTE
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
          && ! insn->volatil)
        {
          extern int current_function_returns_null;
          current_function_returns_null = 1;
        }

      /* Zero the "deleted" flag of all the "deleted" insns.  */

      for (insn = f; insn; insn = NEXT_INSN (insn))
        insn->volatil = 0;
      return;
    }

#if 0
#ifdef EXIT_IGNORE_STACK
  /* If the last insn just adjusts the stack,
     we can delete it on certain machines,
     provided we have a frame pointer.  */

  if (frame_pointer_needed && EXIT_IGNORE_STACK)
    {
      insn = last_insn;
      while (insn)
        {
          rtx prev;

          /* Back up to a real insn.  */
          if (GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN
              && GET_CODE (insn) != CALL_INSN)
            insn = prev_real_insn (insn);
          if (insn == 0)
            break;
          prev = PREV_INSN (insn);
          /* If this insn is a stack adjust, delete it.  */
          if (GET_CODE (insn) == INSN
              && GET_CODE (PATTERN (insn)) == SET
              && GET_CODE (SET_DEST (PATTERN (insn))) == REG
              && REGNO (SET_DEST (PATTERN (insn))) == STACK_POINTER_REGNUM)
            {
              delete_insn (insn);
              if (insn == last_insn)
                last_insn = prev;
            }
          else
            /* If we find an insn that isn't a stack adjust, stop deleting.  */
            break;
          /* Back up to insn before the deleted one and try to delete more.  */
          insn = prev;
        }
    }
#endif
#endif

  if (noop_moves)
    for (insn = f; insn; )
      {
        register rtx next = NEXT_INSN (insn);

        if (GET_CODE (insn) == INSN)
          {
            register rtx body = PATTERN (insn);

            /* Delete insns that existed just to advise flow-analysis.  */
            if (GET_CODE (body) == USE
                || GET_CODE (body) == CLOBBER)
              delete_insn (insn);
            /* Detect and delete no-op move instructions
               resulting from not allocating a parameter in a register.  */
            else if (GET_CODE (body) == SET
                     && (SET_DEST (body) == SET_SRC (body)
                         || (GET_CODE (SET_DEST (body)) == MEM
                             && GET_CODE (SET_SRC (body)) == MEM
                             && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
                     && ! SET_DEST (body)->volatil
                     && ! SET_SRC (body)->volatil)
              delete_insn (insn);
            /* Detect and ignore no-op move instructions
               resulting from smart or fortuitous register allocation.  */
            else if (GET_CODE (body) == SET)
              {
                int sreg = true_regnum (SET_SRC (body));
                int dreg = true_regnum (SET_DEST (body));

                if (sreg == dreg && sreg >= 0)
                  delete_insn (insn);
                else if (sreg >= 0 && dreg >= 0)
                  {
                    rtx tem = find_equiv_reg (0, insn, 0,
                                              sreg, 0, dreg);
                    if (tem != 0
                        && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
                      delete_insn (insn);
                  }
              }
          }
        insn = next;
      }

  /* Now iterate optimizing jumps until nothing changes over one pass.  */

  changed = 1;
  while (changed)
    {
      register rtx next;
      changed = 0;

      for (insn = f; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          /* On the first iteration, if this is the last jump pass
             (just before final), do the special peephole optimizations.  */
          if (noop_moves && first && !flag_no_peephole)
            if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
              peephole (insn);

          /* Tension the labels in dispatch tables.  */
          if (GET_CODE (insn) == JUMP_INSN)
            {
              if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
                changed |= tension_vector_labels (PATTERN (insn), 0);
              if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
                changed |= tension_vector_labels (PATTERN (insn), 1);
            }

          if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
            {
              register rtx reallabelprev = prev_real_insn (JUMP_LABEL (insn));

              /* Delete insns that adjust stack pointer before a return,
                 if this is the last jump-optimization before final
                 and we need to have a frame pointer.  */
#if 0
#ifdef EXIT_IGNORE_STACK
              if (noop_moves && frame_pointer_needed && EXIT_IGNORE_STACK
                  && NEXT_INSN (JUMP_LABEL (insn)) == 0)
                {
                  rtx prev = prev_real_insn (insn);
                  if (prev != 0
                      && GET_CODE (prev) == INSN
                      && GET_CODE (PATTERN (prev)) == SET
                      && GET_CODE (SET_DEST (PATTERN (prev))) == REG
                      && REGNO (SET_DEST (PATTERN (prev))) == STACK_POINTER_REGNUM)
                    {
                      delete_insn (prev);
                      changed = 1;
                    }
                }
#endif
#endif

              /* Detect jump to following insn.  */
              if (reallabelprev == insn && condjump_p (insn))
                {
                  reallabelprev = PREV_INSN (insn);
                  delete_jump (insn);
                  changed = 1;
                }
              /* Detect jumping over an unconditional jump.  */
              else if (reallabelprev != 0
                       && GET_CODE (reallabelprev) == JUMP_INSN
                       && prev_real_insn (reallabelprev) == insn
                       && no_labels_between_p (insn, reallabelprev)
                       && simplejump_p (reallabelprev)
                       /* Ignore this if INSN is a hairy kind of jump,
                          since they may not be invertible.
                          This is conservative; could instead construct
                          the inverted insn and try recognizing it.  */
                       && condjump_p (insn))
                {
                  /* Delete the original unconditional jump (and barrier).  */
                  /* But don't let its destination go with it.  */
                  ++LABEL_NUSES (JUMP_LABEL (reallabelprev));
                  delete_insn (reallabelprev);
                  /* Now change the condition, and make it go to the
                     place the deleted jump went to.
                     This may cause the label after the deletion to go away.
                     But now that the unconditional jump and its barrier
                     are gone, that is ok.  */
                  invert_jump (insn, JUMP_LABEL (reallabelprev));
                  --LABEL_NUSES (JUMP_LABEL (reallabelprev));
                  next = insn;
                  changed = 1;
                }
              else
                {
                  /* Detect a jump to a jump.  */
                  {
                    register rtx nlabel = follow_jumps (JUMP_LABEL (insn));
                    if (nlabel != JUMP_LABEL (insn))
                      {
                        redirect_jump (insn, nlabel);
                        changed = 1;
                        next = insn;
                      }
                  }

                  /* Look for   if (foo) bar; else break;  */
                  /* The insns look like this:
                         insn = condjump label1;
                         ...range1 (some insns)...
                         jump label2;
                     label1:
                         ...range2 (some insns)...
                         jump somewhere unconditionally
                     label2:  */
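                  /* If the transformation below applies, the insns are
                     rearranged to read:
                         insn = condjump (inverted) label1;
                         ...range2 (some insns)...
                         jump somewhere unconditionally
                     label1:
                         ...range1 (some insns)...
                         jump label2;
                     label2:
                     Since label2 then immediately follows the `jump label2'
                     at the end of range1, that jump becomes a jump to the
                     following insn and is removed later by the
                     jump-to-following-insn case above.  */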
                  {
                    rtx label1 = next_label (insn);
                    rtx range1end = label1 ? prev_real_insn (label1) : 0;
                    /* Don't do this optimization on the first round, so that
                       jump-around-a-jump gets simplified before we ask here
                       whether a jump is unconditional.  */
                    if (! first
                        && JUMP_LABEL (insn) == label1
                        && LABEL_NUSES (label1) == 1
                        && GET_CODE (range1end) == JUMP_INSN
                        && simplejump_p (range1end))
                      {
                        rtx label2 = next_label (label1);
                        rtx range2end = label2 ? prev_real_insn (label2) : 0;
                        if (range1end != range2end
                            && JUMP_LABEL (range1end) == label2
                            && GET_CODE (range2end) == JUMP_INSN
                            && GET_CODE (NEXT_INSN (range2end)) == BARRIER)
                          {
                            rtx range1beg = NEXT_INSN (insn);
                            rtx range2beg = NEXT_INSN (label1);
                            rtx range1after = NEXT_INSN (range1end);
                            rtx range2after = NEXT_INSN (range2end);
                            /* Splice range2 between INSN and LABEL1.  */
                            NEXT_INSN (insn) = range2beg;
                            PREV_INSN (range2beg) = insn;
                            NEXT_INSN (range2end) = range1after;
                            PREV_INSN (range1after) = range2end;
                            /* Splice range1 between LABEL1 and LABEL2.  */
                            NEXT_INSN (label1) = range1beg;
                            PREV_INSN (range1beg) = label1;
                            NEXT_INSN (range1end) = range2after;
                            PREV_INSN (range2after) = range1end;
                            /* Invert the jump condition, so we
                               still execute the same insns in each case.  */
                            invert_jump (insn, label1);
                            changed = 1;
                            continue;
                          }
                      }
                  }

                  /* Now that the jump has been tensioned,
                     try cross jumping: check for identical code
                     before the jump and before its target label.  */

                  /* First, cross jumping of conditional jumps:  */
                  if (cross_jump && condjump_p (insn))
                    {
                      rtx newjpos, newlpos;
                      rtx x = prev_real_insn (JUMP_LABEL (insn));

                      /* A conditional jump may be crossjumped
                         only if the place it jumps to follows
                         an opposing jump that comes back here.  */
                      if (x != 0 && ! jump_back_p (x, insn))
                        /* We have no opposing jump;
                           cannot cross jump this insn.  */
                        x = 0;

                      newjpos = 0;
                      /* X is nonzero if it is ok to cross jump
                         to code before X.  If so, see if it matches.  */
                      if (x != 0)
                        find_cross_jump (insn, x, 2,
                                         &newjpos, &newlpos);

                      if (newjpos != 0)
                        {
                          do_cross_jump (insn, newjpos, newlpos);
                          /* Make the old conditional jump
                             into an unconditional one.  */
                          SET_SRC (PATTERN (insn))
                            = gen_rtx (LABEL_REF, VOIDmode, JUMP_LABEL (insn));
                          emit_barrier_after (insn);
                          changed = 1;
                          next = insn;
                        }
                    }

                  /* Cross jumping of unconditional jumps:
                     a few differences.  */
                  if (cross_jump && simplejump_p (insn))
                    {
                      rtx newjpos, newlpos;
                      rtx target;

                      newjpos = 0;
                      /* See if it is ok to cross jump to the code
                         just before the target label; if so, this
                         sets NEWJPOS.  */
                      find_cross_jump (insn, JUMP_LABEL (insn), 1,
                                       &newjpos, &newlpos);
                      /* If cannot cross jump to code before the label,
                         see if we can cross jump to another jump to
                         the same label.  */
                      /* Try each other jump to this label.  */
                      if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
                        for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
                             target != 0 && newjpos == 0;
                             target = jump_chain[INSN_UID (target)])
                          if (target != insn
                              && JUMP_LABEL (target) == JUMP_LABEL (insn)
                              /* Ignore TARGET if it's deleted.  */
                              && ! target->volatil)
                            find_cross_jump (insn, target, 2,
                                             &newjpos, &newlpos);

                      if (newjpos != 0)
                        {
                          do_cross_jump (insn, newjpos, newlpos);
                          changed = 1;
                          next = insn;
                        }
                    }
                }
            }
          else if (GET_CODE (insn) == JUMP_INSN
                   && GET_CODE (PATTERN (insn)) == RETURN)
            {
              /* Return insns all "jump to the same place"
                 so we can cross-jump between any two of them.  */
              if (cross_jump)
                {
                  rtx newjpos, newlpos, target;

                  newjpos = 0;
                  /* If cannot cross jump to code before the label,
                     see if we can cross jump to another jump to
                     the same label.  */
                  /* Try each other jump to this label.  */
                  for (target = jump_chain[0];
                       target != 0 && newjpos == 0;
                       target = jump_chain[INSN_UID (target)])
                    if (target != insn
                        && ! target->volatil
                        && GET_CODE (PATTERN (target)) == RETURN)
                      find_cross_jump (insn, target, 2,
                                       &newjpos, &newlpos);
                  if (newjpos != 0)
                    {
                      do_cross_jump (insn, newjpos, newlpos);
                      changed = 1;
                      next = insn;
                    }
                }
            }
        }
      first = 0;
    }

  /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
     If so, delete it, and record that this function can drop off the end.  */

  insn = last_insn;
  while (insn && GET_CODE (insn) == CODE_LABEL)
    insn = PREV_INSN (insn);
  if (insn != 0 && GET_CODE (insn) == NOTE
      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)
    {
      extern int current_function_returns_null;
      current_function_returns_null = 1;
      delete_insn (insn);
    }
}

/* Compare the instructions before insn E1 with those before E2.
   Assume E1 is a jump that jumps to label E2
   (that is not always true but it might as well be).
   Find the longest possible equivalent sequences
   and store the first insns of those sequences into *F1 and *F2.
   Store zero there if no equivalent preceding instructions are found.

   We give up if we find a label in stream 1.
   Actually we could transfer that label into stream 2.  */
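
/* For example, if the two insns just before E1 and the two just before
   the label E2 form matching pairs under rtx_renumbered_equal_p (say a
   load followed by a cc0-setting compare in both streams), then a call
   with MINIMUM == 2 sets *F1 and *F2 to the first insn of each matching
   sequence.  (Illustrative; any equivalent insns would do.)  */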

static void
find_cross_jump (e1, e2, minimum, f1, f2)
     rtx e1, e2;
     int minimum;
     rtx *f1, *f2;
{
  register rtx i1 = e1, i2 = e2;
  register rtx p1, p2;
  rtx last1 = 0, last2 = 0;
  rtx afterlast1 = 0, afterlast2 = 0;

  *f1 = 0;
  *f2 = 0;

  while (1)
    {
      i1 = PREV_INSN (i1);
      while (i1 && GET_CODE (i1) == NOTE)
        i1 = PREV_INSN (i1);

      i2 = PREV_INSN (i2);
      while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
        i2 = PREV_INSN (i2);

      if (i1 == 0)
        break;

      /* If we will get to this code by jumping, those jumps will be
         tensioned to go directly to the new label (before I2),
         so this cross-jumping won't cost extra.  So reduce the minimum.  */
      if (GET_CODE (i1) == CODE_LABEL)
        {
          --minimum;
          break;
        }

      if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
        break;

      p1 = PATTERN (i1);
      p2 = PATTERN (i2);
      if (GET_CODE (p1) != GET_CODE (p2)
          || !rtx_renumbered_equal_p (p1, p2))
        {
          /* Insns fail to match; cross jumping is limited to the following
             insns.  */
          /* Don't allow the insn after a compare to be shared by cross-jumping
             unless the compare is also shared.
             Here, if either of these non-matching insns is a compare,
             exclude the following insn from possible cross-jumping.  */
          if ((GET_CODE (p1) == SET && SET_DEST (p1) == cc0_rtx)
              || (GET_CODE (p2) == SET && SET_DEST (p2) == cc0_rtx))
            last1 = afterlast1, last2 = afterlast2, ++minimum;
          /* If cross-jumping here will feed a jump-around-jump optimization,
             this jump won't cost extra, so reduce the minimum.  */
          if (GET_CODE (i1) == JUMP_INSN
              && JUMP_LABEL (i1)
              && prev_real_insn (JUMP_LABEL (i1)) == e1)
            --minimum;
          break;
        }

      if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
        {
          /* Ok, this insn is potentially includable in a cross-jump here.  */
          afterlast1 = last1, afterlast2 = last2;
          last1 = i1, last2 = i2, --minimum;
        }
    }

  if (minimum <= 0 && last1 != 0)
    *f1 = last1, *f2 = last2;
}

static void
do_cross_jump (insn, newjpos, newlpos)
     rtx insn, newjpos, newlpos;
{
  register rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = PREV_INSN (newlpos);
  if (GET_CODE (label) != CODE_LABEL)
    {
      label = gen_label_rtx ();
      emit_label_after (label, PREV_INSN (newlpos));
      LABEL_NUSES (label) = 0;
    }

  /* Make the same jump insn jump to the new point.  */
  if (GET_CODE (PATTERN (insn)) == RETURN)
    {
      extern rtx gen_jump ();
      PATTERN (insn) = gen_jump (label);
      INSN_CODE (insn) = -1;
      JUMP_LABEL (insn) = label;
      LABEL_NUSES (label)++;
    }
  else
    redirect_jump (insn, label);

  /* Delete the matching insns before the jump.  */
  newjpos = PREV_INSN (newjpos);
  while (NEXT_INSN (newjpos) != insn)
    /* Don't delete line numbers.  */
    if (GET_CODE (NEXT_INSN (newjpos)) != NOTE)
      delete_insn (NEXT_INSN (newjpos));
    else
      newjpos = NEXT_INSN (newjpos);
}

/* Return 1 if INSN is a jump that jumps to right after TARGET
   only on the condition that TARGET itself would drop through.
   Assumes that TARGET is a conditional jump.  */
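
/* For example (written at the source level, with arbitrary names):

       TARGET:  if (a < b) goto L1;
       L2:      ...
       ...
       INSN:    if (a >= b) goto L2;

   Here INSN jumps to L2, which is right after TARGET, exactly when
   TARGET itself would have dropped through, so jump_back_p (INSN, TARGET)
   is 1.  */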

static int
jump_back_p (insn, target)
     rtx insn, target;
{
  rtx cinsn, ctarget;
  enum rtx_code codei, codet;

  if (simplejump_p (insn) || ! condjump_p (insn)
      || simplejump_p (target))
    return 0;
  if (target != prev_real_insn (JUMP_LABEL (insn)))
    return 0;

  cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
  ctarget = XEXP (SET_SRC (PATTERN (target)), 0);

  codei = GET_CODE (cinsn);
  codet = GET_CODE (ctarget);

  if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
    codei = reverse_condition (codei);
  if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
    codet = reverse_condition (codet);

  return (codei == codet
          && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
          && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
}

/* Given an rtx-code for a comparison, return the code
   for the negated comparison.  */

static enum rtx_code
reverse_condition (code)
     enum rtx_code code;
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
    default:
      abort ();
      return UNKNOWN;
    }
}

/* Return 1 if INSN is an unconditional jump and nothing else.  */

static int
simplejump_p (insn)
     rtx insn;
{
  register rtx x = PATTERN (insn);
  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  return 1;
}

/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.  */

static int
condjump_p (insn)
     rtx insn;
{
  register rtx x = PATTERN (insn);
  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF)
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF)
    return 1;
  return 0;
}

/* Return 1 if in between BEG and END there is no CODE_LABEL insn.  */

int
no_labels_between_p (beg, end)
     rtx beg, end;
{
  register rtx p;
  for (p = beg; p != end; p = NEXT_INSN (p))
    if (GET_CODE (p) == CODE_LABEL)
      return 0;
  return 1;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before LABEL;
   or 0, if there is none.  */

rtx
prev_real_insn (label)
     rtx label;
{
  register rtx insn = PREV_INSN (label);
  register RTX_CODE code;

  while (1)
    {
      if (insn == 0)
        return 0;
      code = GET_CODE (insn);
      if (code == INSN || code == CALL_INSN || code == JUMP_INSN)
        break;
      insn = PREV_INSN (insn);
    }
  return insn;
}

/* Return the next INSN, CALL_INSN or JUMP_INSN after LABEL;
   or 0, if there is none.  */

rtx
next_real_insn (label)
     rtx label;
{
  register rtx insn = NEXT_INSN (label);
  register RTX_CODE code;

  while (1)
    {
      if (insn == 0)
        return insn;
      code = GET_CODE (insn);
      if (code == INSN || code == CALL_INSN || code == JUMP_INSN)
        break;
      insn = NEXT_INSN (insn);
    }
  return insn;
}

/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */

rtx
next_label (insn)
     rtx insn;
{
  do insn = NEXT_INSN (insn);
  while (insn != 0 && GET_CODE (insn) != CODE_LABEL);
  return insn;
}

/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   If LABEL is not followed by a jump, return LABEL.  */
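
/* For example, if the only real insn after label L1 is `jump L2' (followed
   by a barrier), and likewise L2 leads only to `jump L3', then
   follow_jumps (L1) returns L3.  The depth limit and the cycle check below
   keep a circular chain of jumps from looping forever.
   (Illustrative label names.)  */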

static rtx
follow_jumps (label)
     rtx label;
{
  register rtx insn;
  register rtx next;
  register rtx value = label;
  register int depth;

  for (depth = 0;
       (depth < 10
        && (insn = next_real_insn (value)) != 0
        && GET_CODE (insn) == JUMP_INSN
        && JUMP_LABEL (insn) != 0
        && (next = NEXT_INSN (insn))
        && GET_CODE (next) == BARRIER);
       depth++)
    {
      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
        break;
      value = JUMP_LABEL (insn);
    }
  return value;
}

/* Assuming that field IDX of X is a vector of label_refs,
   replace each of them by the ultimate label reached by it.
   Return nonzero if a change is made.  */

static int
tension_vector_labels (x, idx)
     register rtx x;
     register int idx;
{
  int changed = 0;
  register int i;
  for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
    {
      register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
      register rtx nlabel = follow_jumps (olabel);
      if (nlabel != olabel)
        {
          XEXP (XVECEXP (x, idx, i), 0) = nlabel;
          ++LABEL_NUSES (nlabel);
          if (--LABEL_NUSES (olabel) == 0)
            delete_insn (olabel);
          changed = 1;
        }
    }
  return changed;
}

/* Find all CODE_LABELs referred to in X,
   and increment their use counts.
   Also store one of them in JUMP_LABEL (INSN) if INSN is nonzero.
   Also, when there are consecutive labels,
   canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.  */
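
/* For example, if L1 is immediately followed by another label L2 (with no
   real insns, and no loop-beginning or function-end note, in between),
   then a (label_ref L1) inside X is rewritten as (label_ref L2) and it is
   L2 whose use count is incremented.  (Illustrative label names.)  */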

static void
mark_jump_label (x, insn, cross_jump)
     register rtx x;
     rtx insn;
     int cross_jump;
{
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register char *fmt;

  if (code == LABEL_REF)
    {
      register rtx label = XEXP (x, 0);
      register rtx next;
      if (GET_CODE (label) != CODE_LABEL)
        return;
      /* If there are other labels following this one,
         replace it with the last of the consecutive labels.  */
      for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
        {
          if (GET_CODE (next) == CODE_LABEL)
            label = next;
          else if (GET_CODE (next) != NOTE
                   || NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
                   || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END)
            break;
        }
      XEXP (x, 0) = label;
      ++LABEL_NUSES (label);
      if (insn)
        JUMP_LABEL (insn) = label;
      return;
    }

  /* Do walk the labels in a vector,
     but don't set its JUMP_LABEL.  */
  if (code == ADDR_VEC || code == ADDR_DIFF_VEC)
    insn = 0;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_jump_label (XEXP (x, i), insn, cross_jump);
      else if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
        }
    }
}

/* If all INSN does is set the pc, delete it,
   and delete the insn that set the condition codes for it
   if that's what the previous thing was.  */

static void
delete_jump (insn)
     rtx insn;
{
  register rtx x = PATTERN (insn);
  register rtx prev;

  if (GET_CODE (x) == SET
      && GET_CODE (SET_DEST (x)) == PC)
    {
      prev = PREV_INSN (insn);
      delete_insn (insn);
      /* We assume that at this stage
         CC's are always set explicitly
         and always immediately before the jump that
         will use them.  So if the previous insn
         exists to set the CC's, delete it.  */
      while (prev && GET_CODE (prev) == NOTE)
        prev = PREV_INSN (prev);
      if (prev && GET_CODE (prev) == INSN
          && GET_CODE (PATTERN (prev)) == SET
          && SET_DEST (PATTERN (prev)) == cc0_rtx)
        delete_insn (prev);
    }
}

/* Delete insn INSN from the chain of insns and update label ref counts.
   May delete some following insns as a consequence; may even delete
   a label elsewhere and insns that follow it.

   Returns the first insn after INSN that was not deleted.  */

rtx
delete_insn (insn)
     register rtx insn;
{
  register rtx next = NEXT_INSN (insn);
  register rtx prev = PREV_INSN (insn);

  if (insn->volatil)
    {
      /* This insn is already deleted => return first following nondeleted.  */
      while (next && next->volatil)
        next = NEXT_INSN (next);
      return next;
    }

  /* Mark this insn as deleted.  */
  insn->volatil = 1;

  /* If instruction is followed by a barrier,
     delete the barrier too.  */
  if (next != 0 && GET_CODE (next) == BARRIER)
    {
      next->volatil = 1;
      next = NEXT_INSN (next);
    }

  /* Patch out INSN (and the barrier if any).  */
  if (optimize)
    {
      if (prev)
        NEXT_INSN (prev) = next;
      if (next)
        PREV_INSN (next) = prev;
    }

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */
  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    if (--LABEL_NUSES (JUMP_LABEL (insn)) == 0)
      {
        /* This can delete NEXT or PREV,
           either directly if NEXT is JUMP_LABEL (INSN),
           or indirectly through more levels of jumps.  */
        delete_insn (JUMP_LABEL (insn));
        /* I feel a little doubtful about this loop,
           but I see no clean and sure alternative way
           to find the first insn after INSN that is not now deleted.
           I hope this works.  */
        while (next && next->volatil)
          next = NEXT_INSN (next);
        return next;
      }

  while (prev && GET_CODE (prev) == NOTE)
    prev = PREV_INSN (prev);

  /* If INSN was a label, delete insns following it if now unreachable.  */
  if (GET_CODE (insn) == CODE_LABEL && prev
      && GET_CODE (prev) == BARRIER)
    {
      register RTX_CODE code;
      while (next != 0
             && ((code = GET_CODE (next)) == INSN
                 || code == JUMP_INSN || code == CALL_INSN
                 || code == NOTE))
        {
          if (code == NOTE
              && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
            next = NEXT_INSN (next);
          else
            /* Note: if this deletes a jump, it can cause more
               deletion of unreachable code, after a different label.
               As long as the value from this recursive call is correct,
               this invocation functions correctly.  */
            next = delete_insn (next);
        }
    }

  return next;
}

/* Advance from INSN till reaching something not deleted,
   then return that.  May return INSN itself.  */

rtx
next_nondeleted_insn (insn)
     rtx insn;
{
  while (insn->volatil)
    insn = NEXT_INSN (insn);
  return insn;
}

/* Invert the condition of the jump JUMP, and make it jump
   to label NLABEL instead of where it jumps now.  */

void
invert_jump (jump, nlabel)
     rtx jump, nlabel;
{
  register rtx olabel = JUMP_LABEL (jump);
  invert_exp (PATTERN (jump), olabel, nlabel);
  JUMP_LABEL (jump) = nlabel;
  ++LABEL_NUSES (nlabel);
  INSN_CODE (jump) = -1;
  if (--LABEL_NUSES (olabel) == 0)
    delete_insn (olabel);
}

/* Invert the jump condition of rtx X,
   and replace OLABEL with NLABEL throughout.  */

static void
invert_exp (x, olabel, nlabel)
     rtx x;
     rtx olabel, nlabel;
{
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register char *fmt;

  if (code == IF_THEN_ELSE)
    {
      /* Inverting the jump condition of an IF_THEN_ELSE
         means exchanging the THEN-part with the ELSE-part.  */
      register rtx tem = XEXP (x, 1);
      XEXP (x, 1) = XEXP (x, 2);
      XEXP (x, 2) = tem;
    }

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
        XEXP (x, 0) = nlabel;
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        invert_exp (XEXP (x, i), olabel, nlabel);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            invert_exp (XVECEXP (x, i, j), olabel, nlabel);
        }
    }
}

/* Make jump JUMP jump to label NLABEL instead of where it jumps now.
   If the old jump target label is unused as a result,
   it and the code following it may be deleted.  */

void
redirect_jump (jump, nlabel)
     rtx jump, nlabel;
{
  register rtx olabel = JUMP_LABEL (jump);

  if (nlabel == olabel)
    return;

  redirect_exp (PATTERN (jump), olabel, nlabel);
  JUMP_LABEL (jump) = nlabel;
  ++LABEL_NUSES (nlabel);
  INSN_CODE (jump) = -1;
  if (--LABEL_NUSES (olabel) == 0)
    delete_insn (olabel);
}

/* Throughout the rtx X,
   alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL).  */

static void
redirect_exp (x, olabel, nlabel)
     rtx x;
     rtx olabel, nlabel;
{
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
        XEXP (x, 0) = nlabel;
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        redirect_exp (XEXP (x, i), olabel, nlabel);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            redirect_exp (XVECEXP (x, i, j), olabel, nlabel);
        }
    }
}

/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value.  */

int
rtx_renumbered_equal_p (x, y)
     rtx x, y;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register char *fmt;

  if (x == y)
    return 1;
  if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
      && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
                                  && GET_CODE (SUBREG_REG (y)) == REG)))
    {
      register int j;

      if (code == SUBREG)
        {
          i = REGNO (SUBREG_REG (x));
          if (reg_renumber[i] >= 0)
            i = reg_renumber[i];
          i += SUBREG_WORD (x);
        }
      else
        {
          i = REGNO (x);
          if (reg_renumber[i] >= 0)
            i = reg_renumber[i];
        }
      if (GET_CODE (y) == SUBREG)
        {
          j = REGNO (SUBREG_REG (y));
          if (reg_renumber[j] >= 0)
            j = reg_renumber[j];
          j += SUBREG_WORD (y);
        }
      else
        {
          j = REGNO (y);
          if (reg_renumber[j] >= 0)
            j = reg_renumber[j];
        }
      return i == j;
    }
  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;
  /* Two label-refs are equivalent if they point at labels
     in the same position in the instruction stream.  */
  if (code == LABEL_REF)
    return (next_real_insn (XEXP (x, 0))
            == next_real_insn (XEXP (y, 0)));
  if (code == SYMBOL_REF)
    return XSTR (x, 0) == XSTR (y, 0);
  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;
  /* Compare the elements.  If any pair of corresponding elements
     fails to match, return 0 for the whole thing.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      register int j;
      switch (fmt[i])
        {
        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'e':
          if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case '0':
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
              return 0;
          break;

          /* It is believed that rtx's at this level will never
             contain anything but integers and other rtx's,
             except for within LABEL_REFs and SYMBOL_REFs.  */
        default:
          abort ();
        }
    }
  return 1;
}

/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  Otherwise, return -1.
   Any rtx is valid for X.  */

int
true_regnum (x)
     rtx x;
{
  if (GET_CODE (x) == REG)
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
        return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0)
        return SUBREG_WORD (x) + base;
    }
  return -1;
}
  1216. }