combine.c

/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY.  No author or distributor
accepts responsibility to anyone for the consequences of using it
or for whether it serves any particular purpose or works at all,
unless he says so in writing.  Refer to the GNU CC General Public
License for full details.

Everyone is granted permission to copy, modify and redistribute
GNU CC, but only under the conditions described in the
GNU CC General Public License.  A copy of this license is
supposed to have been given to you along with GNU CC so you
can know your rights and responsibilities.  It should be in a
file named COPYING.  Among other things, the copyright notice
and this notice must be preserved on all copies.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for use of the CC0.  They don't
   need to, because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
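
/* Editor's sketch (not part of the original source): a minimal example of
   a two-insn combination.  Suppose lifetime analysis linked I3 back to I2,
   and (reg 99) dies in I3 (register numbers here are hypothetical):

     I2: (set (reg 99) (ashift (reg 98) (const_int 2)))
     I3: (set (reg 100) (plus (reg 97) (reg 99)))

   try_combine substitutes I2's SET_SRC for (reg 99) throughout I3's
   pattern; the PLUS case of `subst' also rewrites the shift as a
   multiply, giving

     I3: (set (reg 100) (plus (reg 97) (mult (reg 98) (const_int 4))))

   If `recog' accepts this pattern (e.g. as a scaled indexed address on
   the target), I3 is replaced and I2 is turned into a deleted NOTE.  */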
#include "config.h"
#include "rtl.h"
#include "regs.h"
#include "basic-block.h"
#include "insn-config.h"
#include "recog.h"

#define max(A,B) ((A) > (B) ? (A) : (B))
#define min(A,B) ((A) < (B) ? (A) : (B))

/* Number of attempts to combine instructions in this function.  */
static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */
static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */
static int combine_extras;

/* Number of instructions combined in this function.  */
static int combine_successes;

/* Totals over entire compilation.  */
static int total_attempts, total_merges, total_extras, total_successes;
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */
static short *uid_cuid;

/* Get the cuid of an insn.  */
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Record last point of death of (hard or pseudo) register n.  */
static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */
static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls).  */
static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */
static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */
static rtx subst_insn;

/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.  */

struct undo
{
  rtx *where;
  rtx old_contents;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.
   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.  */

#define MAX_UNDO 10

struct undobuf
{
  int num_undo;
  char *storage;
  struct undo undo[MAX_UNDO];
};

static struct undobuf undobuf;
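
/* Editor's note (not in the original source): the undo buffer is used in
   a fixed pattern throughout `subst'.  A sketch of one recorded change:

     if (undobuf.num_undo < MAX_UNDO)
       {
         undobuf.undo[undobuf.num_undo].where = &XEXP (x, i);
         undobuf.undo[undobuf.num_undo].old_contents = XEXP (x, i);
         XEXP (x, i) = new;
       }
     undobuf.num_undo++;

   num_undo is incremented even when the buffer is full, so comparing it
   against MAX_UNDO afterward tells the caller whether every change was
   actually recorded (and hence whether the result is valid); undo_all
   then restores *where = old_contents for each recorded entry.  */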
static void move_deaths ();
static void remove_death ();
static void record_dead_and_set_regs ();
int regno_dead_p ();
static int reg_set_in_range_p ();
static int use_crosses_set_p ();
static rtx subst ();
static void undo_all ();
static void add_links ();
static void add_incs ();
static int adjacent_insns_p ();
static rtx simplify_and_const_int ();
static rtx gen_lowpart_for_combine ();
static void simplify_set_cc0_and ();

/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn;
  register int i;
  register rtx links, nextlinks;
  rtx prev;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  bzero (reg_last_death, nregs * sizeof (rtx));
  bzero (reg_last_set, nregs * sizeof (rtx));

  init_recog ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (short *) alloca ((i + 1) * sizeof (short));

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    INSN_CUID (insn) = ++i;

  /* Now scan all the insns in forward order.  */

  last_call_cuid = 0;
  mem_last_set = 0;
  prev = 0;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == INSN
          || GET_CODE (insn) == CALL_INSN
          || GET_CODE (insn) == JUMP_INSN)
        {
        retry:
          /* Try this insn with each insn it links back to.  */

          for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
            if (try_combine (insn, XEXP (links, 0), 0))
              goto retry;

          /* Try each sequence of three linked insns ending with this one.  */

          for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
            if (GET_CODE (XEXP (links, 0)) != NOTE)
              for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
                   nextlinks = XEXP (nextlinks, 1))
                if (try_combine (insn, XEXP (links, 0), XEXP (nextlinks, 0)))
                  goto retry;

          /* Try to combine a jump insn that uses CC0
             with a preceding insn that sets CC0, and maybe with its
             logical predecessor as well.
             This is how we make decrement-and-branch insns.
             We need this special code because data flow connections
             via CC0 do not get entered in LOG_LINKS.  */

          if (GET_CODE (insn) == JUMP_INSN
              && prev != 0
              && GET_CODE (prev) == INSN
              && GET_CODE (PATTERN (prev)) == SET
              && GET_CODE (SET_DEST (PATTERN (prev))) == CC0)
            {
              if (try_combine (insn, prev, 0))
                goto retry;

              if (GET_CODE (prev) != NOTE)
                for (nextlinks = LOG_LINKS (prev); nextlinks;
                     nextlinks = XEXP (nextlinks, 1))
                  if (try_combine (insn, prev, XEXP (nextlinks, 0)))
                    goto retry;
            }

#if 0
          /* Turned off because on 68020 it takes four insns to make
             something like (a[b / 32] & (1 << (31 - (b % 32)))) != 0
             that could actually be optimized, and that's an unlikely piece of code.  */

          /* If an insn gets or sets a bit field, try combining it
             with two different insns whose results it uses.  */

          if (GET_CODE (insn) == INSN
              && GET_CODE (PATTERN (insn)) == SET
              && (GET_CODE (SET_DEST (PATTERN (insn))) == ZERO_EXTRACT
                  || GET_CODE (SET_DEST (PATTERN (insn))) == SIGN_EXTRACT
                  || GET_CODE (SET_SRC (PATTERN (insn))) == ZERO_EXTRACT
                  || GET_CODE (SET_SRC (PATTERN (insn))) == SIGN_EXTRACT))
            {
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                if (GET_CODE (XEXP (links, 0)) != NOTE)
                  for (nextlinks = XEXP (links, 1); nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if (try_combine (insn, XEXP (links, 0), XEXP (nextlinks, 0)))
                      goto retry;
            }
#endif

          record_dead_and_set_regs (insn);
          prev = insn;
        }
      else if (GET_CODE (insn) != NOTE)
        prev = 0;
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;
}

/* Try to combine the insns I1 and I2 into I3.
   Here I1 appears earlier than I2, which is earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   Return 1 if successful; if that happens, I1 and I2 are pseudo-deleted
   by turning them into NOTEs, and I3 is modified.
   Return 0 if the combination does not work.  Then nothing is changed.  */

static int
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  register rtx newpat;
  int added_sets_1 = 0;
  int added_sets_2 = 0;
  int total_sets;
  int i2_is_used;
  register rtx link;
  int insn_code_number;
  int recog_flags = 0;
  rtx i2dest, i2src;
  rtx i1dest, i1src;

  combine_attempts++;

  /* Don't combine with something already used up by combination.  */

  if (GET_CODE (i2) == NOTE
      || (i1 && GET_CODE (i1) == NOTE))
    return 0;

  /* Don't combine across a CALL_INSN, because that would possibly
     change whether the life span of some REGs crosses calls or not,
     and it is a pain to update that information.  */

  if (INSN_CUID (i2) < last_call_cuid
      || (i1 && INSN_CUID (i1) < last_call_cuid))
    return 0;

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
     That REG must be either set or dead by the final instruction
     (so that we can safely forget about setting it).
     Also test use_crosses_set_p to make sure that the value
     that is to be substituted for the register
     does not use any registers whose values alter in between.
     Do not try combining with moves from one register to another
     since it is better to let them be tied by register allocation.

     A set of a SUBREG is considered as if it were a set from
     SUBREG.  Thus, (SET (SUBREG:X (REG:Y...)) (something:X...))
     is handled by substituting (SUBREG:Y (something:X...)) for (REG:Y...).  */

  if (GET_CODE (PATTERN (i2)) != SET)
    return 0;
  i2dest = SET_DEST (PATTERN (i2));
  i2src = SET_SRC (PATTERN (i2));
  if (GET_CODE (i2dest) == SUBREG)
    {
      i2dest = SUBREG_REG (i2dest);
      i2src = gen_rtx (SUBREG, GET_MODE (i2dest), i2src, 0);
    }
  if (GET_CODE (i2dest) != CC0
      && (GET_CODE (i2dest) != REG
          || GET_CODE (i2src) == REG
          || use_crosses_set_p (i2src, INSN_CUID (i2))))
    return 0;

  if (i1 != 0)
    {
      if (GET_CODE (PATTERN (i1)) != SET)
        return 0;
      i1dest = SET_DEST (PATTERN (i1));
      i1src = SET_SRC (PATTERN (i1));
      if (GET_CODE (i1dest) == SUBREG)
        {
          i1dest = SUBREG_REG (i1dest);
          i1src = gen_rtx (SUBREG, GET_MODE (i1dest), i1src, 0);
        }
      if (GET_CODE (i1dest) != CC0
          && (GET_CODE (i1dest) != REG
              || GET_CODE (i1src) == REG
              || use_crosses_set_p (i1src, INSN_CUID (i1))))
        return 0;
    }

  /* If I1 or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

  for (link = REG_NOTES (i2); link; link = XEXP (link, 1))
    if ((enum reg_note) GET_MODE (link) == REG_INC)
      if (GET_CODE (i3) == JUMP_INSN
          || reg_used_between_p (XEXP (link, 0), i2, i3))
        return 0;

  if (i1)
    for (link = REG_NOTES (i1); link; link = XEXP (link, 1))
      if ((enum reg_note) GET_MODE (link) == REG_INC)
        if (GET_CODE (i3) == JUMP_INSN
            || reg_used_between_p (XEXP (link, 0), i1, i3))
          return 0;

  /* See if the SETs in i1 or i2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past i3.  */

  added_sets_2 = (GET_CODE (i2dest) != CC0
                  && ! dead_or_set_p (i3, i2dest));
  if (i1)
    added_sets_1 = ! (dead_or_set_p (i3, i1dest)
                      || dead_or_set_p (i2, i1dest));

  combine_merges++;

  undobuf.num_undo = 0;
  undobuf.storage = 0;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  subst_insn = i3;
  newpat = subst (PATTERN (i3), i2dest, i2src);

  /* Record whether i2's body now appears within i3's body.  */

  i2_is_used = undobuf.num_undo;

  if (i1)
    newpat = subst (newpat, i1dest, i1src);

  if (GET_CODE (PATTERN (i3)) == SET
      && SET_DEST (PATTERN (i3)) == cc0_rtx
      && GET_CODE (SET_SRC (PATTERN (i3))) == AND
      && next_insn_tests_no_inequality (i3))
    simplify_set_cc0_and (i3);
  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */
  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;

      /* Arrange to free later what we allocate now
         if we don't accept this combination.  */

      if (!undobuf.storage)
        undobuf.storage = (char *) oballoc (0);

      if (GET_CODE (newpat) == PARALLEL)
        {
          total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
          newpat = gen_rtx (PARALLEL, VOIDmode,
                            gen_rtvec_v (total_sets,
                                         &XVECEXP (newpat, 0, 0)));
        }
      else
        {
          total_sets = 1 + added_sets_1 + added_sets_2;
          newpat = gen_rtx (PARALLEL, VOIDmode,
                            gen_rtvec (total_sets, newpat));
        }

      if (added_sets_1)
        {
          XVECEXP (newpat, 0, --total_sets) = PATTERN (i1);
        }
      if (added_sets_2)
        {
          /* If there is no I1, use I2's body as is.  */
          if (i1 == 0
              /* If I2 was stuck into I3, then anything within it has
                 already had I1 substituted into it when that was done to I3.  */
              || i2_is_used)
            {
              XVECEXP (newpat, 0, --total_sets) = PATTERN (i2);
            }
          else
            XVECEXP (newpat, 0, --total_sets)
              = subst (PATTERN (i2), i1dest, i1src);
        }
    }

  /* Is the result of combination a valid instruction?  */

  insn_code_number = recog (newpat, i3);
  if (insn_code_number >= 0)
    {
      /* Yes.  Install it.  */
      register int regno;

      INSN_CODE (i3) = insn_code_number;
      PATTERN (i3) = newpat;

      /* Most REGs that previously died in I2 now die in I3.  */

      move_deaths (i2src, INSN_CUID (i2), i3);

      if (GET_CODE (i2dest) == REG)
        {
          /* If the reg formerly set in I2 died only once and that was in I3,
             zero its use count so it won't make `reload' do any work.  */
          regno = REGNO (i2dest);
          if (! added_sets_2)
            reg_n_sets[regno]--;
          if (reg_n_sets[regno] == 0 && regno_dead_p (regno, i3))
            reg_n_refs[regno] = 0;

          /* If a ref to REGNO was substituted into I3 from I2,
             then it still dies there if it previously did.
             Otherwise either REGNO never did die in I3 so remove_death is safe
             or this entire life of REGNO is gone so remove its death.  */

          if (! added_sets_2
              && ! reg_mentioned_p (i2dest, PATTERN (i3)))
            remove_death (regno, i3);
        }

      /* The data flowing into I2 now flows into I3.
         But we cannot always move I2's LOG_LINKS into I3,
         since they must go to a setting of a REG from the
         first use following.  If I2 was the first use following a set,
         I3 is now a use, but it is not the first use
         if some instruction between I2 and I3 is also a use.

         Here, for simplicity, we move the links only if
         there are no real insns between I2 and I3.  */

      if (adjacent_insns_p (i2, i3))
        add_links (i3, LOG_LINKS (i2));

      /* Any registers previously autoincremented in I2
         are now incremented in I3.  */

      add_incs (i3, REG_NOTES (i2));

      /* Get rid of I2.  */

      LOG_LINKS (i2) = 0;
      PUT_CODE (i2, NOTE);
      NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
      NOTE_SOURCE_FILE (i2) = 0;

      if (i1)
        {
          /* Likewise, merge the info from I1 and get rid of it.  */

          move_deaths (i1src, INSN_CUID (i1), i3);

          if (GET_CODE (i1dest) == REG)
            {
              regno = REGNO (i1dest);
              if (! added_sets_1)
                reg_n_sets[regno]--;
              if (reg_n_sets[regno] == 0 && regno_dead_p (regno, i3))
                reg_n_refs[regno] = 0;

              /* If a ref to REGNO was substituted into I3 from I1,
                 then it still dies there if it previously did.
                 Else either REGNO never did die in I3 so remove_death is safe
                 or this entire life of REGNO is gone so remove its death.  */

              if (! added_sets_1
                  && ! reg_mentioned_p (i1dest, PATTERN (i3)))
                remove_death (regno, i3);
            }

          if (adjacent_insns_p (i2, i3))
            add_links (i3, LOG_LINKS (i1));
          add_incs (i3, REG_NOTES (i1));

          LOG_LINKS (i1) = 0;
          PUT_CODE (i1, NOTE);
          NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
          NOTE_SOURCE_FILE (i1) = 0;
        }

      combine_successes++;
      return 1;
    }

  /* Failure: change I3 back the way it was.  */

  undo_all ();
  return 0;
}

/* Undo all the modifications recorded in undobuf.  */

static void
undo_all ()
{
  register int i;
  if (undobuf.num_undo > MAX_UNDO)
    undobuf.num_undo = MAX_UNDO;
  for (i = undobuf.num_undo - 1; i >= 0; i--)
    *undobuf.undo[i].where = undobuf.undo[i].old_contents;
  if (undobuf.storage)
    obfree (undobuf.storage);
  undobuf.num_undo = 0;
  undobuf.storage = 0;
}

/* Throughout X, replace FROM with TO, and return the result.
   The result is TO if X is FROM;
   otherwise the result is X, but its contents may have been modified.
   If they were modified, a record was made in undobuf so that
   undo_all will (among other things) return X to its original state.

   If the number of changes necessary is too much to record to undo,
   the excess changes are not made, so the result is invalid.
   The changes already made can still be undone.
   undobuf.num_undo is incremented for such changes, so by testing that
   the caller can tell whether the result is valid.  */

static rtx
subst (x, from, to)
     register rtx x, from, to;
{
  register char *fmt;
  register int len, i;
  register enum rtx_code code;

  /* THIS_TO is used to replace FROM if it appears exactly one
     level down in X.  Simplifications often work by changing
     THIS_TO after observing that FROM appears in a specific way
     one level down in X.  Since only THIS_TO is changed, and TO
     is left alone, further occurrences of FROM within the operands
     of X are replaced normally.  */
  rtx this_to;

  if (x == from)
    return to;

  code = GET_CODE (x);
  this_to = to;

  /* A little bit of algebraic simplification here.  */

  switch (code)
    {
      /* This case has no effect except to speed things up.  */
    case REG:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case PC:
    case CC0:
      return x;

    case NOT:
    case NEG:
      /* Don't let substitution introduce double-negatives.  */
      if (XEXP (x, 0) == from
          && GET_CODE (to) == code)
        return XEXP (to, 0);
      break;

    case PLUS:
      /* In (plus <foo> (ashift <bar> <n>))
         change the shift to a multiply so we can recognize
         scaled indexed addresses.  */
      if ((XEXP (x, 0) == from
           || XEXP (x, 1) == from)
          && GET_CODE (to) == ASHIFT
          && GET_CODE (XEXP (to, 1)) == CONST_INT)
        {
          if (!undobuf.storage)
            undobuf.storage = (char *) oballoc (0);
          this_to = gen_rtx (MULT, GET_MODE (to),
                             XEXP (to, 0),
                             gen_rtx (CONST_INT, VOIDmode,
                                      1 << INTVAL (XEXP (to, 1))));
        }

      /* If we have something (putative index) being added to a sum,
         associate it so that any constant term is outermost.
         That's because that's the way indexed addresses are
         now supposed to appear.  */

      if (((XEXP (x, 0) == from && GET_CODE (XEXP (x, 1)) == PLUS)
           || (XEXP (x, 1) == from && GET_CODE (XEXP (x, 0)) == PLUS))
          || ((XEXP (x, 0) == from || XEXP (x, 1) == from)
              && GET_CODE (this_to) == PLUS))
        {
          rtx offset = 0, base, index;

          if (GET_CODE (this_to) != PLUS)
            {
              index = this_to;
              base = XEXP (x, 0) == from ? XEXP (x, 1) : XEXP (x, 0);
            }
          else
            {
              index = XEXP (x, 0) == from ? XEXP (x, 1) : XEXP (x, 0);
              base = this_to;
            }

          if (CONSTANT_ADDRESS_P (XEXP (base, 0)))
            {
              offset = XEXP (base, 0);
              base = XEXP (base, 1);
            }
          else if (CONSTANT_ADDRESS_P (XEXP (base, 1)))
            {
              offset = XEXP (base, 1);
              base = XEXP (base, 0);
            }

          if (offset != 0)
            {
              if (!undobuf.storage)
                undobuf.storage = (char *) oballoc (0);
              return gen_rtx (PLUS, GET_MODE (index), offset,
                              gen_rtx (PLUS, GET_MODE (index),
                                       index, base));
            }
        }
      break;

    case MINUS:
      /* Can simplify (minus:VOIDmode (zero/sign_extend FOO) CONST)
         (which is a compare instruction, not a subtract instruction)
         to (minus FOO CONST) if CONST fits in FOO's mode
         and we are only testing equality.
         In fact, this is valid for zero_extend if what follows is an
         unsigned comparison, and for sign_extend with a signed comparison.  */
      if (GET_MODE (x) == VOIDmode
          && XEXP (x, 0) == from
          && (GET_CODE (to) == ZERO_EXTEND || GET_CODE (to) == SIGN_EXTEND)
          && next_insn_tests_no_inequality (subst_insn)
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && ((unsigned) INTVAL (XEXP (x, 1))
              < (1 << (BITS_PER_UNIT * GET_MODE_SIZE (GET_MODE (XEXP (to, 0)))))))
        this_to = XEXP (to, 0);
      break;

    case EQ:
    case NE:
      /* If comparing a subreg against zero, discard the subreg.  */
      if (XEXP (x, 0) == from
          && GET_CODE (to) == SUBREG
          && SUBREG_WORD (to) == 0
          && XEXP (x, 1) == const0_rtx)
        this_to = SUBREG_REG (to);

      /* If comparing a ZERO_EXTRACT against zero,
         canonicalize to a SIGN_EXTRACT,
         since the two are equivalent here.  */
      if (XEXP (x, 0) == from
          && GET_CODE (this_to) == ZERO_EXTRACT
          && XEXP (x, 1) == const0_rtx)
        {
          if (!undobuf.storage)
            undobuf.storage = (char *) oballoc (0);
          this_to = gen_rtx (SIGN_EXTRACT, GET_MODE (this_to),
                             XEXP (this_to, 0), XEXP (this_to, 1),
                             XEXP (this_to, 2));
        }

      /* If we are putting (ASHIFT 1 x) into (EQ (AND ... y) 0),
         arrange to return (EQ (SIGN_EXTRACT y 1 x) 0),
         which is what jump-on-bit instructions are written with.  */
      else if (XEXP (x, 1) == const0_rtx
               && GET_CODE (XEXP (x, 0)) == AND
               && (XEXP (XEXP (x, 0), 0) == from
                   || XEXP (XEXP (x, 0), 1) == from)
               && GET_CODE (this_to) == ASHIFT
               && XEXP (this_to, 0) == const1_rtx)
        {
          register rtx y = XEXP (XEXP (x, 0),
                                 XEXP (XEXP (x, 0), 0) == from);
          if (!undobuf.storage)
            undobuf.storage = (char *) oballoc (0);
          this_to = gen_rtx (SIGN_EXTRACT, GET_MODE (this_to),
                             y,
                             const1_rtx, XEXP (this_to, 1));
          from = XEXP (x, 0);
        }
      break;

    case ZERO_EXTEND:
      if (XEXP (x, 0) == from
          && GET_CODE (to) == ZERO_EXTEND)
        this_to = XEXP (to, 0);

      /* Zero-extending the result of an and with a constant can be done
         with a wider and.  */
      if (XEXP (x, 0) == from
          && GET_CODE (to) == AND
          && GET_CODE (XEXP (to, 1)) == CONST_INT
          && (GET_CODE (XEXP (to, 0)) == REG
              || offsetable_address_p (XEXP (to, 0)))
          /* Avoid getting wrong result if the constant has high bits set
             that are irrelevant in the narrow mode where it is being used.  */
          && ((INTVAL (XEXP (to, 1))
               & (-1 << (GET_MODE_SIZE (GET_MODE (to)) * BITS_PER_UNIT)))
              == 0))
        {
          if (!undobuf.storage)
            undobuf.storage = (char *) oballoc (0);
          return gen_rtx (AND, GET_MODE (x),
                          gen_lowpart (GET_MODE (x), XEXP (to, 0)),
                          XEXP (to, 1));
        }
      break;

    case SIGN_EXTEND:
      if (XEXP (x, 0) == from
          && GET_CODE (to) == SIGN_EXTEND)
        this_to = XEXP (to, 0);

      /* Sign-extending the result of an and with a constant can be done
         with a wider and, provided the high bit of the constant is 0.  */
      if (XEXP (x, 0) == from
          && GET_CODE (to) == AND
          && GET_CODE (XEXP (to, 1)) == CONST_INT
          && (GET_CODE (XEXP (to, 0)) == REG
              || offsetable_address_p (XEXP (to, 0)))
          && ((INTVAL (XEXP (to, 1))
               & (-1 << (GET_MODE_SIZE (GET_MODE (to)) * BITS_PER_UNIT - 1)))
              == 0))
        {
          if (!undobuf.storage)
            undobuf.storage = (char *) oballoc (0);
          return gen_rtx (AND, GET_MODE (x),
                          gen_lowpart (GET_MODE (x), XEXP (to, 0)),
                          XEXP (to, 1));
        }
      break;

    case SET:
      /* In (set (zero-extract <x> <1> <y>) (and <foo> <1>))
         the `and' can be deleted.  This can happen when storing a bit
         that came from a set-flag insn followed by masking to one bit.
         There is probably no need to optimize other field widths similarly
         because on machines with bit-field insns `and' is not needed
         to extract the fields.  */
      if (GET_CODE (XEXP (x, 0)) == ZERO_EXTRACT
          && XEXP (XEXP (x, 0), 1) == const1_rtx
          && XEXP (x, 1) == from
          && GET_CODE (to) == AND
          && XEXP (to, 1) == const1_rtx)
        {
          this_to = XEXP (to, 0);
        }
      break;

    case AND:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          rtx tem = simplify_and_const_int (x, from, to);
          if (tem)
            return tem;
        }
      break;

    case FLOAT:
      /* (float (sign_extend <X>)) = (float <X>).  */
      if (XEXP (x, 0) == from
          && GET_CODE (to) == SIGN_EXTEND)
        this_to = XEXP (to, 0);
      break;

    case ZERO_EXTRACT:
      /* Extracting a single bit from the result of a shift:
         see which bit it was before the shift and extract that directly.  */
      if (XEXP (x, 0) == from
          && (GET_CODE (to) == ASHIFTRT || GET_CODE (to) == LSHIFTRT
              || GET_CODE (to) == ASHIFT || GET_CODE (to) == LSHIFT)
          && GET_CODE (XEXP (to, 1)) == CONST_INT
          && XEXP (x, 1) == const1_rtx
          && GET_CODE (XEXP (x, 2)) == CONST_INT)
        {
          int shift = INTVAL (XEXP (to, 1));
          int newpos;
          if (GET_CODE (to) == ASHIFT || GET_CODE (to) == LSHIFT)
            shift = - shift;
#ifdef BITS_BIG_ENDIAN
          shift = - shift;
#endif
          newpos = INTVAL (XEXP (x, 2)) + shift;
          if (newpos >= 0
              && newpos < BITS_PER_UNIT * GET_MODE_SIZE (GET_MODE (from)))
            {
              if (!undobuf.storage)
                undobuf.storage = (char *) oballoc (0);
              return gen_rtx (ZERO_EXTRACT, GET_MODE (x),
                              XEXP (to, 0), const1_rtx,
                              gen_rtx (CONST_INT, VOIDmode, newpos));
            }
        }
      break;

    case LSHIFTRT:
    case ASHIFTRT:
    case ROTATE:
    case ROTATERT:
#ifdef SHIFT_COUNT_TRUNCATED
      /* (lshift <X> (sign_extend <Y>)) = (lshift <X> <Y>) (most machines).
         True for all kinds of shifts and also for zero_extend.  */
      if (XEXP (x, 1) == from
          && (GET_CODE (to) == SIGN_EXTEND
              || GET_CODE (to) == ZERO_EXTEND))
        {
          if (!undobuf.storage)
            undobuf.storage = (char *) oballoc (0);
          this_to = gen_rtx (SUBREG, GET_MODE (to), XEXP (to, 0), 0);
        }
#endif

      /* Two shifts in a row of same kind
         in same direction with constant counts
         may be combined.  */
      if (XEXP (x, 0) == from
          && GET_CODE (to) == GET_CODE (x)
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (to, 1)) == CONST_INT
          && INTVAL (XEXP (to, 1)) > 0
          && INTVAL (XEXP (x, 1)) > 0
          && (INTVAL (XEXP (x, 1)) + INTVAL (XEXP (to, 1))
              < BITS_PER_UNIT * GET_MODE_SIZE (GET_MODE (x))))
        {
          if (!undobuf.storage)
            undobuf.storage = (char *) oballoc (0);
          return gen_rtx (GET_CODE (x), GET_MODE (x),
                          XEXP (to, 0),
                          gen_rtx (CONST_INT, VOIDmode,
                                   INTVAL (XEXP (x, 1))
                                   + INTVAL (XEXP (to, 1))));
        }
      break;

    case LSHIFT:
    case ASHIFT:
#ifdef SHIFT_COUNT_TRUNCATED
      /* (lshift <X> (sign_extend <Y>)) = (lshift <X> <Y>) (most machines).
         True for all kinds of shifts and also for zero_extend.  */
      if (XEXP (x, 1) == from
          && (GET_CODE (to) == SIGN_EXTEND
              || GET_CODE (to) == ZERO_EXTEND))
        {
          if (!undobuf.storage)
            undobuf.storage = (char *) oballoc (0);
          this_to = gen_rtx (SUBREG, GET_MODE (to), XEXP (to, 0), 0);
        }
#endif
      /* (lshift (and (lshiftrt <foo> <X>) <Y>) <X>)
         happens when copying between bit fields in similar structures.
         It can be replaced by one `and' instruction.
         It does not matter whether the shifts are logical or arithmetic.  */
      if (GET_CODE (XEXP (x, 0)) == AND
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && XEXP (XEXP (x, 0), 0) == from
          && (GET_CODE (to) == LSHIFTRT
              || GET_CODE (to) == ASHIFTRT)
#if 0
          /* I now believe this restriction is unnecessary.
             The outer shift will discard those bits in any case, right?  */

          /* If inner shift is arithmetic, either it shifts left or
             the bits it shifts the sign into are zeroed by the and.  */
          && (INTVAL (XEXP (x, 1)) < 0
              || ((unsigned) INTVAL (XEXP (XEXP (x, 0), 1))
                  < 1 << (GET_MODE_BITSIZE (GET_MODE (x))
                          - INTVAL (XEXP (x, 0)))))
#endif
          && GET_CODE (XEXP (to, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) == INTVAL (XEXP (to, 1)))
        {
          if (!undobuf.storage)
            undobuf.storage = (char *) oballoc (0);
          /* The constant in the new `and' is <Y> << <X>
             but clear out all bits that don't belong in our mode.  */
          return gen_rtx (AND, GET_MODE (x), XEXP (to, 0),
                          gen_rtx (CONST_INT, VOIDmode,
                                   (GET_MODE_MASK (GET_MODE (x))
                                    & ((GET_MODE_MASK (GET_MODE (x))
                                        & INTVAL (XEXP (XEXP (x, 0), 1)))
                                       << INTVAL (XEXP (x, 1))))));
        }

      /* Two shifts in a row in same direction with constant counts
         may be combined.  */
      if (XEXP (x, 0) == from
          && (GET_CODE (to) == ASHIFT || GET_CODE (to) == LSHIFT)
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (to, 1)) == CONST_INT
          && INTVAL (XEXP (to, 1)) > 0
          && INTVAL (XEXP (x, 1)) > 0
          && (INTVAL (XEXP (x, 1)) + INTVAL (XEXP (to, 1))
              < BITS_PER_UNIT * GET_MODE_SIZE (GET_MODE (x))))
        {
          if (!undobuf.storage)
            undobuf.storage = (char *) oballoc (0);
          return gen_rtx (GET_CODE (x), GET_MODE (x),
                          XEXP (to, 0),
                          gen_rtx (CONST_INT, VOIDmode,
                                   INTVAL (XEXP (x, 1))
                                   + INTVAL (XEXP (to, 1))));
        }

      /* (ashift (ashiftrt <foo> <X>) <X>)
         (or, on some machines, (ashift (ashift <foo> <-X>) <X>) instead)
         happens if you divide by 2**N and then multiply by 2**N.
         It can be replaced by one `and' instruction.
         It does not matter whether the shifts are logical or arithmetic.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0
          && XEXP (x, 0) == from
          && (((GET_CODE (to) == LSHIFTRT || GET_CODE (to) == ASHIFTRT)
               && GET_CODE (XEXP (to, 1)) == CONST_INT
               && INTVAL (XEXP (x, 1)) == INTVAL (XEXP (to, 1)))
              || ((GET_CODE (to) == LSHIFT || GET_CODE (to) == ASHIFT)
                  && GET_CODE (XEXP (to, 1)) == CONST_INT
                  && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (to, 1)))))
        {
          if (!undobuf.storage)
            undobuf.storage = (char *) oballoc (0);
          /* The constant in the new `and' is the mode's mask with the
             low <X> bits cleared, i.e. the bits the two shifts destroy.  */
          return gen_rtx (AND, GET_MODE (x), XEXP (to, 0),
                          gen_rtx (CONST_INT, VOIDmode,
                                   (GET_MODE_MASK (GET_MODE (x))
                                    & (GET_MODE_MASK (GET_MODE (x))
                                       << INTVAL (XEXP (x, 1))))));
        }
    }

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  /* Don't replace FROM where it is being stored in rather than used.  */
  if (code == SET && SET_DEST (x) == from)
    fmt = "ie";

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              register rtx new;
              if (XVECEXP (x, i, j) == from)
                new = this_to;
              else
                new = subst (XVECEXP (x, i, j), from, to);
              if (new != XVECEXP (x, i, j))
                {
                  if (undobuf.num_undo < MAX_UNDO)
                    {
                      undobuf.undo[undobuf.num_undo].where = &XVECEXP (x, i, j);
                      undobuf.undo[undobuf.num_undo].old_contents = XVECEXP (x, i, j);
                      XVECEXP (x, i, j) = new;
                    }
                  undobuf.num_undo++;
                }
            }
        }
      else if (fmt[i] == 'e')
        {
          register rtx new;
          if (XEXP (x, i) == from)
            new = this_to;
          else
            new = subst (XEXP (x, i), from, to);
          if (new != XEXP (x, i))
            {
              if (undobuf.num_undo < MAX_UNDO)
                {
                  undobuf.undo[undobuf.num_undo].where = &XEXP (x, i);
                  undobuf.undo[undobuf.num_undo].old_contents = XEXP (x, i);
                  XEXP (x, i) = new;
                }
              undobuf.num_undo++;
            }
        }
    }

  return x;
}
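
/* Editor's sketch (not in the original source): the two-shifts rule in the
   LSHIFTRT/ASHIFTRT case above, worked through.  Assume SImode, with
   BITS_PER_UNIT * GET_MODE_SIZE == 32, FROM = (reg 99), and

     TO: (ashiftrt:SI (reg 98) (const_int 8))
     X:  (ashiftrt:SI (reg 99) (const_int 16))

   Both counts are positive constants and 8 + 16 < 32, so subst returns

     (ashiftrt:SI (reg 98) (const_int 24))

   directly, instead of substituting and leaving two shifts in a row.  */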
/* This is the AND case of the function subst.  */

static rtx
simplify_and_const_int (x, from, to)
     rtx x, from, to;
{
  register rtx varop = XEXP (x, 0);
  register int constop = INTVAL (XEXP (x, 1));

  /* (and (subreg (and <foo> <constant>) 0) <constant>)
     results from an andsi followed by an andqi,
     which happens frequently when storing bit-fields
     on something whose result comes from an andsi.  */
  if (GET_CODE (varop) == SUBREG
      && XEXP (varop, 0) == from
      && subreg_lowpart_p (varop)
      && GET_CODE (to) == AND
      && GET_CODE (XEXP (to, 1)) == CONST_INT
      /* Verify that the result of the outer `and'
         is not affected by any bits not defined in the inner `and'.
         True if the outer mode is narrower, or if the outer constant
         masks to zero all the bits that the inner mode doesn't have.  */
      && (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (GET_MODE (from))
          || (constop & (-1 << (BITS_PER_UNIT * GET_MODE_SIZE (GET_MODE (from))))) == 0))
    {
      if (!undobuf.storage)
        undobuf.storage = (char *) oballoc (0);
      return gen_rtx (AND, GET_MODE (x),
                      gen_lowpart (GET_MODE (x), XEXP (to, 0)),
                      gen_rtx (CONST_INT, VOIDmode,
                               constop
                               /* Remember that the bits outside that mode
                                  are not being changed, so the effect
                                  is as if they were all 1.  */
                               & INTVAL (XEXP (to, 1))));
    }

  /* (and (zero_extend <foo>) <constant>)
     often results from storing in a bit-field something
     that was calculated as a short.  Replace with a single `and'
     in whose constant all bits not in <foo>'s mode are zero.  */
  if (varop == from
      && GET_CODE (to) == ZERO_EXTEND)
    {
      if (!undobuf.storage)
        undobuf.storage = (char *) oballoc (0);
      return gen_rtx (AND, GET_MODE (x),
                      gen_rtx (SUBREG, GET_MODE (x),
                               XEXP (to, 0), 0),
                      gen_rtx (CONST_INT, VOIDmode,
                               constop
                               & ((1 << (BITS_PER_UNIT * GET_MODE_SIZE (GET_MODE (XEXP (to, 0))))) - 1)));
    }

  /* (and (sign_extend <foo>) <constant>)
     can be replaced with (and (subreg <foo>) <constant>)
     if <constant> is narrower than <foo>'s mode,
     or with (zero_extend <foo>) if <constant> is a mask for that mode.  */
  if (varop == from
      && GET_CODE (to) == SIGN_EXTEND
      && ((unsigned) constop
          <= ((1 << (BITS_PER_UNIT
                     * GET_MODE_SIZE (GET_MODE (XEXP (to, 0)))))
              - 1)))
    {
      if (!undobuf.storage)
        undobuf.storage = (char *) oballoc (0);
      if (constop == ((1 << (BITS_PER_UNIT
                             * GET_MODE_SIZE (GET_MODE (XEXP (to, 0)))))
                      - 1))
        return gen_rtx (ZERO_EXTEND, GET_MODE (x), XEXP (to, 0));
      return gen_rtx (AND, GET_MODE (x),
                      gen_rtx (SUBREG, GET_MODE (x),
                               XEXP (to, 0), 0),
                      XEXP (x, 1));
    }

  /* (and (and <foo> <constant>) <constant>)
     comes from two and instructions in a row.  */
  if (varop == from
      && GET_CODE (to) == AND
      && GET_CODE (XEXP (to, 1)) == CONST_INT)
    {
      if (!undobuf.storage)
        undobuf.storage = (char *) oballoc (0);
      return gen_rtx (AND, GET_MODE (x),
                      XEXP (to, 0),
                      gen_rtx (CONST_INT, VOIDmode,
                               constop
                               & INTVAL (XEXP (to, 1))));
    }

  /* (and (ashiftrt (ashift FOO N) N) CONST)
     may be simplified to (and FOO CONST) if CONST masks off the bits
     changed by the two shifts.  */
  if (GET_CODE (varop) == ASHIFTRT
      && GET_CODE (XEXP (varop, 1)) == CONST_INT
      && XEXP (varop, 0) == from
      && GET_CODE (to) == ASHIFT
      && GET_CODE (XEXP (to, 1)) == CONST_INT
      && INTVAL (XEXP (varop, 1)) == INTVAL (XEXP (to, 1))
      && ((unsigned) constop >> INTVAL (XEXP (varop, 1))) == 0)
    {
      if (!undobuf.storage)
        undobuf.storage = (char *) oballoc (0);
      /* If CONST is a mask for the low byte,
         change this into a zero-extend instruction
         from just the low byte of FOO.  */
      if (constop == (1 << BITS_PER_UNIT) - 1)
        {
          rtx temp = gen_lowpart_for_combine (QImode, XEXP (to, 0));
          if (temp)
            return gen_rtx (ZERO_EXTEND, GET_MODE (x), temp);
        }
      return gen_rtx (AND, GET_MODE (x),
                      XEXP (to, 0), XEXP (x, 1));
    }

  /* No simplification applies.  */
  return 0;
}
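
/* Editor's sketch (not in the original source): the zero_extend case of
   simplify_and_const_int, worked through with BITS_PER_UNIT == 8.
   Given FROM = (reg 98) and

     TO: (zero_extend:SI (reg:QI 97))
     X:  (and:SI (reg 98) (const_int 0x1ff))

   the result is

     (and:SI (subreg:SI (reg:QI 97) 0) (const_int 0xff))

   since the extension leaves all bits outside QImode zero, they can be
   masked out of the constant instead, and one `and' suffices.  */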
/* Like gen_lowpart but for use by combine.  In combine it is not possible
   to create any new pseudoregs.  However, it is safe to create
   invalid memory addresses, because combine will try to recognize
   them and all they will do is make the combine attempt fail.

   Also, return zero if we don't see a way to make a lowpart.  */

static rtx
gen_lowpart_for_combine (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
    return gen_lowpart (mode, x);
  if (GET_MODE (x) == mode)
    return 0;
  if (GET_CODE (x) == VOLATILE)
    return 0;
  if (GET_CODE (x) == MEM)
    {
      register int offset = 0;
#ifdef WORDS_BIG_ENDIAN
      offset = (max (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
                - max (GET_MODE_SIZE (mode), UNITS_PER_WORD));
#endif
#ifdef BYTES_BIG_ENDIAN
      if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
        offset -= (GET_MODE_SIZE (mode)
                   - min (UNITS_PER_WORD,
                          GET_MODE_SIZE (GET_MODE (x))));
#endif
      return gen_rtx (MEM, mode, plus_constant (XEXP (x, 0),
                                                offset));
    }
  else
    return 0;
}

/* After substitution, if the resulting pattern looks like
   (set (cc0) (and ...)), this function is called to simplify the
   pattern into a bit-field operation if possible.  */

static void
simplify_set_cc0_and (insn)
     rtx insn;
{
  register rtx value = XEXP (PATTERN (insn), 1);
  register rtx op0 = XEXP (value, 0);
  register rtx op1 = XEXP (value, 1);
  int offset = 0;
  rtx var = 0;
  rtx bitnum = 0;
  int temp;
  int unit;

  /* Look for a constant power of 2 or a shifted 1
     on either side of the AND.  Set VAR to the other side.
     Set BITNUM to the shift count of the 1 (as an rtx).
     Or, if bit number is constant, set OFFSET to the bit number.  */

  switch (GET_CODE (op0))
    {
    case CONST_INT:
      temp = exact_log2 (INTVAL (op0));
      if (temp < 0)
        return;
      offset = temp;
      var = op1;
      break;

    case ASHIFT:
    case LSHIFT:
      if (XEXP (op0, 0) == const1_rtx)
        {
          bitnum = XEXP (op0, 1);
          var = op1;
        }
    }

  if (var == 0)
    switch (GET_CODE (op1))
      {
      case CONST_INT:
        temp = exact_log2 (INTVAL (op1));
        if (temp < 0)
          return;
        offset = temp;
        var = op0;
        break;

      case ASHIFT:
      case LSHIFT:
        if (XEXP (op1, 0) == const1_rtx)
          {
            bitnum = XEXP (op1, 1);
            var = op0;
          }
      }

  /* If VAR is 0, we didn't find something recognizable.  */
  if (var == 0)
    return;

  if (!undobuf.storage)
    undobuf.storage = (char *) oballoc (0);

  /* If the bit position is currently exactly 0,
     extract a right-shift from the variable portion.  */
  if (offset == 0
      && (GET_CODE (var) == ASHIFTRT || GET_CODE (var) == LSHIFTRT))
    {
      bitnum = XEXP (var, 1);
      var = XEXP (var, 0);
    }

#ifdef BITS_BIG_ENDIAN
  unit = GET_MODE_SIZE (GET_MODE (var)) * BITS_PER_UNIT - 1;
  if (bitnum != 0)
    bitnum = gen_rtx (MINUS, SImode,
                      gen_rtx (CONST_INT, VOIDmode, unit), bitnum);
  else
    offset = unit - offset;
#endif

  if (bitnum == 0)
    bitnum = gen_rtx (CONST_INT, VOIDmode, offset);

  if (GET_CODE (var) == SUBREG && SUBREG_WORD (var) == 0)
    var = SUBREG_REG (var);

  if (undobuf.num_undo < MAX_UNDO)
    {
      undobuf.undo[undobuf.num_undo].where = &XEXP (PATTERN (insn), 1);
      undobuf.undo[undobuf.num_undo].old_contents = value;
      XEXP (PATTERN (insn), 1)
        = gen_rtx (ZERO_EXTRACT, VOIDmode, var, const1_rtx, bitnum);
    }
  undobuf.num_undo++;
}
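
/* Editor's sketch (not in the original source): what the function above
   does to a typical pattern.  With a constant power-of-2 mask,

     (set (cc0) (and (reg 98) (const_int 8)))

   has exact_log2 (8) == 3, so (assuming little-endian bit numbering,
   i.e. BITS_BIG_ENDIAN undefined) the source is rewritten as the
   single-bit test

     (set (cc0) (zero_extract (reg 98) (const_int 1) (const_int 3)))

   which is what jump-on-bit instructions match.  */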
/* Update the records of when each REG was most recently set or killed
   for the things done by INSN.  This is the last thing done in processing
   INSN in the combiner loop.

   We update reg_last_set, reg_last_death, and also the similar information
   mem_last_set (which insn most recently modified memory)
   and last_call_cuid (which insn was the most recent subroutine call).  */

static void
record_dead_and_set_regs (insn)
     rtx insn;
{
  register rtx link;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    {
      if ((enum reg_note) GET_MODE (link) == REG_DEAD)
        reg_last_death[REGNO (XEXP (link, 0))] = insn;
      else if ((enum reg_note) GET_MODE (link) == REG_INC)
        reg_last_set[REGNO (XEXP (link, 0))] = insn;
    }

  if (GET_CODE (insn) == CALL_INSN)
    last_call_cuid = mem_last_set = INSN_CUID (insn);

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      register int i;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        {
          register rtx elt = XVECEXP (PATTERN (insn), 0, i);
          register enum rtx_code code = GET_CODE (elt);
          if (code == SET || code == CLOBBER)
            {
              if (GET_CODE (XEXP (elt, 0)) == REG)
                reg_last_set[REGNO (XEXP (elt, 0))] = insn;
              else if (GET_CODE (XEXP (elt, 0)) == MEM)
                mem_last_set = INSN_CUID (insn);
            }
        }
    }
  else if (GET_CODE (PATTERN (insn)) == SET
           || GET_CODE (PATTERN (insn)) == CLOBBER)
    {
      register rtx x = XEXP (PATTERN (insn), 0);
      if (GET_CODE (x) == REG)
        reg_last_set[REGNO (x)] = insn;
      else if (GET_CODE (x) == MEM)
        mem_last_set = INSN_CUID (insn);
    }
}

/* Return nonzero if expression X refers to a REG or to memory
   that is set in an instruction more recent than FROM_CUID.  */

static int
use_crosses_set_p (x, from_cuid)
     register rtx x;
     int from_cuid;
{
  register char *fmt;
  register int i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register int regno = REGNO (x);
      return (reg_last_set[regno]
              && INSN_CUID (reg_last_set[regno]) > from_cuid);
    }

  if (code == MEM && mem_last_set > from_cuid)
    return 1;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
              return 1;
        }
      else if (fmt[i] == 'e'
               && use_crosses_set_p (XEXP (x, i), from_cuid))
        return 1;
    }
  return 0;
}

/* Return nonzero if reg REGNO is marked as dying in INSN.  */

int
regno_dead_p (regno, insn)
     int regno;
     rtx insn;
{
  register rtx link;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REGNO (XEXP (link, 0)) == regno
        && ((enum reg_note) GET_MODE (link) == REG_DEAD
            || (enum reg_note) GET_MODE (link) == REG_INC))
      return 1;

  return 0;
}

/* Remove register number REGNO from the dead registers list of INSN.  */

static void
remove_death (regno, insn)
     int regno;
     rtx insn;
{
  register rtx link, next;

  while ((link = REG_NOTES (insn))
         && REGNO (XEXP (link, 0)) == regno
         && (enum reg_note) GET_MODE (link) == REG_DEAD)
    REG_NOTES (insn) = XEXP (link, 1);

  if (link)
    while (next = XEXP (link, 1))
      {
        if (REGNO (XEXP (next, 0)) == regno
            && (enum reg_note) GET_MODE (next) == REG_DEAD)
          XEXP (link, 1) = XEXP (next, 1);
        else
          link = next;
      }
}

/* Return nonzero if J is the first insn following I,
   not counting labels, line numbers, etc.
   We assume that J follows I.  */

static int
adjacent_insns_p (i, j)
     rtx i, j;
{
  register rtx insn;
  for (insn = NEXT_INSN (i); insn != j; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN
        || GET_CODE (insn) == CALL_INSN
        || GET_CODE (insn) == JUMP_INSN)
      return 0;
  return 1;
}

/* Concatenate the list of logical links LINKS
   into INSN's list of logical links.
   Modifies LINKS destructively.  */

static void
add_links (insn, links)
     rtx insn, links;
{
  if (LOG_LINKS (insn) == 0)
    LOG_LINKS (insn) = links;
  else
    {
      register rtx next, prev = LOG_LINKS (insn);
      while (next = XEXP (prev, 1))
        prev = next;
      XEXP (prev, 1) = links;
    }
}
/* Concatenate any elements of the list of reg-notes INCS
   which are of type REG_INC
   into INSN's list of reg-notes.  */
static void
add_incs (insn, incs)
     rtx insn, incs;
{
  register rtx tail;
  for (tail = incs; tail; tail = XEXP (tail, 1))
    if ((enum reg_note) GET_MODE (tail) == REG_INC)
      REG_NOTES (insn)
        = gen_rtx (EXPR_LIST, REG_INC, XEXP (tail, 0), REG_NOTES (insn));
}

/* For each register (hardware or pseudo) used within expression X,
   if its death is in an instruction with cuid
   between FROM_CUID (inclusive) and TO_INSN (exclusive),
   mark it as dead in TO_INSN instead.

   This is done when X is being merged by combination into TO_INSN.  */

static void
move_deaths (x, from_cuid, to_insn)
     rtx x;
     int from_cuid;
     rtx to_insn;
{
  register char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register rtx where_dead = reg_last_death[REGNO (x)];

      if (where_dead && INSN_CUID (where_dead) >= from_cuid
          && INSN_CUID (where_dead) < INSN_CUID (to_insn))
        {
          remove_death (REGNO (x), reg_last_death[REGNO (x)]);
          if (! dead_or_set_p (to_insn, x))
            REG_NOTES (to_insn)
              = gen_rtx (EXPR_LIST, REG_DEAD, x, REG_NOTES (to_insn));
        }
      return;
    }

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            move_deaths (XVECEXP (x, i, j), from_cuid, to_insn);
        }
      else if (fmt[i] == 'e')
        move_deaths (XEXP (x, i), from_cuid, to_insn);
    }
}

dump_combine_stats (file)
     char *file;
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

dump_combine_total_stats (file)
     char *file;
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}