; milli.S — PA-RISC millicode support routines (extraction artifacts removed)
; $OpenBSD: milli.S,v 1.5 2001/03/29 04:08:20 mickey Exp $
;
; (c) Copyright 1986 HEWLETT-PACKARD COMPANY
;
; To anyone who acknowledges that this file is provided "AS IS"
; without any express or implied warranty:
; permission to use, copy, modify, and distribute this file
; for any purpose is hereby granted without fee, provided that
; the above copyright notice and this notice appears in all
; copies, and that the name of Hewlett-Packard Company not be
; used in advertising or publicity pertaining to distribution
; of the software without specific, written prior permission.
; Hewlett-Packard Company makes no representations about the
; suitability of this software for any purpose.
;
; Standard Hardware Register Definitions for Use with Assembler
; version A.08.06
; - fr16-31 added at Utah
;~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
  20. ; Hardware General Registers
  21. r0: .equ 0
  22. r1: .equ 1
  23. r2: .equ 2
  24. r3: .equ 3
  25. r4: .equ 4
  26. r5: .equ 5
  27. r6: .equ 6
  28. r7: .equ 7
  29. r8: .equ 8
  30. r9: .equ 9
  31. r10: .equ 10
  32. r11: .equ 11
  33. r12: .equ 12
  34. r13: .equ 13
  35. r14: .equ 14
  36. r15: .equ 15
  37. r16: .equ 16
  38. r17: .equ 17
  39. r18: .equ 18
  40. r19: .equ 19
  41. r20: .equ 20
  42. r21: .equ 21
  43. r22: .equ 22
  44. r23: .equ 23
  45. r24: .equ 24
  46. r25: .equ 25
  47. r26: .equ 26
  48. r27: .equ 27
  49. r28: .equ 28
  50. r29: .equ 29
  51. r30: .equ 30
  52. r31: .equ 31
  53. ; Hardware Space Registers
  54. sr0: .equ 0
  55. sr1: .equ 1
  56. sr2: .equ 2
  57. sr3: .equ 3
  58. sr4: .equ 4
  59. sr5: .equ 5
  60. sr6: .equ 6
  61. sr7: .equ 7
  62. ; Hardware Floating Point Registers
  63. fr0: .equ 0
  64. fr1: .equ 1
  65. fr2: .equ 2
  66. fr3: .equ 3
  67. fr4: .equ 4
  68. fr5: .equ 5
  69. fr6: .equ 6
  70. fr7: .equ 7
  71. fr8: .equ 8
  72. fr9: .equ 9
  73. fr10: .equ 10
  74. fr11: .equ 11
  75. fr12: .equ 12
  76. fr13: .equ 13
  77. fr14: .equ 14
  78. fr15: .equ 15
  79. fr16: .equ 16
  80. fr17: .equ 17
  81. fr18: .equ 18
  82. fr19: .equ 19
  83. fr20: .equ 20
  84. fr21: .equ 21
  85. fr22: .equ 22
  86. fr23: .equ 23
  87. fr24: .equ 24
  88. fr25: .equ 25
  89. fr26: .equ 26
  90. fr27: .equ 27
  91. fr28: .equ 28
  92. fr29: .equ 29
  93. fr30: .equ 30
  94. fr31: .equ 31
  95. ; Hardware Control Registers
  96. cr0: .equ 0
  97. rctr: .equ 0 ; Recovery Counter Register
  98. cr8: .equ 8 ; Protection ID 1
  99. pidr1: .equ 8
  100. cr9: .equ 9 ; Protection ID 2
  101. pidr2: .equ 9
  102. cr10: .equ 10
  103. ccr: .equ 10 ; Coprocessor Confiquration Register
  104. cr11: .equ 11
  105. sar: .equ 11 ; Shift Amount Register
  106. cr12: .equ 12
  107. pidr3: .equ 12 ; Protection ID 3
  108. cr13: .equ 13
  109. pidr4: .equ 13 ; Protection ID 4
  110. cr14: .equ 14
  111. iva: .equ 14 ; Interrupt Vector Address
  112. cr15: .equ 15
  113. eiem: .equ 15 ; External Interrupt Enable Mask
  114. cr16: .equ 16
  115. itmr: .equ 16 ; Interval Timer
  116. cr17: .equ 17
  117. pcsq: .equ 17 ; Program Counter Space queue
  118. cr18: .equ 18
  119. pcoq: .equ 18 ; Program Counter Offset queue
  120. cr19: .equ 19
  121. iir: .equ 19 ; Interruption Instruction Register
  122. cr20: .equ 20
  123. isr: .equ 20 ; Interruption Space Register
  124. cr21: .equ 21
  125. ior: .equ 21 ; Interruption Offset Register
  126. cr22: .equ 22
  127. ipsw: .equ 22 ; Interrpution Processor Status Word
  128. cr23: .equ 23
  129. eirr: .equ 23 ; External Interrupt Request
  130. cr24: .equ 24
  131. ppda: .equ 24 ; Physcial Page Directory Address
  132. tr0: .equ 24 ; Temporary register 0
  133. cr25: .equ 25
  134. hta: .equ 25 ; Hash Table Address
  135. tr1: .equ 25 ; Temporary register 1
  136. cr26: .equ 26
  137. tr2: .equ 26 ; Temporary register 2
  138. cr27: .equ 27
  139. tr3: .equ 27 ; Temporary register 3
  140. cr28: .equ 28
  141. tr4: .equ 28 ; Temporary register 4
  142. cr29: .equ 29
  143. tr5: .equ 29 ; Temporary register 5
  144. cr30: .equ 30
  145. tr6: .equ 30 ; Temporary register 6
  146. cr31: .equ 31
  147. tr7: .equ 31 ; Temporary register 7
  148. ;~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
  149. ; Procedure Call Convention ~
  150. ; Register Definitions for Use with Assembler ~
  151. ; version A.08.06 ~
  152. ;~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
  153. ; Software Architecture General Registers
  154. rp: .equ r2 ; return pointer
  155. mrp: .equ r31 ; millicode return pointer
  156. ret0: .equ r28 ; return value
  157. ret1: .equ r29 ; return value (high part of double)
  158. sl: .equ r29 ; static link
  159. sp: .equ r30 ; stack pointer
  160. dp: .equ r27 ; data pointer
  161. arg0: .equ r26 ; argument
  162. arg1: .equ r25 ; argument or high part of double argument
  163. arg2: .equ r24 ; argument
  164. arg3: .equ r23 ; argument or high part of double argument
  165. ;_____________________________________________________________________________
  166. ; Software Architecture Space Registers
  167. ; sr0 ; return link form BLE
  168. sret: .equ sr1 ; return value
  169. sarg: .equ sr1 ; argument
  170. ; sr4 ; PC SPACE tracker
  171. ; sr5 ; process private data
  172. ;_____________________________________________________________________________
  173. ; Software Architecture Pseudo Registers
  174. previous_sp: .equ 64 ; old stack pointer (locates previous frame)
  175. ;~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
  176. ; Standard space and subspace definitions. version A.08.06
  177. ; These are generally suitable for programs on HP_UX and HPE.
  178. ; Statements commented out are used when building such things as operating
  179. ; system kernels.
  180. ;;;;;;;;;;;;;;;;
  181. ; Additional code subspaces should have ALIGN=8 for an interspace BV
  182. ; and should have SORT=24.
  183. ;
  184. ; For an incomplete executable (program bound to shared libraries),
  185. ; sort keys $GLOBAL$ -1 and $GLOBAL$ -2 are reserved for the $DLT$
  186. ; and $PLT$ subspaces respectively.
  187. ;;;;;;;;;;;;;;;
  188. .text
  189. .EXPORT $$remI,millicode
  190. ; .IMPORT cerror
  191. $$remI:
  192. .PROC
  193. .CALLINFO NO_CALLS
  194. .ENTRY
  195. addit,= 0,arg1,r0
  196. add,>= r0,arg0,ret1
  197. sub r0,ret1,ret1
  198. sub r0,arg1,r1
  199. ds r0,r1,r0
  200. or r0,r0,r1
  201. add ret1,ret1,ret1
  202. ds r1,arg1,r1
  203. addc ret1,ret1,ret1
  204. ds r1,arg1,r1
  205. addc ret1,ret1,ret1
  206. ds r1,arg1,r1
  207. addc ret1,ret1,ret1
  208. ds r1,arg1,r1
  209. addc ret1,ret1,ret1
  210. ds r1,arg1,r1
  211. addc ret1,ret1,ret1
  212. ds r1,arg1,r1
  213. addc ret1,ret1,ret1
  214. ds r1,arg1,r1
  215. addc ret1,ret1,ret1
  216. ds r1,arg1,r1
  217. addc ret1,ret1,ret1
  218. ds r1,arg1,r1
  219. addc ret1,ret1,ret1
  220. ds r1,arg1,r1
  221. addc ret1,ret1,ret1
  222. ds r1,arg1,r1
  223. addc ret1,ret1,ret1
  224. ds r1,arg1,r1
  225. addc ret1,ret1,ret1
  226. ds r1,arg1,r1
  227. addc ret1,ret1,ret1
  228. ds r1,arg1,r1
  229. addc ret1,ret1,ret1
  230. ds r1,arg1,r1
  231. addc ret1,ret1,ret1
  232. ds r1,arg1,r1
  233. addc ret1,ret1,ret1
  234. ds r1,arg1,r1
  235. addc ret1,ret1,ret1
  236. ds r1,arg1,r1
  237. addc ret1,ret1,ret1
  238. ds r1,arg1,r1
  239. addc ret1,ret1,ret1
  240. ds r1,arg1,r1
  241. addc ret1,ret1,ret1
  242. ds r1,arg1,r1
  243. addc ret1,ret1,ret1
  244. ds r1,arg1,r1
  245. addc ret1,ret1,ret1
  246. ds r1,arg1,r1
  247. addc ret1,ret1,ret1
  248. ds r1,arg1,r1
  249. addc ret1,ret1,ret1
  250. ds r1,arg1,r1
  251. addc ret1,ret1,ret1
  252. ds r1,arg1,r1
  253. addc ret1,ret1,ret1
  254. ds r1,arg1,r1
  255. addc ret1,ret1,ret1
  256. ds r1,arg1,r1
  257. addc ret1,ret1,ret1
  258. ds r1,arg1,r1
  259. addc ret1,ret1,ret1
  260. ds r1,arg1,r1
  261. addc ret1,ret1,ret1
  262. ds r1,arg1,r1
  263. addc ret1,ret1,ret1
  264. ds r1,arg1,r1
  265. addc ret1,ret1,ret1
  266. movb,>=,n r1,ret1,remI300
  267. add,< arg1,r0,r0
  268. add,tr r1,arg1,ret1
  269. sub r1,arg1,ret1
  270. remI300: add,>= arg0,r0,r0
  271. sub r0,ret1,ret1
  272. bv r0(r31)
  273. nop
  274. .EXIT
  275. .PROCEND
  276. bit1: .equ 1
  277. bit30: .equ 30
  278. bit31: .equ 31
  279. len2: .equ 2
  280. len4: .equ 4
  281. #if 0
  282. $$dyncall:
  283. .proc
  284. .callinfo NO_CALLS
  285. .export $$dyncall,MILLICODE
  286. bb,>=,n 22,bit30,noshlibs
  287. depi 0,bit31,len2,22
  288. ldw 4(22),19
  289. ldw 0(22),22
  290. noshlibs:
  291. ldsid (22),r1
  292. mtsp r1,sr0
  293. be 0(sr0,r22)
  294. stw rp,-24(sp)
  295. .procend
  296. #endif
  297. $$sh_func_adrs:
  298. .proc
  299. .callinfo NO_CALLS
  300. .export $$sh_func_adrs, millicode
  301. ldo 0(r26),ret1
  302. dep r0,30,1,r26
  303. probew (r26),r31,r22
  304. extru,= r22,31,1,r22
  305. bv r0(r31)
  306. ldws 0(r26),ret1
  307. .procend
  308. temp: .EQU r1
  309. retreg: .EQU ret1 ; r29
  310. .export $$divU,millicode
  311. .import $$divU_3,millicode
  312. .import $$divU_5,millicode
  313. .import $$divU_6,millicode
  314. .import $$divU_7,millicode
  315. .import $$divU_9,millicode
  316. .import $$divU_10,millicode
  317. .import $$divU_12,millicode
  318. .import $$divU_14,millicode
  319. .import $$divU_15,millicode
  320. $$divU:
  321. .proc
  322. .callinfo NO_CALLS
  323. ; The subtract is not nullified since it does no harm and can be used
  324. ; by the two cases that branch back to "normal".
  325. comib,>= 15,arg1,special_divisor
  326. sub r0,arg1,temp ; clear carry, negate the divisor
  327. ds r0,temp,r0 ; set V-bit to 1
  328. normal:
  329. add arg0,arg0,retreg ; shift msb bit into carry
  330. ds r0,arg1,temp ; 1st divide step, if no carry
  331. addc retreg,retreg,retreg ; shift retreg with/into carry
  332. ds temp,arg1,temp ; 2nd divide step
  333. addc retreg,retreg,retreg ; shift retreg with/into carry
  334. ds temp,arg1,temp ; 3rd divide step
  335. addc retreg,retreg,retreg ; shift retreg with/into carry
  336. ds temp,arg1,temp ; 4th divide step
  337. addc retreg,retreg,retreg ; shift retreg with/into carry
  338. ds temp,arg1,temp ; 5th divide step
  339. addc retreg,retreg,retreg ; shift retreg with/into carry
  340. ds temp,arg1,temp ; 6th divide step
  341. addc retreg,retreg,retreg ; shift retreg with/into carry
  342. ds temp,arg1,temp ; 7th divide step
  343. addc retreg,retreg,retreg ; shift retreg with/into carry
  344. ds temp,arg1,temp ; 8th divide step
  345. addc retreg,retreg,retreg ; shift retreg with/into carry
  346. ds temp,arg1,temp ; 9th divide step
  347. addc retreg,retreg,retreg ; shift retreg with/into carry
  348. ds temp,arg1,temp ; 10th divide step
  349. addc retreg,retreg,retreg ; shift retreg with/into carry
  350. ds temp,arg1,temp ; 11th divide step
  351. addc retreg,retreg,retreg ; shift retreg with/into carry
  352. ds temp,arg1,temp ; 12th divide step
  353. addc retreg,retreg,retreg ; shift retreg with/into carry
  354. ds temp,arg1,temp ; 13th divide step
  355. addc retreg,retreg,retreg ; shift retreg with/into carry
  356. ds temp,arg1,temp ; 14th divide step
  357. addc retreg,retreg,retreg ; shift retreg with/into carry
  358. ds temp,arg1,temp ; 15th divide step
  359. addc retreg,retreg,retreg ; shift retreg with/into carry
  360. ds temp,arg1,temp ; 16th divide step
  361. addc retreg,retreg,retreg ; shift retreg with/into carry
  362. ds temp,arg1,temp ; 17th divide step
  363. addc retreg,retreg,retreg ; shift retreg with/into carry
  364. ds temp,arg1,temp ; 18th divide step
  365. addc retreg,retreg,retreg ; shift retreg with/into carry
  366. ds temp,arg1,temp ; 19th divide step
  367. addc retreg,retreg,retreg ; shift retreg with/into carry
  368. ds temp,arg1,temp ; 20th divide step
  369. addc retreg,retreg,retreg ; shift retreg with/into carry
  370. ds temp,arg1,temp ; 21st divide step
  371. addc retreg,retreg,retreg ; shift retreg with/into carry
  372. ds temp,arg1,temp ; 22nd divide step
  373. addc retreg,retreg,retreg ; shift retreg with/into carry
  374. ds temp,arg1,temp ; 23rd divide step
  375. addc retreg,retreg,retreg ; shift retreg with/into carry
  376. ds temp,arg1,temp ; 24th divide step
  377. addc retreg,retreg,retreg ; shift retreg with/into carry
  378. ds temp,arg1,temp ; 25th divide step
  379. addc retreg,retreg,retreg ; shift retreg with/into carry
  380. ds temp,arg1,temp ; 26th divide step
  381. addc retreg,retreg,retreg ; shift retreg with/into carry
  382. ds temp,arg1,temp ; 27th divide step
  383. addc retreg,retreg,retreg ; shift retreg with/into carry
  384. ds temp,arg1,temp ; 28th divide step
  385. addc retreg,retreg,retreg ; shift retreg with/into carry
  386. ds temp,arg1,temp ; 29th divide step
  387. addc retreg,retreg,retreg ; shift retreg with/into carry
  388. ds temp,arg1,temp ; 30th divide step
  389. addc retreg,retreg,retreg ; shift retreg with/into carry
  390. ds temp,arg1,temp ; 31st divide step
  391. addc retreg,retreg,retreg ; shift retreg with/into carry
  392. ds temp,arg1,temp ; 32nd divide step,
  393. bv 0(r31)
  394. addc retreg,retreg,retreg ; shift last retreg bit into retreg
  395. ;_____________________________________________________________________________
  396. ; handle the cases where divisor is a small constant or has high bit on
  397. special_divisor:
  398. blr arg1,r0
  399. comib,>,n 0,arg1,big_divisor ; nullify previous instruction
  400. zero_divisor: ; this label is here to provide external visibility
  401. addit,= 0,arg1,0 ; trap for zero dvr
  402. nop
  403. bv 0(r31) ; divisor == 1
  404. copy arg0,retreg
  405. bv 0(r31) ; divisor == 2
  406. extru arg0,30,31,retreg
  407. b,n $$divU_3 ; divisor == 3
  408. nop
  409. bv 0(r31) ; divisor == 4
  410. extru arg0,29,30,retreg
  411. b,n $$divU_5 ; divisor == 5
  412. nop
  413. b,n $$divU_6 ; divisor == 6
  414. nop
  415. b,n $$divU_7 ; divisor == 7
  416. nop
  417. bv 0(r31) ; divisor == 8
  418. extru arg0,28,29,retreg
  419. b,n $$divU_9 ; divisor == 9
  420. nop
  421. b,n $$divU_10 ; divisor == 10
  422. nop
  423. b normal ; divisor == 11
  424. ds r0,temp,r0 ; set V-bit to 1
  425. b,n $$divU_12 ; divisor == 12
  426. nop
  427. b normal ; divisor == 13
  428. ds r0,temp,r0 ; set V-bit to 1
  429. b,n $$divU_14 ; divisor == 14
  430. nop
  431. b,n $$divU_15 ; divisor == 15
  432. nop
  433. ;_____________________________________________________________________________
  434. ; Handle the case where the high bit is on in the divisor.
  435. ; Compute: if( dividend>=divisor) quotient=1; else quotient=0;
  436. ; Note: dividend>==divisor iff dividend-divisor does not borrow
  437. ; and not borrow iff carry
  438. big_divisor:
  439. sub arg0,arg1,r0
  440. bv 0(r31)
  441. addc r0,r0,retreg
  442. .procend
  443. .end
  444. t2: .EQU r1
  445. ; x2 .EQU arg0 ; r26
  446. t1: .EQU arg1 ; r25
  447. ; x1 .EQU ret1 ; r29
  448. ;_____________________________________________________________________________
  449. $$divide_by_constant:
  450. .PROC
  451. .CALLINFO NO_CALLS
  452. .export $$divide_by_constant,millicode
  453. ; Provides a "nice" label for the code covered by the unwind descriptor
  454. ; for things like gprof.
  455. $$divI_2:
  456. .EXPORT $$divI_2,MILLICODE
  457. COMCLR,>= arg0,0,0
  458. ADDI 1,arg0,arg0
  459. bv 0(r31)
  460. EXTRS arg0,30,31,ret1
  461. $$divI_4:
  462. .EXPORT $$divI_4,MILLICODE
  463. COMCLR,>= arg0,0,0
  464. ADDI 3,arg0,arg0
  465. bv 0(r31)
  466. EXTRS arg0,29,30,ret1
  467. $$divI_8:
  468. .EXPORT $$divI_8,MILLICODE
  469. COMCLR,>= arg0,0,0
  470. ADDI 7,arg0,arg0
  471. bv 0(r31)
  472. EXTRS arg0,28,29,ret1
  473. $$divI_16:
  474. .EXPORT $$divI_16,MILLICODE
  475. COMCLR,>= arg0,0,0
  476. ADDI 15,arg0,arg0
  477. bv 0(r31)
  478. EXTRS arg0,27,28,ret1
  479. $$divI_3:
  480. .EXPORT $$divI_3,MILLICODE
  481. COMB,<,N arg0,0,$neg3
  482. ADDI 1,arg0,arg0
  483. EXTRU arg0,1,2,ret1
  484. SH2ADD arg0,arg0,arg0
  485. B $pos
  486. ADDC ret1,0,ret1
  487. $neg3:
  488. SUBI 1,arg0,arg0
  489. EXTRU arg0,1,2,ret1
  490. SH2ADD arg0,arg0,arg0
  491. B $neg
  492. ADDC ret1,0,ret1
  493. $$divU_3:
  494. .EXPORT $$divU_3,MILLICODE
  495. ADDI 1,arg0,arg0
  496. ADDC 0,0,ret1
  497. SHD ret1,arg0,30,t1
  498. SH2ADD arg0,arg0,arg0
  499. B $pos
  500. ADDC ret1,t1,ret1
  501. $$divI_5:
  502. .EXPORT $$divI_5,MILLICODE
  503. COMB,<,N arg0,0,$neg5
  504. ADDI 3,arg0,t1
  505. SH1ADD arg0,t1,arg0
  506. B $pos
  507. ADDC 0,0,ret1
  508. $neg5:
  509. SUB 0,arg0,arg0
  510. ADDI 1,arg0,arg0
  511. SHD 0,arg0,31,ret1
  512. SH1ADD arg0,arg0,arg0
  513. B $neg
  514. ADDC ret1,0,ret1
  515. $$divU_5:
  516. .EXPORT $$divU_5,MILLICODE
  517. ADDI 1,arg0,arg0
  518. ADDC 0,0,ret1
  519. SHD ret1,arg0,31,t1
  520. SH1ADD arg0,arg0,arg0
  521. B $pos
  522. ADDC t1,ret1,ret1
  523. $$divI_6:
  524. .EXPORT $$divI_6,MILLICODE
  525. COMB,<,N arg0,0,$neg6
  526. EXTRU arg0,30,31,arg0
  527. ADDI 5,arg0,t1
  528. SH2ADD arg0,t1,arg0
  529. B $pos
  530. ADDC 0,0,ret1
  531. $neg6:
  532. SUBI 2,arg0,arg0
  533. EXTRU arg0,30,31,arg0
  534. SHD 0,arg0,30,ret1
  535. SH2ADD arg0,arg0,arg0
  536. B $neg
  537. ADDC ret1,0,ret1
  538. $$divU_6:
  539. .EXPORT $$divU_6,MILLICODE
  540. EXTRU arg0,30,31,arg0
  541. ADDI 1,arg0,arg0
  542. SHD 0,arg0,30,ret1
  543. SH2ADD arg0,arg0,arg0
  544. B $pos
  545. ADDC ret1,0,ret1
  546. $$divU_10:
  547. .EXPORT $$divU_10,MILLICODE
  548. EXTRU arg0,30,31,arg0
  549. ADDI 3,arg0,t1
  550. SH1ADD arg0,t1,arg0
  551. ADDC 0,0,ret1
  552. $pos:
  553. SHD ret1,arg0,28,t1
  554. SHD arg0,0,28,t2
  555. ADD arg0,t2,arg0
  556. ADDC ret1,t1,ret1
  557. $pos_for_17:
  558. SHD ret1,arg0,24,t1
  559. SHD arg0,0,24,t2
  560. ADD arg0,t2,arg0
  561. ADDC ret1,t1,ret1
  562. SHD ret1,arg0,16,t1
  563. SHD arg0,0,16,t2
  564. ADD arg0,t2,arg0
  565. bv 0(r31)
  566. ADDC ret1,t1,ret1
  567. $$divI_10:
  568. .EXPORT $$divI_10,MILLICODE
  569. COMB,< arg0,0,$neg10
  570. COPY 0,ret1
  571. EXTRU arg0,30,31,arg0
  572. ADDIB,TR 1,arg0,$pos
  573. SH1ADD arg0,arg0,arg0
  574. $neg10:
  575. SUBI 2,arg0,arg0
  576. EXTRU arg0,30,31,arg0
  577. SH1ADD arg0,arg0,arg0
  578. $neg:
  579. SHD ret1,arg0,28,t1
  580. SHD arg0,0,28,t2
  581. ADD arg0,t2,arg0
  582. ADDC ret1,t1,ret1
  583. $neg_for_17:
  584. SHD ret1,arg0,24,t1
  585. SHD arg0,0,24,t2
  586. ADD arg0,t2,arg0
  587. ADDC ret1,t1,ret1
  588. SHD ret1,arg0,16,t1
  589. SHD arg0,0,16,t2
  590. ADD arg0,t2,arg0
  591. ADDC ret1,t1,ret1
  592. bv 0(r31)
  593. SUB 0,ret1,ret1
  594. $$divI_12:
  595. .EXPORT $$divI_12,MILLICODE
  596. COMB,< arg0,0,$neg12
  597. COPY 0,ret1
  598. EXTRU arg0,29,30,arg0
  599. ADDIB,TR 1,arg0,$pos
  600. SH2ADD arg0,arg0,arg0
  601. $neg12:
  602. SUBI 4,arg0,arg0
  603. EXTRU arg0,29,30,arg0
  604. B $neg
  605. SH2ADD arg0,arg0,arg0
  606. $$divU_12:
  607. .EXPORT $$divU_12,MILLICODE
  608. EXTRU arg0,29,30,arg0
  609. ADDI 5,arg0,t1
  610. SH2ADD arg0,t1,arg0
  611. B $pos
  612. ADDC 0,0,ret1
  613. $$divI_15:
  614. .EXPORT $$divI_15,MILLICODE
  615. COMB,< arg0,0,$neg15
  616. COPY 0,ret1
  617. ADDIB,TR 1,arg0,$pos+4
  618. SHD ret1,arg0,28,t1
  619. $neg15:
  620. B $neg
  621. SUBI 1,arg0,arg0
  622. $$divU_15:
  623. .EXPORT $$divU_15,MILLICODE
  624. ADDI 1,arg0,arg0
  625. B $pos
  626. ADDC 0,0,ret1
  627. $$divI_17:
  628. .EXPORT $$divI_17,MILLICODE
  629. COMB,<,N arg0,0,$neg17
  630. ADDI 1,arg0,arg0
  631. SHD 0,arg0,28,t1
  632. SHD arg0,0,28,t2
  633. SUB t2,arg0,arg0
  634. B $pos_for_17
  635. SUBB t1,0,ret1
  636. $neg17:
  637. SUBI 1,arg0,arg0
  638. SHD 0,arg0,28,t1
  639. SHD arg0,0,28,t2
  640. SUB t2,arg0,arg0
  641. B $neg_for_17
  642. SUBB t1,0,ret1
  643. $$divU_17:
  644. .EXPORT $$divU_17,MILLICODE
  645. ADDI 1,arg0,arg0
  646. ADDC 0,0,ret1
  647. SHD ret1,arg0,28,t1
  648. $u17:
  649. SHD arg0,0,28,t2
  650. SUB t2,arg0,arg0
  651. B $pos_for_17
  652. SUBB t1,ret1,ret1
  653. $$divI_7:
  654. .EXPORT $$divI_7,MILLICODE
  655. COMB,<,N arg0,0,$neg7
  656. $7:
  657. ADDI 1,arg0,arg0
  658. SHD 0,arg0,29,ret1
  659. SH3ADD arg0,arg0,arg0
  660. ADDC ret1,0,ret1
  661. $pos7:
  662. SHD ret1,arg0,26,t1
  663. SHD arg0,0,26,t2
  664. ADD arg0,t2,arg0
  665. ADDC ret1,t1,ret1
  666. SHD ret1,arg0,20,t1
  667. SHD arg0,0,20,t2
  668. ADD arg0,t2,arg0
  669. ADDC ret1,t1,t1
  670. COPY 0,ret1
  671. SHD,= t1,arg0,24,t1
  672. $1:
  673. ADDB,TR t1,ret1,$2
  674. EXTRU arg0,31,24,arg0
  675. bv,n 0(r31)
  676. $2:
  677. ADDB,TR t1,arg0,$1
  678. EXTRU,= arg0,7,8,t1
  679. $neg7:
  680. SUBI 1,arg0,arg0
  681. $8:
  682. SHD 0,arg0,29,ret1
  683. SH3ADD arg0,arg0,arg0
  684. ADDC ret1,0,ret1
  685. $neg7_shift:
  686. SHD ret1,arg0,26,t1
  687. SHD arg0,0,26,t2
  688. ADD arg0,t2,arg0
  689. ADDC ret1,t1,ret1
  690. SHD ret1,arg0,20,t1
  691. SHD arg0,0,20,t2
  692. ADD arg0,t2,arg0
  693. ADDC ret1,t1,t1
  694. COPY 0,ret1
  695. SHD,= t1,arg0,24,t1
  696. $3:
  697. ADDB,TR t1,ret1,$4
  698. EXTRU arg0,31,24,arg0
  699. bv 0(r31)
  700. SUB 0,ret1,ret1
  701. $4:
  702. ADDB,TR t1,arg0,$3
  703. EXTRU,= arg0,7,8,t1
  704. $$divU_7:
  705. .EXPORT $$divU_7,MILLICODE
  706. ADDI 1,arg0,arg0
  707. ADDC 0,0,ret1
  708. SHD ret1,arg0,29,t1
  709. SH3ADD arg0,arg0,arg0
  710. B $pos7
  711. ADDC t1,ret1,ret1
  712. $$divI_9:
  713. .EXPORT $$divI_9,MILLICODE
  714. COMB,<,N arg0,0,$neg9
  715. ADDI 1,arg0,arg0
  716. SHD 0,arg0,29,t1
  717. SHD arg0,0,29,t2
  718. SUB t2,arg0,arg0
  719. B $pos7
  720. SUBB t1,0,ret1
  721. $neg9:
  722. SUBI 1,arg0,arg0
  723. SHD 0,arg0,29,t1
  724. SHD arg0,0,29,t2
  725. SUB t2,arg0,arg0
  726. B $neg7_shift
  727. SUBB t1,0,ret1
  728. $$divU_9:
  729. .EXPORT $$divU_9,MILLICODE
  730. ADDI 1,arg0,arg0
  731. ADDC 0,0,ret1
  732. SHD ret1,arg0,29,t1
  733. SHD arg0,0,29,t2
  734. SUB t2,arg0,arg0
  735. B $pos7
  736. SUBB t1,ret1,ret1
  737. $$divI_14:
  738. .EXPORT $$divI_14,MILLICODE
  739. COMB,<,N arg0,0,$neg14
  740. $$divU_14:
  741. .EXPORT $$divU_14,MILLICODE
  742. B $7
  743. EXTRU arg0,30,31,arg0
  744. $neg14:
  745. SUBI 2,arg0,arg0
  746. B $8
  747. EXTRU arg0,30,31,arg0
  748. .PROCEND
  749. .END
  750. rmndr: .EQU ret1 ; r29
  751. .export $$remU,millicode
  752. $$remU:
  753. .proc
  754. .callinfo NO_CALLS
  755. .entry
  756. comib,>=,n 0,arg1,special_case
  757. sub r0,arg1,rmndr ; clear carry, negate the divisor
  758. ds r0,rmndr,r0 ; set V-bit to 1
  759. add arg0,arg0,temp ; shift msb bit into carry
  760. ds r0,arg1,rmndr ; 1st divide step, if no carry
  761. addc temp,temp,temp ; shift temp with/into carry
  762. ds rmndr,arg1,rmndr ; 2nd divide step
  763. addc temp,temp,temp ; shift temp with/into carry
  764. ds rmndr,arg1,rmndr ; 3rd divide step
  765. addc temp,temp,temp ; shift temp with/into carry
  766. ds rmndr,arg1,rmndr ; 4th divide step
  767. addc temp,temp,temp ; shift temp with/into carry
  768. ds rmndr,arg1,rmndr ; 5th divide step
  769. addc temp,temp,temp ; shift temp with/into carry
  770. ds rmndr,arg1,rmndr ; 6th divide step
  771. addc temp,temp,temp ; shift temp with/into carry
  772. ds rmndr,arg1,rmndr ; 7th divide step
  773. addc temp,temp,temp ; shift temp with/into carry
  774. ds rmndr,arg1,rmndr ; 8th divide step
  775. addc temp,temp,temp ; shift temp with/into carry
  776. ds rmndr,arg1,rmndr ; 9th divide step
  777. addc temp,temp,temp ; shift temp with/into carry
  778. ds rmndr,arg1,rmndr ; 10th divide step
  779. addc temp,temp,temp ; shift temp with/into carry
  780. ds rmndr,arg1,rmndr ; 11th divide step
  781. addc temp,temp,temp ; shift temp with/into carry
  782. ds rmndr,arg1,rmndr ; 12th divide step
  783. addc temp,temp,temp ; shift temp with/into carry
  784. ds rmndr,arg1,rmndr ; 13th divide step
  785. addc temp,temp,temp ; shift temp with/into carry
  786. ds rmndr,arg1,rmndr ; 14th divide step
  787. addc temp,temp,temp ; shift temp with/into carry
  788. ds rmndr,arg1,rmndr ; 15th divide step
  789. addc temp,temp,temp ; shift temp with/into carry
  790. ds rmndr,arg1,rmndr ; 16th divide step
  791. addc temp,temp,temp ; shift temp with/into carry
  792. ds rmndr,arg1,rmndr ; 17th divide step
  793. addc temp,temp,temp ; shift temp with/into carry
  794. ds rmndr,arg1,rmndr ; 18th divide step
  795. addc temp,temp,temp ; shift temp with/into carry
  796. ds rmndr,arg1,rmndr ; 19th divide step
  797. addc temp,temp,temp ; shift temp with/into carry
  798. ds rmndr,arg1,rmndr ; 20th divide step
  799. addc temp,temp,temp ; shift temp with/into carry
  800. ds rmndr,arg1,rmndr ; 21st divide step
  801. addc temp,temp,temp ; shift temp with/into carry
  802. ds rmndr,arg1,rmndr ; 22nd divide step
  803. addc temp,temp,temp ; shift temp with/into carry
  804. ds rmndr,arg1,rmndr ; 23rd divide step
  805. addc temp,temp,temp ; shift temp with/into carry
  806. ds rmndr,arg1,rmndr ; 24th divide step
  807. addc temp,temp,temp ; shift temp with/into carry
  808. ds rmndr,arg1,rmndr ; 25th divide step
  809. addc temp,temp,temp ; shift temp with/into carry
  810. ds rmndr,arg1,rmndr ; 26th divide step
  811. addc temp,temp,temp ; shift temp with/into carry
  812. ds rmndr,arg1,rmndr ; 27th divide step
  813. addc temp,temp,temp ; shift temp with/into carry
  814. ds rmndr,arg1,rmndr ; 28th divide step
  815. addc temp,temp,temp ; shift temp with/into carry
  816. ds rmndr,arg1,rmndr ; 29th divide step
  817. addc temp,temp,temp ; shift temp with/into carry
  818. ds rmndr,arg1,rmndr ; 30th divide step
  819. addc temp,temp,temp ; shift temp with/into carry
  820. ds rmndr,arg1,rmndr ; 31st divide step
  821. addc temp,temp,temp ; shift temp with/into carry
  822. ds rmndr,arg1,rmndr ; 32nd divide step,
  823. comiclr,<= 0,rmndr,r0
  824. add rmndr,arg1,rmndr ; correction
  825. ; .exit
  826. bv,n 0(r31)
  827. nop
  828. ; Putting >= on the last DS and deleting COMICLR does not work!
  829. ;_____________________________________________________________________________
  830. special_case:
  831. addit,= 0,arg1,r0 ; trap on div by zero
  832. sub,>>= arg0,arg1,rmndr
  833. copy arg0,rmndr
  834. .exit
  835. bv,n 0(r31)
  836. nop
  837. .procend
  838. .end
; NOTE: the usual millicode return macros (shown below for reference) are
; not used in this file; these routines return directly with
; bv 0(r31) / bv,n 0(r31), since r31 holds the millicode return address.
; #define return bv 0(%mrp)
; #define return_n bv,n 0(%mrp)
  842. .align 16
  843. $$mulI:
  844. .proc
  845. .callinfo NO_CALLS
  846. .export $$mulI, millicode
  847. combt,<<= %r25,%r26,l4 ; swap args if unsigned %r25>%r26
  848. copy 0,%r29 ; zero out the result
  849. xor %r26,%r25,%r26 ; swap %r26 & %r25 using the
  850. xor %r26,%r25,%r25 ; old xor trick
  851. xor %r26,%r25,%r26
  852. l4: combt,<= 0,%r26,l3 ; if %r26>=0 then proceed like unsigned
  853. zdep %r25,30,8,%r1 ; %r1 = (%r25&0xff)<<1 *********
  854. sub,> 0,%r25,%r1 ; otherwise negate both and
  855. combt,<=,n %r26,%r1,l2 ; swap back if |%r26|<|%r25|
  856. sub 0,%r26,%r25
  857. movb,tr,n %r1,%r26,l2 ; 10th inst.
  858. l0: add %r29,%r1,%r29 ; add in this partial product
  859. l1: zdep %r26,23,24,%r26 ; %r26 <<= 8 ******************
  860. l2: zdep %r25,30,8,%r1 ; %r1 = (%r25&0xff)<<1 *********
  861. l3: blr %r1,0 ; case on these 8 bits ******
  862. extru %r25,23,24,%r25 ; %r25 >>= 8 ******************
  863. ;16 insts before this.
  864. ; %r26 <<= 8 **************************
  865. x0: comb,<> %r25,0,l2 ! zdep %r26,23,24,%r26 ! bv,n 0(r31) ! nop
  866. x1: comb,<> %r25,0,l1 ! add %r29,%r26,%r29 ! bv,n 0(r31) ! nop
  867. x2: comb,<> %r25,0,l1 ! sh1add %r26,%r29,%r29 ! bv,n 0(r31) ! nop
  868. x3: comb,<> %r25,0,l0 ! sh1add %r26,%r26,%r1 ! bv 0(r31) ! add %r29,%r1,%r29
  869. x4: comb,<> %r25,0,l1 ! sh2add %r26,%r29,%r29 ! bv,n 0(r31) ! nop
  870. x5: comb,<> %r25,0,l0 ! sh2add %r26,%r26,%r1 ! bv 0(r31) ! add %r29,%r1,%r29
  871. x6: sh1add %r26,%r26,%r1 ! comb,<> %r25,0,l1 ! sh1add %r1,%r29,%r29 ! bv,n 0(r31)
  872. x7: sh1add %r26,%r26,%r1 ! comb,<> %r25,0,l0 ! sh2add %r26,%r29,%r29 ! b,n ret_t0
  873. x8: comb,<> %r25,0,l1 ! sh3add %r26,%r29,%r29 ! bv,n 0(r31) ! nop
  874. x9: comb,<> %r25,0,l0 ! sh3add %r26,%r26,%r1 ! bv 0(r31) ! add %r29,%r1,%r29
  875. x10: sh2add %r26,%r26,%r1 ! comb,<> %r25,0,l1 ! sh1add %r1,%r29,%r29 ! bv,n 0(r31)
  876. x11: sh1add %r26,%r26,%r1 ! comb,<> %r25,0,l0 ! sh3add %r26,%r29,%r29 ! b,n ret_t0
  877. x12: sh1add %r26,%r26,%r1 ! comb,<> %r25,0,l1 ! sh2add %r1,%r29,%r29 ! bv,n 0(r31)
  878. x13: sh2add %r26,%r26,%r1 ! comb,<> %r25,0,l0 ! sh3add %r26,%r29,%r29 ! b,n ret_t0
  879. x14: sh1add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  880. x15: sh2add %r26,%r26,%r1 ! comb,<> %r25,0,l0 ! sh1add %r1,%r1,%r1 ! b,n ret_t0
  881. x16: zdep %r26,27,28,%r1 ! comb,<> %r25,0,l1 ! add %r29,%r1,%r29 ! bv,n 0(r31)
  882. x17: sh3add %r26,%r26,%r1 ! comb,<> %r25,0,l0 ! sh3add %r26,%r1,%r1 ! b,n ret_t0
  883. x18: sh3add %r26,%r26,%r1 ! comb,<> %r25,0,l1 ! sh1add %r1,%r29,%r29 ! bv,n 0(r31)
  884. x19: sh3add %r26,%r26,%r1 ! comb,<> %r25,0,l0 ! sh1add %r1,%r26,%r1 ! b,n ret_t0
  885. x20: sh2add %r26,%r26,%r1 ! comb,<> %r25,0,l1 ! sh2add %r1,%r29,%r29 ! bv,n 0(r31)
  886. x21: sh2add %r26,%r26,%r1 ! comb,<> %r25,0,l0 ! sh2add %r1,%r26,%r1 ! b,n ret_t0
  887. x22: sh2add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  888. x23: sh2add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_t0 ! sh1add %r1,%r26,%r1
  889. x24: sh1add %r26,%r26,%r1 ! comb,<> %r25,0,l1 ! sh3add %r1,%r29,%r29 ! bv,n 0(r31)
  890. x25: sh2add %r26,%r26,%r1 ! comb,<> %r25,0,l0 ! sh2add %r1,%r1,%r1 ! b,n ret_t0
  891. x26: sh1add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  892. x27: sh1add %r26,%r26,%r1 ! comb,<> %r25,0,l0 ! sh3add %r1,%r1,%r1 ! b,n ret_t0
  893. x28: sh1add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_shift ! sh2add %r1,%r29,%r29
  894. x29: sh1add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_t0 ! sh2add %r1,%r26,%r1
  895. x30: sh2add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  896. x31: zdep %r26,26,27,%r1 ! comb,<> %r25,0,l0 ! sub %r1,%r26,%r1 ! b,n ret_t0
  897. x32: zdep %r26,26,27,%r1 ! comb,<> %r25,0,l1 ! add %r29,%r1,%r29 ! bv,n 0(r31)
  898. x33: sh3add %r26,0,%r1 ! comb,<> %r25,0,l0 ! sh2add %r1,%r26,%r1 ! b,n ret_t0
  899. x34: zdep %r26,27,28,%r1 ! add %r1,%r26,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  900. x35: sh3add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh3add %r26,%r1,%r1
  901. x36: sh3add %r26,%r26,%r1 ! comb,<> %r25,0,l1 ! sh2add %r1,%r29,%r29 ! bv,n 0(r31)
  902. x37: sh3add %r26,%r26,%r1 ! comb,<> %r25,0,l0 ! sh2add %r1,%r26,%r1 ! b,n ret_t0
  903. x38: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  904. x39: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_t0 ! sh1add %r1,%r26,%r1
  905. x40: sh2add %r26,%r26,%r1 ! comb,<> %r25,0,l1 ! sh3add %r1,%r29,%r29 ! bv,n 0(r31)
  906. x41: sh2add %r26,%r26,%r1 ! comb,<> %r25,0,l0 ! sh3add %r1,%r26,%r1 ! b,n ret_t0
  907. x42: sh2add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  908. x43: sh2add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_t0 ! sh1add %r1,%r26,%r1
  909. x44: sh2add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_shift ! sh2add %r1,%r29,%r29
  910. x45: sh3add %r26,%r26,%r1 ! comb,<> %r25,0,l0 ! sh2add %r1,%r1,%r1 ! b,n ret_t0
  911. x46: sh3add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! add %r1,%r26,%r1
  912. x47: sh3add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh1add %r26,%r1,%r1
  913. x48: sh1add %r26,%r26,%r1 ! comb,<> %r25,0,l0 ! zdep %r1,27,28,%r1 ! b,n ret_t0
  914. x49: sh3add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh2add %r26,%r1,%r1
  915. x50: sh2add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  916. x51: sh3add %r26,%r26,%r1 ! sh3add %r26,%r1,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
  917. x52: sh1add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_shift ! sh2add %r1,%r29,%r29
  918. x53: sh1add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_t0 ! sh2add %r1,%r26,%r1
  919. x54: sh3add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  920. x55: sh3add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh1add %r1,%r26,%r1
  921. x56: sh1add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_shift ! sh3add %r1,%r29,%r29
  922. x57: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
  923. x58: sh1add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_2t0 ! sh2add %r1,%r26,%r1
  924. x59: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_t02a0 ! sh1add %r1,%r1,%r1
  925. x60: sh2add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_shift ! sh2add %r1,%r29,%r29
  926. x61: sh2add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh2add %r1,%r26,%r1
  927. x62: zdep %r26,26,27,%r1 ! sub %r1,%r26,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  928. x63: zdep %r26,25,26,%r1 ! comb,<> %r25,0,l0 ! sub %r1,%r26,%r1 ! b,n ret_t0
  929. x64: zdep %r26,25,26,%r1 ! comb,<> %r25,0,l1 ! add %r29,%r1,%r29 ! bv,n 0(r31)
  930. x65: sh3add %r26,0,%r1 ! comb,<> %r25,0,l0 ! sh3add %r1,%r26,%r1 ! b,n ret_t0
  931. x66: zdep %r26,26,27,%r1 ! add %r1,%r26,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  932. x67: sh3add %r26,0,%r1 ! sh2add %r1,%r26,%r1 ! b e_t0 ! sh1add %r1,%r26,%r1
  933. x68: sh3add %r26,0,%r1 ! sh1add %r1,%r26,%r1 ! b e_shift ! sh2add %r1,%r29,%r29
  934. x69: sh3add %r26,0,%r1 ! sh1add %r1,%r26,%r1 ! b e_t0 ! sh2add %r1,%r26,%r1
  935. x70: zdep %r26,25,26,%r1 ! sh2add %r26,%r1,%r1 ! b e_t0 ! sh1add %r26,%r1,%r1
  936. x71: sh3add %r26,%r26,%r1 ! sh3add %r1,0,%r1 ! b e_t0 ! sub %r1,%r26,%r1
  937. x72: sh3add %r26,%r26,%r1 ! comb,<> %r25,0,l1 ! sh3add %r1,%r29,%r29 ! bv,n 0(r31)
  938. x73: sh3add %r26,%r26,%r1 ! sh3add %r1,%r26,%r1 ! b e_shift ! add %r29,%r1,%r29
  939. x74: sh3add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  940. x75: sh3add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_t0 ! sh1add %r1,%r26,%r1
  941. x76: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_shift ! sh2add %r1,%r29,%r29
  942. x77: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_t0 ! sh2add %r1,%r26,%r1
  943. x78: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_2t0 ! sh1add %r1,%r26,%r1
  944. x79: zdep %r26,27,28,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sub %r1,%r26,%r1
  945. x80: zdep %r26,27,28,%r1 ! sh2add %r1,%r1,%r1 ! b e_shift ! add %r29,%r1,%r29
  946. x81: sh3add %r26,%r26,%r1 ! sh3add %r1,%r1,%r1 ! b e_shift ! add %r29,%r1,%r29
  947. x82: sh2add %r26,%r26,%r1 ! sh3add %r1,%r26,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  948. x83: sh2add %r26,%r26,%r1 ! sh3add %r1,%r26,%r1 ! b e_t0 ! sh1add %r1,%r26,%r1
  949. x84: sh2add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_shift ! sh2add %r1,%r29,%r29
  950. x85: sh3add %r26,0,%r1 ! sh1add %r1,%r26,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
  951. x86: sh2add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_2t0 ! sh1add %r1,%r26,%r1
  952. x87: sh3add %r26,%r26,%r1 ! sh3add %r1,%r1,%r1 ! b e_t02a0 ! sh2add %r26,%r1,%r1
  953. x88: sh2add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_shift ! sh3add %r1,%r29,%r29
  954. x89: sh2add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_t0 ! sh3add %r1,%r26,%r1
  955. x90: sh3add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  956. x91: sh3add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh1add %r1,%r26,%r1
  957. x92: sh2add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_4t0 ! sh1add %r1,%r26,%r1
  958. x93: zdep %r26,26,27,%r1 ! sub %r1,%r26,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
  959. x94: sh3add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_2t0 ! sh1add %r26,%r1,%r1
  960. x95: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
  961. x96: sh3add %r26,0,%r1 ! sh1add %r1,%r1,%r1 ! b e_shift ! sh2add %r1,%r29,%r29
  962. x97: sh3add %r26,0,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh2add %r1,%r26,%r1
  963. x98: zdep %r26,26,27,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh1add %r26,%r1,%r1
  964. x99: sh3add %r26,0,%r1 ! sh2add %r1,%r26,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
  965. x100: sh2add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_shift ! sh2add %r1,%r29,%r29
  966. x101: sh2add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh2add %r1,%r26,%r1
  967. x102: zdep %r26,26,27,%r1 ! sh1add %r26,%r1,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
  968. x103: sh2add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_t02a0 ! sh2add %r1,%r26,%r1
  969. x104: sh1add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_shift ! sh3add %r1,%r29,%r29
  970. x105: sh2add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
  971. x106: sh1add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_2t0 ! sh2add %r1,%r26,%r1
  972. x107: sh3add %r26,%r26,%r1 ! sh2add %r26,%r1,%r1 ! b e_t02a0 ! sh3add %r1,%r26,%r1
  973. x108: sh3add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_shift ! sh2add %r1,%r29,%r29
  974. x109: sh3add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh2add %r1,%r26,%r1
  975. x110: sh3add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_2t0 ! sh1add %r1,%r26,%r1
  976. x111: sh3add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
  977. x112: sh1add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_t0 ! zdep %r1,27,28,%r1
  978. x113: sh3add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_t02a0 ! sh1add %r1,%r1,%r1
  979. x114: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_2t0 ! sh1add %r1,%r1,%r1
  980. x115: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_2t0a0 ! sh1add %r1,%r1,%r1
  981. x116: sh1add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_4t0 ! sh2add %r1,%r26,%r1
  982. x117: sh1add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_t0 ! sh3add %r1,%r1,%r1
  983. x118: sh1add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_t0a0 ! sh3add %r1,%r1,%r1
  984. x119: sh1add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_t02a0 ! sh3add %r1,%r1,%r1
  985. x120: sh2add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_shift ! sh3add %r1,%r29,%r29
  986. x121: sh2add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh3add %r1,%r26,%r1
  987. x122: sh2add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_2t0 ! sh2add %r1,%r26,%r1
  988. x123: sh2add %r26,%r26,%r1 ! sh3add %r1,%r26,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
  989. x124: zdep %r26,26,27,%r1 ! sub %r1,%r26,%r1 ! b e_shift ! sh2add %r1,%r29,%r29
  990. x125: sh2add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
  991. x126: zdep %r26,25,26,%r1 ! sub %r1,%r26,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  992. x127: zdep %r26,24,25,%r1 ! comb,<> %r25,0,l0 ! sub %r1,%r26,%r1 ! b,n ret_t0
  993. x128: zdep %r26,24,25,%r1 ! comb,<> %r25,0,l1 ! add %r29,%r1,%r29 ! bv,n 0(r31)
  994. x129: zdep %r26,24,25,%r1 ! comb,<> %r25,0,l0 ! add %r1,%r26,%r1 ! b,n ret_t0
  995. x130: zdep %r26,25,26,%r1 ! add %r1,%r26,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  996. x131: sh3add %r26,0,%r1 ! sh3add %r1,%r26,%r1 ! b e_t0 ! sh1add %r1,%r26,%r1
  997. x132: sh3add %r26,0,%r1 ! sh2add %r1,%r26,%r1 ! b e_shift ! sh2add %r1,%r29,%r29
  998. x133: sh3add %r26,0,%r1 ! sh2add %r1,%r26,%r1 ! b e_t0 ! sh2add %r1,%r26,%r1
  999. x134: sh3add %r26,0,%r1 ! sh2add %r1,%r26,%r1 ! b e_2t0 ! sh1add %r1,%r26,%r1
  1000. x135: sh3add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
  1001. x136: sh3add %r26,0,%r1 ! sh1add %r1,%r26,%r1 ! b e_shift ! sh3add %r1,%r29,%r29
  1002. x137: sh3add %r26,0,%r1 ! sh1add %r1,%r26,%r1 ! b e_t0 ! sh3add %r1,%r26,%r1
  1003. x138: sh3add %r26,0,%r1 ! sh1add %r1,%r26,%r1 ! b e_2t0 ! sh2add %r1,%r26,%r1
  1004. x139: sh3add %r26,0,%r1 ! sh1add %r1,%r26,%r1 ! b e_2t0a0 ! sh2add %r1,%r26,%r1
  1005. x140: sh1add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_4t0 ! sh2add %r1,%r1,%r1
  1006. x141: sh3add %r26,0,%r1 ! sh1add %r1,%r26,%r1 ! b e_4t0a0 ! sh1add %r1,%r26,%r1
  1007. x142: sh3add %r26,%r26,%r1 ! sh3add %r1,0,%r1 ! b e_2t0 ! sub %r1,%r26,%r1
  1008. x143: zdep %r26,27,28,%r1 ! sh3add %r1,%r1,%r1 ! b e_t0 ! sub %r1,%r26,%r1
  1009. x144: sh3add %r26,%r26,%r1 ! sh3add %r1,0,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  1010. x145: sh3add %r26,%r26,%r1 ! sh3add %r1,0,%r1 ! b e_t0 ! sh1add %r1,%r26,%r1
  1011. x146: sh3add %r26,%r26,%r1 ! sh3add %r1,%r26,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  1012. x147: sh3add %r26,%r26,%r1 ! sh3add %r1,%r26,%r1 ! b e_t0 ! sh1add %r1,%r26,%r1
  1013. x148: sh3add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_shift ! sh2add %r1,%r29,%r29
  1014. x149: sh3add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_t0 ! sh2add %r1,%r26,%r1
  1015. x150: sh3add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_2t0 ! sh1add %r1,%r26,%r1
  1016. x151: sh3add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_2t0a0 ! sh1add %r1,%r26,%r1
  1017. x152: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_shift ! sh3add %r1,%r29,%r29
  1018. x153: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_t0 ! sh3add %r1,%r26,%r1
  1019. x154: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_2t0 ! sh2add %r1,%r26,%r1
  1020. x155: zdep %r26,26,27,%r1 ! sub %r1,%r26,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
  1021. x156: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_4t0 ! sh1add %r1,%r26,%r1
  1022. x157: zdep %r26,26,27,%r1 ! sub %r1,%r26,%r1 ! b e_t02a0 ! sh2add %r1,%r1,%r1
  1023. x158: zdep %r26,27,28,%r1 ! sh2add %r1,%r1,%r1 ! b e_2t0 ! sub %r1,%r26,%r1
  1024. x159: zdep %r26,26,27,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sub %r1,%r26,%r1
  1025. x160: sh2add %r26,%r26,%r1 ! sh2add %r1,0,%r1 ! b e_shift ! sh3add %r1,%r29,%r29
  1026. x161: sh3add %r26,0,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh2add %r1,%r26,%r1
  1027. x162: sh3add %r26,%r26,%r1 ! sh3add %r1,%r1,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  1028. x163: sh3add %r26,%r26,%r1 ! sh3add %r1,%r1,%r1 ! b e_t0 ! sh1add %r1,%r26,%r1
  1029. x164: sh2add %r26,%r26,%r1 ! sh3add %r1,%r26,%r1 ! b e_shift ! sh2add %r1,%r29,%r29
  1030. x165: sh3add %r26,0,%r1 ! sh2add %r1,%r26,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
  1031. x166: sh2add %r26,%r26,%r1 ! sh3add %r1,%r26,%r1 ! b e_2t0 ! sh1add %r1,%r26,%r1
  1032. x167: sh2add %r26,%r26,%r1 ! sh3add %r1,%r26,%r1 ! b e_2t0a0 ! sh1add %r1,%r26,%r1
  1033. x168: sh2add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_shift ! sh3add %r1,%r29,%r29
  1034. x169: sh2add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_t0 ! sh3add %r1,%r26,%r1
  1035. x170: zdep %r26,26,27,%r1 ! sh1add %r26,%r1,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
  1036. x171: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_t0 ! sh3add %r1,%r1,%r1
  1037. x172: sh2add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_4t0 ! sh1add %r1,%r26,%r1
  1038. x173: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_t02a0 ! sh3add %r1,%r1,%r1
  1039. x174: zdep %r26,26,27,%r1 ! sh1add %r26,%r1,%r1 ! b e_t04a0 ! sh2add %r1,%r1,%r1
  1040. x175: sh3add %r26,0,%r1 ! sh1add %r1,%r26,%r1 ! b e_5t0 ! sh1add %r1,%r26,%r1
  1041. x176: sh2add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_8t0 ! add %r1,%r26,%r1
  1042. x177: sh2add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_8t0a0 ! add %r1,%r26,%r1
  1043. x178: sh2add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_2t0 ! sh3add %r1,%r26,%r1
  1044. x179: sh2add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_2t0a0 ! sh3add %r1,%r26,%r1
  1045. x180: sh3add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_shift ! sh2add %r1,%r29,%r29
  1046. x181: sh3add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh2add %r1,%r26,%r1
  1047. x182: sh3add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_2t0 ! sh1add %r1,%r26,%r1
  1048. x183: sh3add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_2t0a0 ! sh1add %r1,%r26,%r1
  1049. x184: sh2add %r26,%r26,%r1 ! sh3add %r1,%r1,%r1 ! b e_4t0 ! add %r1,%r26,%r1
  1050. x185: sh3add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
  1051. x186: zdep %r26,26,27,%r1 ! sub %r1,%r26,%r1 ! b e_2t0 ! sh1add %r1,%r1,%r1
  1052. x187: sh3add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_t02a0 ! sh2add %r1,%r1,%r1
  1053. x188: sh3add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_4t0 ! sh1add %r26,%r1,%r1
  1054. x189: sh2add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_t0 ! sh3add %r1,%r1,%r1
  1055. x190: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_2t0 ! sh2add %r1,%r1,%r1
  1056. x191: zdep %r26,25,26,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sub %r1,%r26,%r1
  1057. x192: sh3add %r26,0,%r1 ! sh1add %r1,%r1,%r1 ! b e_shift ! sh3add %r1,%r29,%r29
  1058. x193: sh3add %r26,0,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh3add %r1,%r26,%r1
  1059. x194: sh3add %r26,0,%r1 ! sh1add %r1,%r1,%r1 ! b e_2t0 ! sh2add %r1,%r26,%r1
  1060. x195: sh3add %r26,0,%r1 ! sh3add %r1,%r26,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
  1061. x196: sh3add %r26,0,%r1 ! sh1add %r1,%r1,%r1 ! b e_4t0 ! sh1add %r1,%r26,%r1
  1062. x197: sh3add %r26,0,%r1 ! sh1add %r1,%r1,%r1 ! b e_4t0a0 ! sh1add %r1,%r26,%r1
  1063. x198: zdep %r26,25,26,%r1 ! sh1add %r26,%r1,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
  1064. x199: sh3add %r26,0,%r1 ! sh2add %r1,%r26,%r1 ! b e_2t0a0 ! sh1add %r1,%r1,%r1
  1065. x200: sh2add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_shift ! sh3add %r1,%r29,%r29
  1066. x201: sh2add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh3add %r1,%r26,%r1
  1067. x202: sh2add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_2t0 ! sh2add %r1,%r26,%r1
  1068. x203: sh2add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_2t0a0 ! sh2add %r1,%r26,%r1
  1069. x204: sh3add %r26,0,%r1 ! sh1add %r1,%r26,%r1 ! b e_4t0 ! sh1add %r1,%r1,%r1
  1070. x205: sh2add %r26,%r26,%r1 ! sh3add %r1,%r26,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
  1071. x206: zdep %r26,25,26,%r1 ! sh2add %r26,%r1,%r1 ! b e_t02a0 ! sh1add %r1,%r1,%r1
  1072. x207: sh3add %r26,0,%r1 ! sh1add %r1,%r26,%r1 ! b e_3t0 ! sh2add %r1,%r26,%r1
  1073. x208: sh2add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_8t0 ! add %r1,%r26,%r1
  1074. x209: sh2add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_8t0a0 ! add %r1,%r26,%r1
  1075. x210: sh2add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_2t0 ! sh2add %r1,%r1,%r1
  1076. x211: sh2add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_2t0a0 ! sh2add %r1,%r1,%r1
  1077. x212: sh1add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_4t0 ! sh2add %r1,%r26,%r1
  1078. x213: sh1add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_4t0a0 ! sh2add %r1,%r26,%r1
  1079. x214: sh3add %r26,%r26,%r1 ! sh2add %r26,%r1,%r1 ! b e2t04a0 ! sh3add %r1,%r26,%r1
  1080. x215: sh2add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_5t0 ! sh1add %r1,%r26,%r1
  1081. x216: sh3add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_shift ! sh3add %r1,%r29,%r29
  1082. x217: sh3add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh3add %r1,%r26,%r1
  1083. x218: sh3add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_2t0 ! sh2add %r1,%r26,%r1
  1084. x219: sh3add %r26,%r26,%r1 ! sh3add %r1,%r26,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
  1085. x220: sh1add %r26,%r26,%r1 ! sh3add %r1,%r1,%r1 ! b e_4t0 ! sh1add %r1,%r26,%r1
  1086. x221: sh1add %r26,%r26,%r1 ! sh3add %r1,%r1,%r1 ! b e_4t0a0 ! sh1add %r1,%r26,%r1
  1087. x222: sh3add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_2t0 ! sh1add %r1,%r1,%r1
  1088. x223: sh3add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_2t0a0 ! sh1add %r1,%r1,%r1
  1089. x224: sh3add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_8t0 ! add %r1,%r26,%r1
  1090. x225: sh3add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
  1091. x226: sh1add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_t02a0 ! zdep %r1,26,27,%r1
  1092. x227: sh3add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_t02a0 ! sh2add %r1,%r1,%r1
  1093. x228: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_4t0 ! sh1add %r1,%r1,%r1
  1094. x229: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_4t0a0 ! sh1add %r1,%r1,%r1
  1095. x230: sh3add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_5t0 ! add %r1,%r26,%r1
  1096. x231: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_3t0 ! sh2add %r1,%r26,%r1
  1097. x232: sh1add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_8t0 ! sh2add %r1,%r26,%r1
  1098. x233: sh1add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e_8t0a0 ! sh2add %r1,%r26,%r1
  1099. x234: sh1add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_2t0 ! sh3add %r1,%r1,%r1
  1100. x235: sh1add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e_2t0a0 ! sh3add %r1,%r1,%r1
  1101. x236: sh3add %r26,%r26,%r1 ! sh1add %r1,%r26,%r1 ! b e4t08a0 ! sh1add %r1,%r1,%r1
  1102. x237: zdep %r26,27,28,%r1 ! sh2add %r1,%r1,%r1 ! b e_3t0 ! sub %r1,%r26,%r1
  1103. x238: sh1add %r26,%r26,%r1 ! sh2add %r1,%r26,%r1 ! b e2t04a0 ! sh3add %r1,%r1,%r1
  1104. x239: zdep %r26,27,28,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0ma0 ! sh1add %r1,%r1,%r1
  1105. x240: sh3add %r26,%r26,%r1 ! add %r1,%r26,%r1 ! b e_8t0 ! sh1add %r1,%r1,%r1
  1106. x241: sh3add %r26,%r26,%r1 ! add %r1,%r26,%r1 ! b e_8t0a0 ! sh1add %r1,%r1,%r1
  1107. x242: sh2add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_2t0 ! sh3add %r1,%r26,%r1
  1108. x243: sh3add %r26,%r26,%r1 ! sh3add %r1,%r1,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
  1109. x244: sh2add %r26,%r26,%r1 ! sh1add %r1,%r1,%r1 ! b e_4t0 ! sh2add %r1,%r26,%r1
  1110. x245: sh3add %r26,0,%r1 ! sh1add %r1,%r1,%r1 ! b e_5t0 ! sh1add %r1,%r26,%r1
  1111. x246: sh2add %r26,%r26,%r1 ! sh3add %r1,%r26,%r1 ! b e_2t0 ! sh1add %r1,%r1,%r1
  1112. x247: sh2add %r26,%r26,%r1 ! sh3add %r1,%r26,%r1 ! b e_2t0a0 ! sh1add %r1,%r1,%r1
  1113. x248: zdep %r26,26,27,%r1 ! sub %r1,%r26,%r1 ! b e_shift ! sh3add %r1,%r29,%r29
  1114. x249: zdep %r26,26,27,%r1 ! sub %r1,%r26,%r1 ! b e_t0 ! sh3add %r1,%r26,%r1
  1115. x250: sh2add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_2t0 ! sh2add %r1,%r1,%r1
  1116. x251: sh2add %r26,%r26,%r1 ! sh2add %r1,%r1,%r1 ! b e_2t0a0 ! sh2add %r1,%r1,%r1
  1117. x252: zdep %r26,25,26,%r1 ! sub %r1,%r26,%r1 ! b e_shift ! sh2add %r1,%r29,%r29
  1118. x253: zdep %r26,25,26,%r1 ! sub %r1,%r26,%r1 ! b e_t0 ! sh2add %r1,%r26,%r1
  1119. x254: zdep %r26,24,25,%r1 ! sub %r1,%r26,%r1 ! b e_shift ! sh1add %r1,%r29,%r29
  1120. x255: zdep %r26,23,24,%r1 ! comb,<> %r25,0,l0 ! sub %r1,%r26,%r1 ! b,n ret_t0
  1121. ;1040 insts before this.
  1122. ret_t0: bv 0(r31)
  1123. e_t0: add %r29,%r1,%r29
  1124. e_shift: comb,<> %r25,0,l2
  1125. zdep %r26,23,24,%r26 ; %r26 <<= 8 ***********
  1126. bv,n 0(r31)
  1127. e_t0ma0: comb,<> %r25,0,l0
  1128. sub %r1,%r26,%r1
  1129. bv 0(r31)
  1130. add %r29,%r1,%r29
  1131. e_t0a0: comb,<> %r25,0,l0
  1132. add %r1,%r26,%r1
  1133. bv 0(r31)
  1134. add %r29,%r1,%r29
  1135. e_t02a0: comb,<> %r25,0,l0
  1136. sh1add %r26,%r1,%r1
  1137. bv 0(r31)
  1138. add %r29,%r1,%r29
  1139. e_t04a0: comb,<> %r25,0,l0
  1140. sh2add %r26,%r1,%r1
  1141. bv 0(r31)
  1142. add %r29,%r1,%r29
  1143. e_2t0: comb,<> %r25,0,l1
  1144. sh1add %r1,%r29,%r29
  1145. bv,n 0(r31)
  1146. e_2t0a0: comb,<> %r25,0,l0
  1147. sh1add %r1,%r26,%r1
  1148. bv 0(r31)
  1149. add %r29,%r1,%r29
  1150. e2t04a0: sh1add %r26,%r1,%r1
  1151. comb,<> %r25,0,l1
  1152. sh1add %r1,%r29,%r29
  1153. bv,n 0(r31)
  1154. e_3t0: comb,<> %r25,0,l0
  1155. sh1add %r1,%r1,%r1
  1156. bv 0(r31)
  1157. add %r29,%r1,%r29
  1158. e_4t0: comb,<> %r25,0,l1
  1159. sh2add %r1,%r29,%r29
  1160. bv,n 0(r31)
  1161. e_4t0a0: comb,<> %r25,0,l0
  1162. sh2add %r1,%r26,%r1
  1163. bv 0(r31)
  1164. add %r29,%r1,%r29
  1165. e4t08a0: sh1add %r26,%r1,%r1
  1166. comb,<> %r25,0,l1
  1167. sh2add %r1,%r29,%r29
  1168. bv,n 0(r31)
  1169. e_5t0: comb,<> %r25,0,l0
  1170. sh2add %r1,%r1,%r1
  1171. bv 0(r31)
  1172. add %r29,%r1,%r29
  1173. e_8t0: comb,<> %r25,0,l1
  1174. sh3add %r1,%r29,%r29
  1175. bv,n 0(r31)
  1176. e_8t0a0: comb,<> %r25,0,l0
  1177. sh3add %r1,%r26,%r1
  1178. bv 0(r31)
  1179. add %r29,%r1,%r29
  1180. .procend
  1181. .end
  1182. .import $$divI_2,millicode
  1183. .import $$divI_3,millicode
  1184. .import $$divI_4,millicode
  1185. .import $$divI_5,millicode
  1186. .import $$divI_6,millicode
  1187. .import $$divI_7,millicode
  1188. .import $$divI_8,millicode
  1189. .import $$divI_9,millicode
  1190. .import $$divI_10,millicode
  1191. .import $$divI_12,millicode
  1192. .import $$divI_14,millicode
  1193. .import $$divI_15,millicode
  1194. .export $$divI,millicode
  1195. .export $$divoI,millicode
  1196. $$divoI:
  1197. .proc
  1198. .callinfo NO_CALLS
  1199. comib,=,n -1,arg1,negative1 ; when divisor == -1
  1200. $$divI:
  1201. comib,>>=,n 15,arg1,small_divisor
  1202. add,>= 0,arg0,retreg ; move dividend, if retreg < 0,
  1203. normal1:
  1204. sub 0,retreg,retreg ; make it positive
  1205. sub 0,arg1,temp ; clear carry,
  1206. ; negate the divisor
  1207. ds 0,temp,0 ; set V-bit to the comple-
  1208. ; ment of the divisor sign
  1209. add retreg,retreg,retreg ; shift msb bit into carry
  1210. ds r0,arg1,temp ; 1st divide step, if no carry
  1211. addc retreg,retreg,retreg ; shift retreg with/into carry
  1212. ds temp,arg1,temp ; 2nd divide step
  1213. addc retreg,retreg,retreg ; shift retreg with/into carry
  1214. ds temp,arg1,temp ; 3rd divide step
  1215. addc retreg,retreg,retreg ; shift retreg with/into carry
  1216. ds temp,arg1,temp ; 4th divide step
  1217. addc retreg,retreg,retreg ; shift retreg with/into carry
  1218. ds temp,arg1,temp ; 5th divide step
  1219. addc retreg,retreg,retreg ; shift retreg with/into carry
  1220. ds temp,arg1,temp ; 6th divide step
  1221. addc retreg,retreg,retreg ; shift retreg with/into carry
  1222. ds temp,arg1,temp ; 7th divide step
  1223. addc retreg,retreg,retreg ; shift retreg with/into carry
  1224. ds temp,arg1,temp ; 8th divide step
  1225. addc retreg,retreg,retreg ; shift retreg with/into carry
  1226. ds temp,arg1,temp ; 9th divide step
  1227. addc retreg,retreg,retreg ; shift retreg with/into carry
  1228. ds temp,arg1,temp ; 10th divide step
  1229. addc retreg,retreg,retreg ; shift retreg with/into carry
  1230. ds temp,arg1,temp ; 11th divide step
  1231. addc retreg,retreg,retreg ; shift retreg with/into carry
  1232. ds temp,arg1,temp ; 12th divide step
  1233. addc retreg,retreg,retreg ; shift retreg with/into carry
  1234. ds temp,arg1,temp ; 13th divide step
  1235. addc retreg,retreg,retreg ; shift retreg with/into carry
  1236. ds temp,arg1,temp ; 14th divide step
  1237. addc retreg,retreg,retreg ; shift retreg with/into carry
  1238. ds temp,arg1,temp ; 15th divide step
  1239. addc retreg,retreg,retreg ; shift retreg with/into carry
  1240. ds temp,arg1,temp ; 16th divide step
  1241. addc retreg,retreg,retreg ; shift retreg with/into carry
  1242. ds temp,arg1,temp ; 17th divide step
  1243. addc retreg,retreg,retreg ; shift retreg with/into carry
  1244. ds temp,arg1,temp ; 18th divide step
  1245. addc retreg,retreg,retreg ; shift retreg with/into carry
  1246. ds temp,arg1,temp ; 19th divide step
  1247. addc retreg,retreg,retreg ; shift retreg with/into carry
  1248. ds temp,arg1,temp ; 20th divide step
  1249. addc retreg,retreg,retreg ; shift retreg with/into carry
  1250. ds temp,arg1,temp ; 21st divide step
  1251. addc retreg,retreg,retreg ; shift retreg with/into carry
  1252. ds temp,arg1,temp ; 22nd divide step
  1253. addc retreg,retreg,retreg ; shift retreg with/into carry
  1254. ds temp,arg1,temp ; 23rd divide step
  1255. addc retreg,retreg,retreg ; shift retreg with/into carry
  1256. ds temp,arg1,temp ; 24th divide step
  1257. addc retreg,retreg,retreg ; shift retreg with/into carry
  1258. ds temp,arg1,temp ; 25th divide step
  1259. addc retreg,retreg,retreg ; shift retreg with/into carry
  1260. ds temp,arg1,temp ; 26th divide step
  1261. addc retreg,retreg,retreg ; shift retreg with/into carry
  1262. ds temp,arg1,temp ; 27th divide step
  1263. addc retreg,retreg,retreg ; shift retreg with/into carry
  1264. ds temp,arg1,temp ; 28th divide step
  1265. addc retreg,retreg,retreg ; shift retreg with/into carry
  1266. ds temp,arg1,temp ; 29th divide step
  1267. addc retreg,retreg,retreg ; shift retreg with/into carry
  1268. ds temp,arg1,temp ; 30th divide step
  1269. addc retreg,retreg,retreg ; shift retreg with/into carry
  1270. ds temp,arg1,temp ; 31st divide step
  1271. addc retreg,retreg,retreg ; shift retreg with/into carry
  1272. ds temp,arg1,temp ; 32nd divide step,
  1273. addc retreg,retreg,retreg ; shift last retreg bit into retreg
  1274. xor,>= arg0,arg1,0 ; get correct sign of quotient
  1275. sub 0,retreg,retreg ; based on operand signs
  1276. bv,n 0(r31)
  1277. nop
  1278. ;______________________________________________________________________
  1279. small_divisor:
  1280. blr,n arg1,r0
  1281. nop
  1282. ; table for divisor == 0,1, ... ,15
  1283. addit,= 0,arg1,r0 ; trap if divisor == 0
  1284. nop
  1285. bv 0(r31) ; divisor == 1
  1286. copy arg0,retreg
  1287. b,n $$divI_2 ; divisor == 2
  1288. nop
  1289. b,n $$divI_3 ; divisor == 3
  1290. nop
  1291. b,n $$divI_4 ; divisor == 4
  1292. nop
  1293. b,n $$divI_5 ; divisor == 5
  1294. nop
  1295. b,n $$divI_6 ; divisor == 6
  1296. nop
  1297. b,n $$divI_7 ; divisor == 7
  1298. nop
  1299. b,n $$divI_8 ; divisor == 8
  1300. nop
  1301. b,n $$divI_9 ; divisor == 9
  1302. nop
  1303. b,n $$divI_10 ; divisor == 10
  1304. nop
  1305. b normal1 ; divisor == 11
  1306. add,>= 0,arg0,retreg
  1307. b,n $$divI_12 ; divisor == 12
  1308. nop
  1309. b normal1 ; divisor == 13
  1310. add,>= 0,arg0,retreg
  1311. b,n $$divI_14 ; divisor == 14
  1312. nop
  1313. b,n $$divI_15 ; divisor == 15
  1314. nop
  1315. ;______________________________________________________________________
  1316. negative1:
  1317. sub 0,arg0,retreg ; result is negation of dividend
  1318. bv 0(r31)
  1319. addo arg0,arg1,r0 ; trap iff dividend==0x80000000 && divisor==-1
  1320. .procend
  1321. .end