MacroAssemblerSH4.h 68 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192
  1. /*
  2. * Copyright (C) 2013 Cisco Systems, Inc. All rights reserved.
  3. * Copyright (C) 2009-2011 STMicroelectronics. All rights reserved.
  4. * Copyright (C) 2008 Apple Inc. All rights reserved.
  5. *
  6. * Redistribution and use in source and binary forms, with or without
  7. * modification, are permitted provided that the following conditions
  8. * are met:
  9. * 1. Redistributions of source code must retain the above copyright
  10. * notice, this list of conditions and the following disclaimer.
  11. * 2. Redistributions in binary form must reproduce the above copyright
  12. * notice, this list of conditions and the following disclaimer in the
  13. * documentation and/or other materials provided with the distribution.
  14. *
  15. * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
  16. * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
  17. * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
  18. * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
  19. * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
  20. * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
  21. * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
  22. * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
  23. * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  24. * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  25. * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  26. */
  27. #ifndef MacroAssemblerSH4_h
  28. #define MacroAssemblerSH4_h
  29. #if ENABLE(ASSEMBLER) && CPU(SH4)
#include "SH4Assembler.h"
#include "AbstractMacroAssembler.h"

#include <stdint.h>
#include <wtf/Assertions.h>
  33. namespace JSC {
  34. class MacroAssemblerSH4 : public AbstractMacroAssembler<SH4Assembler> {
  35. public:
  36. typedef SH4Assembler::FPRegisterID FPRegisterID;
  37. static const Scale ScalePtr = TimesFour;
  38. static const FPRegisterID fscratch = SH4Registers::fr10;
  39. static const RegisterID stackPointerRegister = SH4Registers::sp;
  40. static const RegisterID linkRegister = SH4Registers::pr;
  41. static const RegisterID scratchReg3 = SH4Registers::r13;
  42. static const int MaximumCompactPtrAlignedAddressOffset = 60;
  43. static bool isCompactPtrAlignedAddressOffset(ptrdiff_t value)
  44. {
  45. return (value >= 0) && (value <= MaximumCompactPtrAlignedAddressOffset);
  46. }
  47. enum RelationalCondition {
  48. Equal = SH4Assembler::EQ,
  49. NotEqual = SH4Assembler::NE,
  50. Above = SH4Assembler::HI,
  51. AboveOrEqual = SH4Assembler::HS,
  52. Below = SH4Assembler::LI,
  53. BelowOrEqual = SH4Assembler::LS,
  54. GreaterThan = SH4Assembler::GT,
  55. GreaterThanOrEqual = SH4Assembler::GE,
  56. LessThan = SH4Assembler::LT,
  57. LessThanOrEqual = SH4Assembler::LE
  58. };
  59. enum ResultCondition {
  60. Overflow = SH4Assembler::OF,
  61. Signed = SH4Assembler::SI,
  62. PositiveOrZero = SH4Assembler::NS,
  63. Zero = SH4Assembler::EQ,
  64. NonZero = SH4Assembler::NE
  65. };
  66. enum DoubleCondition {
  67. // These conditions will only evaluate to true if the comparison is ordered - i.e. neither operand is NaN.
  68. DoubleEqual = SH4Assembler::EQ,
  69. DoubleNotEqual = SH4Assembler::NE,
  70. DoubleGreaterThan = SH4Assembler::GT,
  71. DoubleGreaterThanOrEqual = SH4Assembler::GE,
  72. DoubleLessThan = SH4Assembler::LT,
  73. DoubleLessThanOrEqual = SH4Assembler::LE,
  74. // If either operand is NaN, these conditions always evaluate to true.
  75. DoubleEqualOrUnordered = SH4Assembler::EQU,
  76. DoubleNotEqualOrUnordered = SH4Assembler::NEU,
  77. DoubleGreaterThanOrUnordered = SH4Assembler::GTU,
  78. DoubleGreaterThanOrEqualOrUnordered = SH4Assembler::GEU,
  79. DoubleLessThanOrUnordered = SH4Assembler::LTU,
  80. DoubleLessThanOrEqualOrUnordered = SH4Assembler::LEU,
  81. };
  82. RegisterID claimScratch()
  83. {
  84. return m_assembler.claimScratch();
  85. }
  86. void releaseScratch(RegisterID reg)
  87. {
  88. m_assembler.releaseScratch(reg);
  89. }
  90. // Integer arithmetic operations
  91. void add32(RegisterID src, RegisterID dest)
  92. {
  93. m_assembler.addlRegReg(src, dest);
  94. }
  95. void add32(TrustedImm32 imm, RegisterID dest)
  96. {
  97. if (!imm.m_value)
  98. return;
  99. if (m_assembler.isImmediate(imm.m_value)) {
  100. m_assembler.addlImm8r(imm.m_value, dest);
  101. return;
  102. }
  103. RegisterID scr = claimScratch();
  104. m_assembler.loadConstant(imm.m_value, scr);
  105. m_assembler.addlRegReg(scr, dest);
  106. releaseScratch(scr);
  107. }
  108. void add32(TrustedImm32 imm, RegisterID src, RegisterID dest)
  109. {
  110. if (src != dest)
  111. m_assembler.movlRegReg(src, dest);
  112. add32(imm, dest);
  113. }
  114. void add32(TrustedImm32 imm, Address address)
  115. {
  116. if (!imm.m_value)
  117. return;
  118. RegisterID scr = claimScratch();
  119. load32(address, scr);
  120. add32(imm, scr);
  121. store32(scr, address);
  122. releaseScratch(scr);
  123. }
  124. void add32(Address src, RegisterID dest)
  125. {
  126. RegisterID scr = claimScratch();
  127. load32(src, scr);
  128. m_assembler.addlRegReg(scr, dest);
  129. releaseScratch(scr);
  130. }
  131. void add32(AbsoluteAddress src, RegisterID dest)
  132. {
  133. RegisterID scr = claimScratch();
  134. load32(src.m_ptr, scr);
  135. m_assembler.addlRegReg(scr, dest);
  136. releaseScratch(scr);
  137. }
  138. void and32(RegisterID src, RegisterID dest)
  139. {
  140. m_assembler.andlRegReg(src, dest);
  141. }
  142. void and32(TrustedImm32 imm, RegisterID dest)
  143. {
  144. if ((imm.m_value <= 255) && (imm.m_value >= 0) && (dest == SH4Registers::r0)) {
  145. m_assembler.andlImm8r(imm.m_value, dest);
  146. return;
  147. }
  148. RegisterID scr = claimScratch();
  149. m_assembler.loadConstant(imm.m_value, scr);
  150. m_assembler.andlRegReg(scr, dest);
  151. releaseScratch(scr);
  152. }
  153. void and32(TrustedImm32 imm, RegisterID src, RegisterID dest)
  154. {
  155. if (src != dest) {
  156. move(imm, dest);
  157. and32(src, dest);
  158. return;
  159. }
  160. and32(imm, dest);
  161. }
  162. void lshift32(RegisterID shiftamount, RegisterID dest)
  163. {
  164. RegisterID shiftTmp = claimScratch();
  165. m_assembler.loadConstant(0x1f, shiftTmp);
  166. m_assembler.andlRegReg(shiftamount, shiftTmp);
  167. m_assembler.shldRegReg(dest, shiftTmp);
  168. releaseScratch(shiftTmp);
  169. }
  170. void lshift32(TrustedImm32 imm, RegisterID dest)
  171. {
  172. int immMasked = imm.m_value & 0x1f;
  173. if (!immMasked)
  174. return;
  175. if ((immMasked == 1) || (immMasked == 2) || (immMasked == 8) || (immMasked == 16)) {
  176. m_assembler.shllImm8r(immMasked, dest);
  177. return;
  178. }
  179. RegisterID shiftTmp = claimScratch();
  180. m_assembler.loadConstant(immMasked, shiftTmp);
  181. m_assembler.shldRegReg(dest, shiftTmp);
  182. releaseScratch(shiftTmp);
  183. }
  184. void lshift32(RegisterID src, TrustedImm32 shiftamount, RegisterID dest)
  185. {
  186. if (src != dest)
  187. move(src, dest);
  188. lshift32(shiftamount, dest);
  189. }
  190. void mul32(RegisterID src, RegisterID dest)
  191. {
  192. m_assembler.imullRegReg(src, dest);
  193. m_assembler.stsmacl(dest);
  194. }
  195. void mul32(TrustedImm32 imm, RegisterID src, RegisterID dest)
  196. {
  197. RegisterID scr = claimScratch();
  198. move(imm, scr);
  199. if (src != dest)
  200. move(src, dest);
  201. mul32(scr, dest);
  202. releaseScratch(scr);
  203. }
  204. void or32(RegisterID src, RegisterID dest)
  205. {
  206. m_assembler.orlRegReg(src, dest);
  207. }
  208. void or32(TrustedImm32 imm, RegisterID dest)
  209. {
  210. if ((imm.m_value <= 255) && (imm.m_value >= 0) && (dest == SH4Registers::r0)) {
  211. m_assembler.orlImm8r(imm.m_value, dest);
  212. return;
  213. }
  214. RegisterID scr = claimScratch();
  215. m_assembler.loadConstant(imm.m_value, scr);
  216. m_assembler.orlRegReg(scr, dest);
  217. releaseScratch(scr);
  218. }
  219. void or32(RegisterID op1, RegisterID op2, RegisterID dest)
  220. {
  221. if (op1 == op2)
  222. move(op1, dest);
  223. else if (op1 == dest)
  224. or32(op2, dest);
  225. else {
  226. move(op2, dest);
  227. or32(op1, dest);
  228. }
  229. }
  230. void or32(TrustedImm32 imm, RegisterID src, RegisterID dest)
  231. {
  232. if (src != dest) {
  233. move(imm, dest);
  234. or32(src, dest);
  235. return;
  236. }
  237. or32(imm, dest);
  238. }
  239. void xor32(TrustedImm32 imm, RegisterID src, RegisterID dest)
  240. {
  241. if (src != dest) {
  242. move(imm, dest);
  243. xor32(src, dest);
  244. return;
  245. }
  246. xor32(imm, dest);
  247. }
  248. void rshift32(RegisterID shiftamount, RegisterID dest)
  249. {
  250. RegisterID shiftTmp = claimScratch();
  251. m_assembler.loadConstant(0x1f, shiftTmp);
  252. m_assembler.andlRegReg(shiftamount, shiftTmp);
  253. m_assembler.neg(shiftTmp, shiftTmp);
  254. m_assembler.shadRegReg(dest, shiftTmp);
  255. releaseScratch(shiftTmp);
  256. }
  257. void rshift32(TrustedImm32 imm, RegisterID dest)
  258. {
  259. int immMasked = imm.m_value & 0x1f;
  260. if (!immMasked)
  261. return;
  262. if (immMasked == 1) {
  263. m_assembler.sharImm8r(immMasked, dest);
  264. return;
  265. }
  266. RegisterID shiftTmp = claimScratch();
  267. m_assembler.loadConstant(-immMasked, shiftTmp);
  268. m_assembler.shadRegReg(dest, shiftTmp);
  269. releaseScratch(shiftTmp);
  270. }
  271. void rshift32(RegisterID src, TrustedImm32 imm, RegisterID dest)
  272. {
  273. if (src != dest)
  274. move(src, dest);
  275. rshift32(imm, dest);
  276. }
  277. void sub32(RegisterID src, RegisterID dest)
  278. {
  279. m_assembler.sublRegReg(src, dest);
  280. }
  281. void sub32(TrustedImm32 imm, AbsoluteAddress address)
  282. {
  283. if (!imm.m_value)
  284. return;
  285. RegisterID result = claimScratch();
  286. RegisterID scratchReg = claimScratch();
  287. m_assembler.loadConstant(reinterpret_cast<uint32_t>(address.m_ptr), scratchReg);
  288. m_assembler.movlMemReg(scratchReg, result);
  289. if (m_assembler.isImmediate(-imm.m_value))
  290. m_assembler.addlImm8r(-imm.m_value, result);
  291. else {
  292. m_assembler.loadConstant(imm.m_value, scratchReg3);
  293. m_assembler.sublRegReg(scratchReg3, result);
  294. }
  295. store32(result, scratchReg);
  296. releaseScratch(result);
  297. releaseScratch(scratchReg);
  298. }
  299. void add32(TrustedImm32 imm, AbsoluteAddress address)
  300. {
  301. if (!imm.m_value)
  302. return;
  303. RegisterID result = claimScratch();
  304. RegisterID scratchReg = claimScratch();
  305. m_assembler.loadConstant(reinterpret_cast<uint32_t>(address.m_ptr), scratchReg);
  306. m_assembler.movlMemReg(scratchReg, result);
  307. if (m_assembler.isImmediate(imm.m_value))
  308. m_assembler.addlImm8r(imm.m_value, result);
  309. else {
  310. m_assembler.loadConstant(imm.m_value, scratchReg3);
  311. m_assembler.addlRegReg(scratchReg3, result);
  312. }
  313. store32(result, scratchReg);
  314. releaseScratch(result);
  315. releaseScratch(scratchReg);
  316. }
  317. void add64(TrustedImm32 imm, AbsoluteAddress address)
  318. {
  319. RegisterID scr1 = claimScratch();
  320. RegisterID scr2 = claimScratch();
  321. // Add 32-bit LSB first.
  322. m_assembler.loadConstant(reinterpret_cast<uint32_t>(address.m_ptr), scr1);
  323. m_assembler.movlMemReg(scr1, scr1); // scr1 = 32-bit LSB of int64 @ address
  324. m_assembler.loadConstant(imm.m_value, scr2);
  325. m_assembler.clrt();
  326. m_assembler.addclRegReg(scr1, scr2);
  327. m_assembler.loadConstant(reinterpret_cast<uint32_t>(address.m_ptr), scr1);
  328. m_assembler.movlRegMem(scr2, scr1); // Update address with 32-bit LSB result.
  329. // Then add 32-bit MSB.
  330. m_assembler.addlImm8r(4, scr1);
  331. m_assembler.movlMemReg(scr1, scr1); // scr1 = 32-bit MSB of int64 @ address
  332. m_assembler.movt(scr2);
  333. if (imm.m_value < 0)
  334. m_assembler.addlImm8r(-1, scr2); // Sign extend imm value if needed.
  335. m_assembler.addvlRegReg(scr2, scr1);
  336. m_assembler.loadConstant(reinterpret_cast<uint32_t>(address.m_ptr) + 4, scr2);
  337. m_assembler.movlRegMem(scr1, scr2); // Update (address + 4) with 32-bit MSB result.
  338. releaseScratch(scr2);
  339. releaseScratch(scr1);
  340. }
  341. void sub32(TrustedImm32 imm, RegisterID dest)
  342. {
  343. if (!imm.m_value)
  344. return;
  345. if (m_assembler.isImmediate(-imm.m_value)) {
  346. m_assembler.addlImm8r(-imm.m_value, dest);
  347. return;
  348. }
  349. RegisterID scr = claimScratch();
  350. m_assembler.loadConstant(imm.m_value, scr);
  351. m_assembler.sublRegReg(scr, dest);
  352. releaseScratch(scr);
  353. }
  354. void sub32(Address src, RegisterID dest)
  355. {
  356. RegisterID scr = claimScratch();
  357. load32(src, scr);
  358. m_assembler.sublRegReg(scr, dest);
  359. releaseScratch(scr);
  360. }
  361. void xor32(RegisterID src, RegisterID dest)
  362. {
  363. m_assembler.xorlRegReg(src, dest);
  364. }
  365. void xor32(TrustedImm32 imm, RegisterID srcDest)
  366. {
  367. if (imm.m_value == -1) {
  368. m_assembler.notlReg(srcDest, srcDest);
  369. return;
  370. }
  371. if ((srcDest != SH4Registers::r0) || (imm.m_value > 255) || (imm.m_value < 0)) {
  372. RegisterID scr = claimScratch();
  373. m_assembler.loadConstant(imm.m_value, scr);
  374. m_assembler.xorlRegReg(scr, srcDest);
  375. releaseScratch(scr);
  376. return;
  377. }
  378. m_assembler.xorlImm8r(imm.m_value, srcDest);
  379. }
  380. void compare32(int imm, RegisterID dst, RelationalCondition cond)
  381. {
  382. if (((cond == Equal) || (cond == NotEqual)) && (dst == SH4Registers::r0) && m_assembler.isImmediate(imm)) {
  383. m_assembler.cmpEqImmR0(imm, dst);
  384. return;
  385. }
  386. RegisterID scr = claimScratch();
  387. m_assembler.loadConstant(imm, scr);
  388. m_assembler.cmplRegReg(scr, dst, SH4Condition(cond));
  389. releaseScratch(scr);
  390. }
  391. void compare32(int offset, RegisterID base, RegisterID left, RelationalCondition cond)
  392. {
  393. RegisterID scr = claimScratch();
  394. if (!offset) {
  395. m_assembler.movlMemReg(base, scr);
  396. m_assembler.cmplRegReg(scr, left, SH4Condition(cond));
  397. releaseScratch(scr);
  398. return;
  399. }
  400. if ((offset < 0) || (offset >= 64)) {
  401. m_assembler.loadConstant(offset, scr);
  402. m_assembler.addlRegReg(base, scr);
  403. m_assembler.movlMemReg(scr, scr);
  404. m_assembler.cmplRegReg(scr, left, SH4Condition(cond));
  405. releaseScratch(scr);
  406. return;
  407. }
  408. m_assembler.movlMemReg(offset >> 2, base, scr);
  409. m_assembler.cmplRegReg(scr, left, SH4Condition(cond));
  410. releaseScratch(scr);
  411. }
  412. void testImm(int imm, int offset, RegisterID base)
  413. {
  414. RegisterID scr = claimScratch();
  415. RegisterID scr1 = claimScratch();
  416. if ((offset < 0) || (offset >= 64)) {
  417. m_assembler.loadConstant(offset, scr);
  418. m_assembler.addlRegReg(base, scr);
  419. m_assembler.movlMemReg(scr, scr);
  420. } else if (offset)
  421. m_assembler.movlMemReg(offset >> 2, base, scr);
  422. else
  423. m_assembler.movlMemReg(base, scr);
  424. if (m_assembler.isImmediate(imm))
  425. m_assembler.movImm8(imm, scr1);
  426. else
  427. m_assembler.loadConstant(imm, scr1);
  428. m_assembler.testlRegReg(scr, scr1);
  429. releaseScratch(scr);
  430. releaseScratch(scr1);
  431. }
  432. void testlImm(int imm, RegisterID dst)
  433. {
  434. if ((dst == SH4Registers::r0) && (imm <= 255) && (imm >= 0)) {
  435. m_assembler.testlImm8r(imm, dst);
  436. return;
  437. }
  438. RegisterID scr = claimScratch();
  439. m_assembler.loadConstant(imm, scr);
  440. m_assembler.testlRegReg(scr, dst);
  441. releaseScratch(scr);
  442. }
  443. void compare32(RegisterID right, int offset, RegisterID base, RelationalCondition cond)
  444. {
  445. if (!offset) {
  446. RegisterID scr = claimScratch();
  447. m_assembler.movlMemReg(base, scr);
  448. m_assembler.cmplRegReg(right, scr, SH4Condition(cond));
  449. releaseScratch(scr);
  450. return;
  451. }
  452. if ((offset < 0) || (offset >= 64)) {
  453. RegisterID scr = claimScratch();
  454. m_assembler.loadConstant(offset, scr);
  455. m_assembler.addlRegReg(base, scr);
  456. m_assembler.movlMemReg(scr, scr);
  457. m_assembler.cmplRegReg(right, scr, SH4Condition(cond));
  458. releaseScratch(scr);
  459. return;
  460. }
  461. RegisterID scr = claimScratch();
  462. m_assembler.movlMemReg(offset >> 2, base, scr);
  463. m_assembler.cmplRegReg(right, scr, SH4Condition(cond));
  464. releaseScratch(scr);
  465. }
  466. void compare32(int imm, int offset, RegisterID base, RelationalCondition cond)
  467. {
  468. if (!offset) {
  469. RegisterID scr = claimScratch();
  470. RegisterID scr1 = claimScratch();
  471. m_assembler.movlMemReg(base, scr);
  472. m_assembler.loadConstant(imm, scr1);
  473. m_assembler.cmplRegReg(scr1, scr, SH4Condition(cond));
  474. releaseScratch(scr1);
  475. releaseScratch(scr);
  476. return;
  477. }
  478. if ((offset < 0) || (offset >= 64)) {
  479. RegisterID scr = claimScratch();
  480. RegisterID scr1 = claimScratch();
  481. m_assembler.loadConstant(offset, scr);
  482. m_assembler.addlRegReg(base, scr);
  483. m_assembler.movlMemReg(scr, scr);
  484. m_assembler.loadConstant(imm, scr1);
  485. m_assembler.cmplRegReg(scr1, scr, SH4Condition(cond));
  486. releaseScratch(scr1);
  487. releaseScratch(scr);
  488. return;
  489. }
  490. RegisterID scr = claimScratch();
  491. RegisterID scr1 = claimScratch();
  492. m_assembler.movlMemReg(offset >> 2, base, scr);
  493. m_assembler.loadConstant(imm, scr1);
  494. m_assembler.cmplRegReg(scr1, scr, SH4Condition(cond));
  495. releaseScratch(scr1);
  496. releaseScratch(scr);
  497. }
  498. // Memory access operation
  499. void load32(ImplicitAddress address, RegisterID dest)
  500. {
  501. load32(address.base, address.offset, dest);
  502. }
  503. void load8(ImplicitAddress address, RegisterID dest)
  504. {
  505. load8(address.base, address.offset, dest);
  506. }
  507. void load8(BaseIndex address, RegisterID dest)
  508. {
  509. RegisterID scr = claimScratch();
  510. move(address.index, scr);
  511. lshift32(TrustedImm32(address.scale), scr);
  512. add32(address.base, scr);
  513. load8(scr, address.offset, dest);
  514. releaseScratch(scr);
  515. }
  516. void load8PostInc(RegisterID base, RegisterID dest)
  517. {
  518. m_assembler.movbMemRegIn(base, dest);
  519. m_assembler.extub(dest, dest);
  520. }
  521. void load8Signed(BaseIndex address, RegisterID dest)
  522. {
  523. RegisterID scr = claimScratch();
  524. move(address.index, scr);
  525. lshift32(TrustedImm32(address.scale), scr);
  526. add32(address.base, scr);
  527. load8Signed(scr, address.offset, dest);
  528. releaseScratch(scr);
  529. }
  530. void load32(BaseIndex address, RegisterID dest)
  531. {
  532. RegisterID scr = claimScratch();
  533. move(address.index, scr);
  534. lshift32(TrustedImm32(address.scale), scr);
  535. add32(address.base, scr);
  536. load32(scr, address.offset, dest);
  537. releaseScratch(scr);
  538. }
  539. void load32(const void* address, RegisterID dest)
  540. {
  541. m_assembler.loadConstant(reinterpret_cast<uint32_t>(const_cast<void*>(address)), dest);
  542. m_assembler.movlMemReg(dest, dest);
  543. }
  544. void load32(RegisterID base, int offset, RegisterID dest)
  545. {
  546. if (!offset) {
  547. m_assembler.movlMemReg(base, dest);
  548. return;
  549. }
  550. if ((offset >= 0) && (offset < 64)) {
  551. m_assembler.movlMemReg(offset >> 2, base, dest);
  552. return;
  553. }
  554. RegisterID scr = (dest == base) ? claimScratch() : dest;
  555. m_assembler.loadConstant(offset, scr);
  556. if (base == SH4Registers::r0)
  557. m_assembler.movlR0mr(scr, dest);
  558. else {
  559. m_assembler.addlRegReg(base, scr);
  560. m_assembler.movlMemReg(scr, dest);
  561. }
  562. if (dest == base)
  563. releaseScratch(scr);
  564. }
  565. void load8Signed(RegisterID base, int offset, RegisterID dest)
  566. {
  567. if (!offset) {
  568. m_assembler.movbMemReg(base, dest);
  569. return;
  570. }
  571. if ((offset > 0) && (offset <= 15) && (dest == SH4Registers::r0)) {
  572. m_assembler.movbMemReg(offset, base, dest);
  573. return;
  574. }
  575. RegisterID scr = (dest == base) ? claimScratch() : dest;
  576. m_assembler.loadConstant(offset, scr);
  577. if (base == SH4Registers::r0)
  578. m_assembler.movbR0mr(scr, dest);
  579. else {
  580. m_assembler.addlRegReg(base, scr);
  581. m_assembler.movbMemReg(scr, dest);
  582. }
  583. if (dest == base)
  584. releaseScratch(scr);
  585. }
  586. void load8(RegisterID base, int offset, RegisterID dest)
  587. {
  588. load8Signed(base, offset, dest);
  589. m_assembler.extub(dest, dest);
  590. }
  591. void load32(RegisterID src, RegisterID dst)
  592. {
  593. m_assembler.movlMemReg(src, dst);
  594. }
  595. void load16(ImplicitAddress address, RegisterID dest)
  596. {
  597. if (!address.offset) {
  598. m_assembler.movwMemReg(address.base, dest);
  599. m_assembler.extuw(dest, dest);
  600. return;
  601. }
  602. if ((address.offset > 0) && (address.offset <= 30) && (dest == SH4Registers::r0)) {
  603. m_assembler.movwMemReg(address.offset >> 1, address.base, dest);
  604. m_assembler.extuw(dest, dest);
  605. return;
  606. }
  607. RegisterID scr = (dest == address.base) ? claimScratch() : dest;
  608. m_assembler.loadConstant(address.offset, scr);
  609. if (address.base == SH4Registers::r0)
  610. m_assembler.movwR0mr(scr, dest);
  611. else {
  612. m_assembler.addlRegReg(address.base, scr);
  613. m_assembler.movwMemReg(scr, dest);
  614. }
  615. m_assembler.extuw(dest, dest);
  616. if (dest == address.base)
  617. releaseScratch(scr);
  618. }
  619. void load16Unaligned(BaseIndex address, RegisterID dest)
  620. {
  621. RegisterID scr = claimScratch();
  622. RegisterID scr1 = claimScratch();
  623. move(address.index, scr);
  624. lshift32(TrustedImm32(address.scale), scr);
  625. if (address.offset)
  626. add32(TrustedImm32(address.offset), scr);
  627. add32(address.base, scr);
  628. load8PostInc(scr, scr1);
  629. load8(scr, dest);
  630. m_assembler.shllImm8r(8, dest);
  631. or32(scr1, dest);
  632. releaseScratch(scr);
  633. releaseScratch(scr1);
  634. }
  635. void load16(RegisterID src, RegisterID dest)
  636. {
  637. m_assembler.movwMemReg(src, dest);
  638. m_assembler.extuw(dest, dest);
  639. }
  640. void load16Signed(RegisterID src, RegisterID dest)
  641. {
  642. m_assembler.movwMemReg(src, dest);
  643. }
  644. void load16(BaseIndex address, RegisterID dest)
  645. {
  646. load16Signed(address, dest);
  647. m_assembler.extuw(dest, dest);
  648. }
  649. void load16PostInc(RegisterID base, RegisterID dest)
  650. {
  651. m_assembler.movwMemRegIn(base, dest);
  652. m_assembler.extuw(dest, dest);
  653. }
  654. void load16Signed(BaseIndex address, RegisterID dest)
  655. {
  656. RegisterID scr = claimScratch();
  657. move(address.index, scr);
  658. lshift32(TrustedImm32(address.scale), scr);
  659. if (address.offset)
  660. add32(TrustedImm32(address.offset), scr);
  661. if (address.base == SH4Registers::r0)
  662. m_assembler.movwR0mr(scr, dest);
  663. else {
  664. add32(address.base, scr);
  665. load16Signed(scr, dest);
  666. }
  667. releaseScratch(scr);
  668. }
// Store the low byte of |src| to base + (index << scale) + offset.
void store8(RegisterID src, BaseIndex address)
{
    RegisterID scr = claimScratch();
    // scr = (index << scale) + offset
    move(address.index, scr);
    lshift32(TrustedImm32(address.scale), scr);
    add32(TrustedImm32(address.offset), scr);
    if (address.base == SH4Registers::r0)
        m_assembler.movbRegMemr0(src, scr); // indexed @(r0, scr) store
    else {
        add32(address.base, scr);
        m_assembler.movbRegMem(src, scr);
    }
    releaseScratch(scr);
}

// Store the low halfword of |src| to base + (index << scale) + offset.
void store16(RegisterID src, BaseIndex address)
{
    RegisterID scr = claimScratch();
    // scr = (index << scale) + offset
    move(address.index, scr);
    lshift32(TrustedImm32(address.scale), scr);
    add32(TrustedImm32(address.offset), scr);
    if (address.base == SH4Registers::r0)
        m_assembler.movwRegMemr0(src, scr); // indexed @(r0, scr) store
    else {
        add32(address.base, scr);
        m_assembler.movwRegMem(src, scr);
    }
    releaseScratch(scr);
}
// Store |src| to base + offset, using the shortest encoding available.
void store32(RegisterID src, ImplicitAddress address)
{
    if (!address.offset) {
        // Zero offset: plain indirect store.
        m_assembler.movlRegMem(src, address.base);
        return;
    }
    if ((address.offset >= 0) && (address.offset < 64)) {
        // Small positive offset: displacement form, encoded as offset/4.
        // NOTE(review): offset >> 2 drops the low bits — presumably offsets
        // reaching this path are 4-byte aligned; confirm with callers.
        m_assembler.movlRegMem(src, address.offset >> 2, address.base);
        return;
    }
    // Large offset: materialize it in a scratch register.
    RegisterID scr = claimScratch();
    m_assembler.loadConstant(address.offset, scr);
    if (address.base == SH4Registers::r0)
        m_assembler.movlRegMemr0(src, scr); // indexed @(r0, scr) store
    else {
        m_assembler.addlRegReg(address.base, scr);
        m_assembler.movlRegMem(src, scr);
    }
    releaseScratch(scr);
}

// Store |src| to the address held in |dst|.
void store32(RegisterID src, RegisterID dst)
{
    m_assembler.movlRegMem(src, dst);
}
  721. void store32(TrustedImm32 imm, ImplicitAddress address)
  722. {
  723. RegisterID scr = claimScratch();
  724. m_assembler.loadConstant(imm.m_value, scr);
  725. store32(scr, address);
  726. releaseScratch(scr);
  727. }
  728. void store32(RegisterID src, BaseIndex address)
  729. {
  730. RegisterID scr = claimScratch();
  731. move(address.index, scr);
  732. lshift32(TrustedImm32(address.scale), scr);
  733. add32(address.base, scr);
  734. store32(src, Address(scr, address.offset));
  735. releaseScratch(scr);
  736. }
// Store a 32-bit immediate to an absolute address.
void store32(TrustedImm32 imm, void* address)
{
    RegisterID scr = claimScratch();
    RegisterID scr1 = claimScratch();
    m_assembler.loadConstant(imm.m_value, scr);
    m_assembler.loadConstant(reinterpret_cast<uint32_t>(address), scr1);
    m_assembler.movlRegMem(scr, scr1);
    releaseScratch(scr);
    releaseScratch(scr1);
}

// Store |src| to an absolute address.
void store32(RegisterID src, void* address)
{
    RegisterID scr = claimScratch();
    m_assembler.loadConstant(reinterpret_cast<uint32_t>(address), scr);
    m_assembler.movlRegMem(src, scr);
    releaseScratch(scr);
}
// Load from base + offset where the 32-bit offset can be patched later.
// The label must sit immediately before the constant load, and the constant
// is emitted with loadConstantUnReusable so it has a fixed, patchable shape.
DataLabel32 load32WithAddressOffsetPatch(Address address, RegisterID dest)
{
    RegisterID scr = claimScratch();
    DataLabel32 label(this);
    m_assembler.loadConstantUnReusable(address.offset, scr);
    m_assembler.addlRegReg(address.base, scr);
    m_assembler.movlMemReg(scr, dest);
    releaseScratch(scr);
    return label;
}

// Store to base + offset with a patchable 32-bit offset; mirror of the
// patchable load above.
DataLabel32 store32WithAddressOffsetPatch(RegisterID src, Address address)
{
    RegisterID scr = claimScratch();
    DataLabel32 label(this);
    m_assembler.loadConstantUnReusable(address.offset, scr);
    m_assembler.addlRegReg(address.base, scr);
    m_assembler.movlRegMem(src, scr);
    releaseScratch(scr);
    return label;
}

// Load with a compact (instruction-embedded) patchable offset. The offset is
// encoded as offset/4 in the displacement field, so it must be small,
// non-negative and — presumably — 4-byte aligned.
DataLabelCompact load32WithCompactAddressOffsetPatch(Address address, RegisterID dest)
{
    DataLabelCompact dataLabel(this);
    ASSERT(address.offset <= MaximumCompactPtrAlignedAddressOffset);
    ASSERT(address.offset >= 0);
    m_assembler.movlMemRegCompact(address.offset >> 2, address.base, dest);
    return dataLabel;
}
// Emit a pointer load from base + offset whose instruction sequence can later
// be converted (see ConvertibleLoadLabel). The offset is emitted with movImm8,
// which — going by its name — only encodes a signed 8-bit immediate; callers
// presumably guarantee the offset fits. TODO(review): confirm the range.
ConvertibleLoadLabel convertibleLoadPtr(Address address, RegisterID dest)
{
    ConvertibleLoadLabel result(this);
    RegisterID scr = claimScratch();
    m_assembler.movImm8(address.offset, scr);
    m_assembler.addlRegReg(address.base, scr);
    m_assembler.movlMemReg(scr, dest);
    releaseScratch(scr);
    return result;
}
// Floating-point operations
// Capability predicates: this back end implements all four features below
// (see the double/sqrt/abs/truncate emitters later in this class).
static bool supportsFloatingPoint() { return true; }
static bool supportsFloatingPointTruncate() { return true; }
static bool supportsFloatingPointSqrt() { return true; }
static bool supportsFloatingPointAbs() { return true; }
// A double occupies the single-precision register pair (src, src + 1)
// throughout this file; FPUL is the staging register for FPU<->GPR transfers.
// dest1 receives the half held in src + 1, dest2 the half held in src.
// (Which half is the mantissa low word depends on FPU mode — not asserted here.)
void moveDoubleToInts(FPRegisterID src, RegisterID dest1, RegisterID dest2)
{
    m_assembler.fldsfpul((FPRegisterID)(src + 1));
    m_assembler.stsfpulReg(dest1);
    m_assembler.fldsfpul(src);
    m_assembler.stsfpulReg(dest2);
}

// Inverse of moveDoubleToInts: src1 lands in dest + 1, src2 in dest.
// |scratch| is unused on SH4 (transfers go through FPUL instead).
void moveIntsToDouble(RegisterID src1, RegisterID src2, FPRegisterID dest, FPRegisterID scratch)
{
    UNUSED_PARAM(scratch);
    m_assembler.ldsrmfpul(src1);
    m_assembler.fstsfpul((FPRegisterID)(dest + 1));
    m_assembler.ldsrmfpul(src2);
    m_assembler.fstsfpul(dest);
}

// Copy a double by moving both singles of the register pair. No-op when the
// registers already coincide.
void moveDouble(FPRegisterID src, FPRegisterID dest)
{
    if (src != dest) {
        m_assembler.fmovsRegReg((FPRegisterID)(src + 1), (FPRegisterID)(dest + 1));
        m_assembler.fmovsRegReg(src, dest);
    }
}
// Load a single-precision float from base + (index << scale) + offset.
void loadFloat(BaseIndex address, FPRegisterID dest)
{
    RegisterID scr = claimScratch();
    // scr = base + (index << scale) + offset
    move(address.index, scr);
    lshift32(TrustedImm32(address.scale), scr);
    add32(address.base, scr);
    if (address.offset)
        add32(TrustedImm32(address.offset), scr);
    m_assembler.fmovsReadrm(scr, dest);
    releaseScratch(scr);
}

// Load a double from base + (index << scale) + offset: the register pair is
// filled with two single loads, the first using post-increment addressing to
// step to the second word.
void loadDouble(BaseIndex address, FPRegisterID dest)
{
    RegisterID scr = claimScratch();
    move(address.index, scr);
    lshift32(TrustedImm32(address.scale), scr);
    add32(address.base, scr);
    if (address.offset)
        add32(TrustedImm32(address.offset), scr);
    m_assembler.fmovsReadrminc(scr, (FPRegisterID)(dest + 1)); // fmov.s @scr+
    m_assembler.fmovsReadrm(scr, dest);
    releaseScratch(scr);
}
// Load a double from base + offset as two single loads into the (dest + 1,
// dest) register pair.
void loadDouble(ImplicitAddress address, FPRegisterID dest)
{
    RegisterID scr = claimScratch();
    m_assembler.loadConstant(address.offset, scr);
    if (address.base == SH4Registers::r0) {
        // r0 base: use the indexed @(r0, scr) form, bumping scr by 4 between
        // the two word loads.
        m_assembler.fmovsReadr0r(scr, (FPRegisterID)(dest + 1));
        m_assembler.addlImm8r(4, scr);
        m_assembler.fmovsReadr0r(scr, dest);
        releaseScratch(scr);
        return;
    }
    m_assembler.addlRegReg(address.base, scr);
    m_assembler.fmovsReadrminc(scr, (FPRegisterID)(dest + 1)); // post-increment
    m_assembler.fmovsReadrm(scr, dest);
    releaseScratch(scr);
}

// Load a double from an absolute address.
void loadDouble(const void* address, FPRegisterID dest)
{
    RegisterID scr = claimScratch();
    m_assembler.loadConstant(reinterpret_cast<uint32_t>(address), scr);
    m_assembler.fmovsReadrminc(scr, (FPRegisterID)(dest + 1));
    m_assembler.fmovsReadrm(scr, dest);
    releaseScratch(scr);
}
// Store a single-precision float to base + (index << scale) + offset.
void storeFloat(FPRegisterID src, BaseIndex address)
{
    RegisterID scr = claimScratch();
    move(address.index, scr);
    lshift32(TrustedImm32(address.scale), scr);
    add32(address.base, scr);
    if (address.offset)
        add32(TrustedImm32(address.offset), scr);
    m_assembler.fmovsWriterm(src, scr);
    releaseScratch(scr);
}

// Store a double to base + offset. The pointer is set one double past the
// slot (offset + 8) and the two singles are stored with pre-decrement, which
// writes them in the same order loadDouble reads them.
void storeDouble(FPRegisterID src, ImplicitAddress address)
{
    RegisterID scr = claimScratch();
    m_assembler.loadConstant(address.offset + 8, scr);
    m_assembler.addlRegReg(address.base, scr);
    m_assembler.fmovsWriterndec(src, scr);                      // @-scr
    m_assembler.fmovsWriterndec((FPRegisterID)(src + 1), scr);  // @-scr
    releaseScratch(scr);
}

// Store a double to base + (index << scale) + offset, same pre-decrement
// scheme as above.
void storeDouble(FPRegisterID src, BaseIndex address)
{
    RegisterID scr = claimScratch();
    move(address.index, scr);
    lshift32(TrustedImm32(address.scale), scr);
    add32(address.base, scr);
    add32(TrustedImm32(address.offset + 8), scr);
    m_assembler.fmovsWriterndec(src, scr);
    m_assembler.fmovsWriterndec((FPRegisterID)(src + 1), scr);
    releaseScratch(scr);
}
  897. void addDouble(FPRegisterID op1, FPRegisterID op2, FPRegisterID dest)
  898. {
  899. if (op1 == dest)
  900. addDouble(op2, dest);
  901. else {
  902. moveDouble(op2, dest);
  903. addDouble(op1, dest);
  904. }
  905. }
  906. void addDouble(FPRegisterID src, FPRegisterID dest)
  907. {
  908. m_assembler.daddRegReg(src, dest);
  909. }
  910. void addDouble(AbsoluteAddress address, FPRegisterID dest)
  911. {
  912. loadDouble(address.m_ptr, fscratch);
  913. addDouble(fscratch, dest);
  914. }
  915. void addDouble(Address address, FPRegisterID dest)
  916. {
  917. loadDouble(address, fscratch);
  918. addDouble(fscratch, dest);
  919. }
  920. void subDouble(FPRegisterID src, FPRegisterID dest)
  921. {
  922. m_assembler.dsubRegReg(src, dest);
  923. }
  924. void subDouble(Address address, FPRegisterID dest)
  925. {
  926. loadDouble(address, fscratch);
  927. subDouble(fscratch, dest);
  928. }
  929. void mulDouble(FPRegisterID src, FPRegisterID dest)
  930. {
  931. m_assembler.dmulRegReg(src, dest);
  932. }
  933. void mulDouble(Address address, FPRegisterID dest)
  934. {
  935. loadDouble(address, fscratch);
  936. mulDouble(fscratch, dest);
  937. }
  938. void divDouble(FPRegisterID src, FPRegisterID dest)
  939. {
  940. m_assembler.ddivRegReg(src, dest);
  941. }
// Widen a single to a double, staging the value through FPUL.
void convertFloatToDouble(FPRegisterID src, FPRegisterID dst)
{
    m_assembler.fldsfpul(src);
    m_assembler.dcnvsd(dst);
}

// Narrow a double to a single, staging the result through FPUL.
void convertDoubleToFloat(FPRegisterID src, FPRegisterID dst)
{
    m_assembler.dcnvds(src);
    m_assembler.fstsfpul(dst);
}

// Convert a 32-bit integer to a double via FPUL.
void convertInt32ToDouble(RegisterID src, FPRegisterID dest)
{
    m_assembler.ldsrmfpul(src);
    m_assembler.floatfpulDreg(dest);
}

// Convert the int32 at an absolute address to a double.
void convertInt32ToDouble(AbsoluteAddress src, FPRegisterID dest)
{
    RegisterID scr = claimScratch();
    load32(src.m_ptr, scr);
    convertInt32ToDouble(scr, dest);
    releaseScratch(scr);
}

// Convert the int32 at base + offset to a double.
void convertInt32ToDouble(Address src, FPRegisterID dest)
{
    RegisterID scr = claimScratch();
    load32(src, scr);
    convertInt32ToDouble(scr, dest);
    releaseScratch(scr);
}
// Load a 32-bit value from base + (index << scale) + offset without assuming
// 4-byte alignment. Dispatches at runtime on the low two address bits:
// aligned -> one load32; halfword-aligned -> two 16-bit loads merged; odd ->
// byte + halfword + byte merged. Pieces are combined low-to-high
// (byte0 | half << 8 | byte3 << 24) — presumably little-endian mode; confirm.
void load32WithUnalignedHalfWords(BaseIndex address, RegisterID dest)
{
    RegisterID scr = claimScratch();
    RegisterID scr1 = claimScratch();
    Jump m_jump;
    JumpList end;
    // The tst-immediate forms below only operate on r0, so the caller's r0 is
    // saved in scr1 (unless r0 is the destination and will be clobbered anyway).
    if (dest != SH4Registers::r0)
        move(SH4Registers::r0, scr1);
    // scr = base + (index << scale) + offset
    move(address.index, scr);
    lshift32(TrustedImm32(address.scale), scr);
    add32(address.base, scr);
    if (address.offset)
        add32(TrustedImm32(address.offset), scr);
    // Reserve space up front so every near jump below stays within range.
    m_assembler.ensureSpace(m_assembler.maxInstructionSize + 58, sizeof(uint32_t));
    move(scr, SH4Registers::r0);
    m_assembler.testlImm8r(0x3, SH4Registers::r0);
    m_jump = Jump(m_assembler.jne(), SH4Assembler::JumpNear); // taken when misaligned
    // Aligned path.
    if (dest != SH4Registers::r0)
        move(scr1, SH4Registers::r0); // restore caller's r0
    load32(scr, dest);
    end.append(Jump(m_assembler.bra(), SH4Assembler::JumpNear));
    m_assembler.nop(); // branch delay slot
    m_jump.link(this);
    // Misaligned: distinguish halfword-aligned (bit 0 clear) from odd.
    m_assembler.testlImm8r(0x1, SH4Registers::r0);
    if (dest != SH4Registers::r0)
        move(scr1, SH4Registers::r0); // restore caller's r0 (T bit survives the move)
    m_jump = Jump(m_assembler.jne(), SH4Assembler::JumpNear); // taken when odd
    // Halfword-aligned: low half then high half.
    load16PostInc(scr, scr1);
    load16(scr, dest);
    m_assembler.shllImm8r(16, dest);
    or32(scr1, dest);
    end.append(Jump(m_assembler.bra(), SH4Assembler::JumpNear));
    m_assembler.nop(); // branch delay slot
    m_jump.link(this);
    // Odd address: byte 0, the middle halfword, then byte 3.
    load8PostInc(scr, scr1);
    load16PostInc(scr, dest);
    m_assembler.shllImm8r(8, dest);
    or32(dest, scr1); // scr1 = byte0 | half << 8
    load8(scr, dest);
    m_assembler.shllImm8r(8, dest);
    m_assembler.shllImm8r(16, dest); // dest = byte3 << 24
    or32(scr1, dest);
    end.link(this);
    releaseScratch(scr);
    releaseScratch(scr1);
}
// Compare a possibly-unaligned 32-bit memory operand against an immediate and
// return the branch. Uses scratchReg3 directly (the reserved scratch, not
// claimed through claimScratch).
Jump branch32WithUnalignedHalfWords(RelationalCondition cond, BaseIndex left, TrustedImm32 right)
{
    RegisterID scr = scratchReg3;
    load32WithUnalignedHalfWords(left, scr);
    // Comparing against zero for (in)equality: tst is shorter than a full compare.
    if (((cond == Equal) || (cond == NotEqual)) && !right.m_value)
        m_assembler.testlRegReg(scr, scr);
    else
        compare32(right.m_value, scr, cond);
    if (cond == NotEqual)
        return branchFalse();
    return branchTrue();
}
// Branch if |reg| != 0.0 (NaN does not branch): materialize 0.0 in |scratch|
// and defer to the ordered-not-equal double compare.
Jump branchDoubleNonZero(FPRegisterID reg, FPRegisterID scratch)
{
    m_assembler.movImm8(0, scratchReg3);
    convertInt32ToDouble(scratchReg3, scratch);
    return branchDouble(DoubleNotEqual, reg, scratch);
}

// Branch if |reg| == 0.0 or is NaN.
Jump branchDoubleZeroOrNaN(FPRegisterID reg, FPRegisterID scratch)
{
    m_assembler.movImm8(0, scratchReg3);
    convertInt32ToDouble(scratchReg3, scratch);
    return branchDouble(DoubleEqualOrUnordered, reg, scratch);
}
// Emit a double comparison and return a Jump taken when |cond| holds.
// SH4 only provides dcmppeq / dcmppgt writing the T bit, so other conditions
// are synthesized. A value compares unequal to itself only when it is NaN, so
// dcmppeq(x, x) leaving T clear detects NaN. In this file je() branches while
// T is set and jne() while T is clear (see branchTrue/branchFalse below).
Jump branchDouble(DoubleCondition cond, FPRegisterID left, FPRegisterID right)
{
    if (cond == DoubleEqual) {
        m_assembler.dcmppeq(right, left);
        return branchTrue();
    }
    if (cond == DoubleNotEqual) {
        // Ordered not-equal: NaN operands must NOT take the branch, so the
        // self-compares route them around the final branchFalse().
        JumpList end;
        m_assembler.dcmppeq(left, left);
        m_assembler.ensureSpace(m_assembler.maxInstructionSize + 22, sizeof(uint32_t));
        end.append(Jump(m_assembler.jne(), SH4Assembler::JumpNear)); // left is NaN
        m_assembler.dcmppeq(right, right);
        end.append(Jump(m_assembler.jne(), SH4Assembler::JumpNear)); // right is NaN
        m_assembler.dcmppeq(right, left);
        Jump m_jump = branchFalse(); // taken when not equal
        end.link(this);
        return m_jump;
    }
    if (cond == DoubleGreaterThan) {
        m_assembler.dcmppgt(right, left); // T set when left > right
        return branchTrue();
    }
    if (cond == DoubleGreaterThanOrEqual) {
        // Ordered >=: NaNs skip the branch; otherwise branch when !(right > left).
        JumpList end;
        m_assembler.dcmppeq(left, left);
        m_assembler.ensureSpace(m_assembler.maxInstructionSize + 22, sizeof(uint32_t));
        end.append(Jump(m_assembler.jne(), SH4Assembler::JumpNear)); // left is NaN
        m_assembler.dcmppeq(right, right);
        end.append(Jump(m_assembler.jne(), SH4Assembler::JumpNear)); // right is NaN
        m_assembler.dcmppgt(left, right); // T set when right > left
        Jump m_jump = branchFalse();
        end.link(this);
        return m_jump;
    }
    if (cond == DoubleLessThan) {
        m_assembler.dcmppgt(left, right); // T set when right > left
        return branchTrue();
    }
    if (cond == DoubleLessThanOrEqual) {
        // Ordered <=: NaNs skip the branch; otherwise branch when !(left > right).
        JumpList end;
        m_assembler.dcmppeq(left, left);
        m_assembler.ensureSpace(m_assembler.maxInstructionSize + 22, sizeof(uint32_t));
        end.append(Jump(m_assembler.jne(), SH4Assembler::JumpNear)); // left is NaN
        m_assembler.dcmppeq(right, right);
        end.append(Jump(m_assembler.jne(), SH4Assembler::JumpNear)); // right is NaN
        m_assembler.dcmppgt(right, left); // T set when left > right
        Jump m_jump = branchFalse();
        end.link(this);
        return m_jump;
    }
    if (cond == DoubleEqualOrUnordered) {
        // NaN operands must take the branch: their near jumps land on the
        // far-branch sequence emitted by extraInstrForBranch.
        JumpList takeBranch;
        m_assembler.dcmppeq(left, left);
        m_assembler.ensureSpace(m_assembler.maxInstructionSize + 22, sizeof(uint32_t));
        takeBranch.append(Jump(m_assembler.jne(), SH4Assembler::JumpNear)); // left is NaN
        m_assembler.dcmppeq(right, right);
        takeBranch.append(Jump(m_assembler.jne(), SH4Assembler::JumpNear)); // right is NaN
        m_assembler.dcmppeq(left, right);
        Jump m_jump = Jump(m_assembler.je());
        takeBranch.link(this);
        m_assembler.extraInstrForBranch(scratchReg3);
        return m_jump;
    }
    if (cond == DoubleGreaterThanOrUnordered) {
        JumpList takeBranch;
        m_assembler.dcmppeq(left, left);
        m_assembler.ensureSpace(m_assembler.maxInstructionSize + 22, sizeof(uint32_t));
        takeBranch.append(Jump(m_assembler.jne(), SH4Assembler::JumpNear)); // left is NaN
        m_assembler.dcmppeq(right, right);
        takeBranch.append(Jump(m_assembler.jne(), SH4Assembler::JumpNear)); // right is NaN
        m_assembler.dcmppgt(right, left); // T set when left > right
        Jump m_jump = Jump(m_assembler.je());
        takeBranch.link(this);
        m_assembler.extraInstrForBranch(scratchReg3);
        return m_jump;
    }
    if (cond == DoubleGreaterThanOrEqualOrUnordered) {
        // A NaN operand makes dcmppgt compare false (T clear), so branchFalse
        // covers both left >= right and unordered in one test.
        m_assembler.dcmppgt(left, right); // T set when right > left
        return branchFalse();
    }
    if (cond == DoubleLessThanOrUnordered) {
        JumpList takeBranch;
        m_assembler.dcmppeq(left, left);
        m_assembler.ensureSpace(m_assembler.maxInstructionSize + 22, sizeof(uint32_t));
        takeBranch.append(Jump(m_assembler.jne(), SH4Assembler::JumpNear)); // left is NaN
        m_assembler.dcmppeq(right, right);
        takeBranch.append(Jump(m_assembler.jne(), SH4Assembler::JumpNear)); // right is NaN
        m_assembler.dcmppgt(left, right); // T set when right > left
        Jump m_jump = Jump(m_assembler.je());
        takeBranch.link(this);
        m_assembler.extraInstrForBranch(scratchReg3);
        return m_jump;
    }
    if (cond == DoubleLessThanOrEqualOrUnordered) {
        // Same single-test trick as >= or-unordered, mirrored.
        m_assembler.dcmppgt(right, left); // T set when left > right
        return branchFalse();
    }
    ASSERT(cond == DoubleNotEqualOrUnordered);
    // NaN compares not-equal (T clear), so branchFalse covers unordered too.
    m_assembler.dcmppeq(right, left);
    return branchFalse();
}
// Emit a jump taken when the T bit is set. extraInstrForBranch emits the
// far-branch sequence (materializing the target through scratchReg3) that the
// conditional instruction skips over; ensureSpace keeps the pair contiguous.
Jump branchTrue()
{
    m_assembler.ensureSpace(m_assembler.maxInstructionSize + 6, sizeof(uint32_t));
    Jump m_jump = Jump(m_assembler.je());
    m_assembler.extraInstrForBranch(scratchReg3);
    return m_jump;
}

// Emit a jump taken when the T bit is clear.
Jump branchFalse()
{
    m_assembler.ensureSpace(m_assembler.maxInstructionSize + 6, sizeof(uint32_t));
    Jump m_jump = Jump(m_assembler.jne());
    m_assembler.extraInstrForBranch(scratchReg3);
    return m_jump;
}
  1156. Jump branch32(RelationalCondition cond, BaseIndex left, TrustedImm32 right)
  1157. {
  1158. RegisterID scr = claimScratch();
  1159. move(left.index, scr);
  1160. lshift32(TrustedImm32(left.scale), scr);
  1161. add32(left.base, scr);
  1162. load32(scr, left.offset, scr);
  1163. compare32(right.m_value, scr, cond);
  1164. releaseScratch(scr);
  1165. if (cond == NotEqual)
  1166. return branchFalse();
  1167. return branchTrue();
  1168. }
// dest = sqrt(src). The FPU sqrt operates in place, so copy first.
void sqrtDouble(FPRegisterID src, FPRegisterID dest)
{
    moveDouble(src, dest);
    m_assembler.dsqrt(dest);
}

// dest = fabs(src). Operates in place, so copy first.
void absDouble(FPRegisterID src, FPRegisterID dest)
{
    moveDouble(src, dest);
    m_assembler.dabs(dest);
}
  1179. Jump branchTest8(ResultCondition cond, Address address, TrustedImm32 mask = TrustedImm32(-1))
  1180. {
  1181. RegisterID addressTempRegister = claimScratch();
  1182. load8(address, addressTempRegister);
  1183. Jump jmp = branchTest32(cond, addressTempRegister, mask);
  1184. releaseScratch(addressTempRegister);
  1185. return jmp;
  1186. }
  1187. Jump branchTest8(ResultCondition cond, AbsoluteAddress address, TrustedImm32 mask = TrustedImm32(-1))
  1188. {
  1189. RegisterID addressTempRegister = claimScratch();
  1190. move(TrustedImmPtr(address.m_ptr), addressTempRegister);
  1191. load8(Address(addressTempRegister), addressTempRegister);
  1192. Jump jmp = branchTest32(cond, addressTempRegister, mask);
  1193. releaseScratch(addressTempRegister);
  1194. return jmp;
  1195. }
  1196. void signExtend32ToPtr(RegisterID src, RegisterID dest)
  1197. {
  1198. if (src != dest)
  1199. move(src, dest);
  1200. }
  1201. Jump branch8(RelationalCondition cond, Address left, TrustedImm32 right)
  1202. {
  1203. RegisterID addressTempRegister = claimScratch();
  1204. load8(left, addressTempRegister);
  1205. Jump jmp = branch32(cond, addressTempRegister, right);
  1206. releaseScratch(addressTempRegister);
  1207. return jmp;
  1208. }
  1209. void compare8(RelationalCondition cond, Address left, TrustedImm32 right, RegisterID dest)
  1210. {
  1211. RegisterID addressTempRegister = claimScratch();
  1212. load8(left, addressTempRegister);
  1213. compare32(cond, addressTempRegister, right, dest);
  1214. releaseScratch(addressTempRegister);
  1215. }
enum BranchTruncateType { BranchIfTruncateFailed, BranchIfTruncateSuccessful };

// Truncate |src| to int32 in |dest| via FTRC/FPUL and branch on whether the
// truncation saturated. Out-of-range inputs produce the clamped sentinels
// 0x7fffffff / 0x80000000, so equality with either is treated as failure
// (an in-range result that genuinely equals a sentinel is indistinguishable
// and also reported as failure).
Jump branchTruncateDoubleToInt32(FPRegisterID src, RegisterID dest, BranchTruncateType branchType = BranchIfTruncateFailed)
{
    m_assembler.ftrcdrmfpul(src);
    m_assembler.stsfpulReg(dest);
    m_assembler.loadConstant(0x7fffffff, scratchReg3);
    m_assembler.cmplRegReg(dest, scratchReg3, SH4Condition(Equal));
    m_assembler.ensureSpace(m_assembler.maxInstructionSize + 14, sizeof(uint32_t));
    m_assembler.branch(BT_OPCODE, 2); // already INT_MAX: skip the INT_MIN check, T stays set
    m_assembler.addlImm8r(1, scratchReg3); // 0x7fffffff + 1 == 0x80000000
    m_assembler.cmplRegReg(dest, scratchReg3, SH4Condition(Equal));
    return (branchType == BranchIfTruncateFailed) ? branchTrue() : branchFalse();
}
  1229. // Stack manipulation operations
  1230. void pop(RegisterID dest)
  1231. {
  1232. m_assembler.popReg(dest);
  1233. }
  1234. void push(RegisterID src)
  1235. {
  1236. m_assembler.pushReg(src);
  1237. }
  1238. void push(TrustedImm32 imm)
  1239. {
  1240. RegisterID scr = claimScratch();
  1241. m_assembler.loadConstant(imm.m_value, scr);
  1242. push(scr);
  1243. releaseScratch(scr);
  1244. }
// Register move operations

// Load a 32-bit immediate into |dest|.
void move(TrustedImm32 imm, RegisterID dest)
{
    m_assembler.loadConstant(imm.m_value, dest);
}

// Emit a patchable pointer load: the label must immediately precede the
// constant, and loadConstantUnReusable gives the load a fixed shape the
// patcher can rewrite later.
DataLabelPtr moveWithPatch(TrustedImmPtr initialValue, RegisterID dest)
{
    m_assembler.ensureSpace(m_assembler.maxInstructionSize, sizeof(uint32_t));
    DataLabelPtr dataLabel(this);
    m_assembler.loadConstantUnReusable(reinterpret_cast<uint32_t>(initialValue.m_value), dest);
    return dataLabel;
}

// Register-to-register move; elided when source and destination coincide.
void move(RegisterID src, RegisterID dest)
{
    if (src != dest)
        m_assembler.movlRegReg(src, dest);
}

// Load a pointer-sized immediate into |dest|.
void move(TrustedImmPtr imm, RegisterID dest)
{
    m_assembler.loadConstant(imm.asIntptr(), dest);
}
// dest = (left <cond> right) ? 1 : 0.
void compare32(RelationalCondition cond, RegisterID left, RegisterID right, RegisterID dest)
{
    m_assembler.cmplRegReg(right, left, SH4Condition(cond));
    if (cond != NotEqual) {
        // The T bit already holds the answer: copy it out.
        m_assembler.movt(dest);
        return;
    }
    // NotEqual needs the inverse of T (SH4Condition presumably maps it to an
    // equality compare — confirm): dest = 0; if T (equal) skip; dest = 1.
    m_assembler.ensureSpace(m_assembler.maxInstructionSize + 4);
    m_assembler.movImm8(0, dest);
    m_assembler.branch(BT_OPCODE, 0);
    m_assembler.movImm8(1, dest);
}

// dest = (left <cond> imm) ? 1 : 0; the immediate is staged in a register.
// When |dest| is free it doubles as that staging register.
void compare32(RelationalCondition cond, RegisterID left, TrustedImm32 right, RegisterID dest)
{
    if (left != dest) {
        move(right, dest);
        compare32(cond, left, dest, dest);
        return;
    }
    RegisterID scr = claimScratch();
    move(right, scr);
    compare32(cond, left, scr, dest);
    releaseScratch(scr);
}
// dest = ((byte at address) & mask) <cond> 0, as a 0/1 boolean.
// Only Zero / NonZero are meaningful here.
void test8(ResultCondition cond, Address address, TrustedImm32 mask, RegisterID dest)
{
    ASSERT((cond == Zero) || (cond == NonZero));
    load8(address, dest);
    if (mask.m_value == -1)
        compare32(0, dest, static_cast<RelationalCondition>(cond)); // full-byte test
    else
        testlImm(mask.m_value, dest); // tst sets T when (dest & mask) == 0
    if (cond != NonZero) {
        m_assembler.movt(dest); // T already holds the Zero answer
        return;
    }
    // NonZero is the inverse of T: dest = 0; if T skip; dest = 1.
    m_assembler.ensureSpace(m_assembler.maxInstructionSize + 4);
    m_assembler.movImm8(0, dest);
    m_assembler.branch(BT_OPCODE, 0);
    m_assembler.movImm8(1, dest);
}
  1307. void loadPtrLinkReg(ImplicitAddress address)
  1308. {
  1309. RegisterID scr = claimScratch();
  1310. load32(address, scr);
  1311. m_assembler.ldspr(scr);
  1312. releaseScratch(scr);
  1313. }
// Compare two registers and branch on |cond|.
Jump branch32(RelationalCondition cond, RegisterID left, RegisterID right)
{
    m_assembler.cmplRegReg(right, left, SH4Condition(cond));
    /* Emitted shape once linked far:
       BT label => BF off
       nop         LDR reg
       nop         braf @reg
       nop         nop
    */
    if (cond == NotEqual)
        return branchFalse(); // branch while T clear (not equal)
    return branchTrue();
}

// Compare a register against an immediate and branch on |cond|.
Jump branch32(RelationalCondition cond, RegisterID left, TrustedImm32 right)
{
    // Zero (in)equality: tst avoids materializing the immediate.
    if (((cond == Equal) || (cond == NotEqual)) && !right.m_value)
        m_assembler.testlRegReg(left, left);
    else
        compare32(right.m_value, left, cond);
    if (cond == NotEqual)
        return branchFalse();
    return branchTrue();
}
  1336. Jump branch32(RelationalCondition cond, RegisterID left, Address right)
  1337. {
  1338. compare32(right.offset, right.base, left, cond);
  1339. if (cond == NotEqual)
  1340. return branchFalse();
  1341. return branchTrue();
  1342. }
  1343. Jump branch32(RelationalCondition cond, Address left, RegisterID right)
  1344. {
  1345. compare32(right, left.offset, left.base, cond);
  1346. if (cond == NotEqual)
  1347. return branchFalse();
  1348. return branchTrue();
  1349. }
  1350. Jump branch32(RelationalCondition cond, Address left, TrustedImm32 right)
  1351. {
  1352. compare32(right.m_value, left.offset, left.base, cond);
  1353. if (cond == NotEqual)
  1354. return branchFalse();
  1355. return branchTrue();
  1356. }
  1357. Jump branch32(RelationalCondition cond, AbsoluteAddress left, RegisterID right)
  1358. {
  1359. RegisterID scr = claimScratch();
  1360. load32(left.m_ptr, scr);
  1361. m_assembler.cmplRegReg(right, scr, SH4Condition(cond));
  1362. releaseScratch(scr);
  1363. if (cond == NotEqual)
  1364. return branchFalse();
  1365. return branchTrue();
  1366. }
  1367. Jump branch32(RelationalCondition cond, AbsoluteAddress left, TrustedImm32 right)
  1368. {
  1369. RegisterID addressTempRegister = claimScratch();
  1370. m_assembler.loadConstant(reinterpret_cast<uint32_t>(left.m_ptr), addressTempRegister);
  1371. m_assembler.movlMemReg(addressTempRegister, addressTempRegister);
  1372. compare32(right.m_value, addressTempRegister, cond);
  1373. releaseScratch(addressTempRegister);
  1374. if (cond == NotEqual)
  1375. return branchFalse();
  1376. return branchTrue();
  1377. }
// Compare a byte at base + (index << scale) + offset against a small
// unsigned immediate and branch on |cond|.
Jump branch8(RelationalCondition cond, BaseIndex left, TrustedImm32 right)
{
    // Only unsigned 8-bit immediates are supported here.
    ASSERT(!(right.m_value & 0xFFFFFF00));
    RegisterID scr = claimScratch();
    move(left.index, scr);
    lshift32(TrustedImm32(left.scale), scr);
    if (left.offset)
        add32(TrustedImm32(left.offset), scr);
    add32(left.base, scr);
    load8(scr, scr);
    RegisterID scr1 = claimScratch();
    m_assembler.loadConstant(right.m_value, scr1);
    // NOTE(review): the scratches are released before branch32 consumes them.
    // This is presumably safe because releaseScratch only marks availability
    // (the registers keep their values) and branch32(reg, reg) claims no
    // scratch of its own — confirm claimScratch/releaseScratch semantics.
    releaseScratch(scr);
    releaseScratch(scr1);
    return branch32(cond, scr, scr1);
}
// Branch on (reg & mask) being zero / non-zero. tst sets T when the AND is 0,
// so Zero maps to branchTrue and NonZero to branchFalse.
Jump branchTest32(ResultCondition cond, RegisterID reg, RegisterID mask)
{
    ASSERT((cond == Zero) || (cond == NonZero));
    m_assembler.testlRegReg(reg, mask);
    if (cond == NonZero) // NotEqual
        return branchFalse();
    return branchTrue();
}

// Branch on (reg & mask) with an immediate mask; -1 means "test the whole
// register".
Jump branchTest32(ResultCondition cond, RegisterID reg, TrustedImm32 mask = TrustedImm32(-1))
{
    ASSERT((cond == Zero) || (cond == NonZero));
    if (mask.m_value == -1)
        m_assembler.testlRegReg(reg, reg);
    else
        testlImm(mask.m_value, reg);
    if (cond == NonZero) // NotEqual
        return branchFalse();
    return branchTrue();
}

// Branch on (word at base + offset) & mask.
Jump branchTest32(ResultCondition cond, Address address, TrustedImm32 mask = TrustedImm32(-1))
{
    ASSERT((cond == Zero) || (cond == NonZero));
    if (mask.m_value == -1)
        compare32(0, address.offset, address.base, static_cast<RelationalCondition>(cond));
    else
        testImm(mask.m_value, address.offset, address.base);
    if (cond == NonZero) // NotEqual
        return branchFalse();
    return branchTrue();
}

// Branch on (word at base + (index << scale) + offset) & mask.
Jump branchTest32(ResultCondition cond, BaseIndex address, TrustedImm32 mask = TrustedImm32(-1))
{
    RegisterID scr = claimScratch();
    move(address.index, scr);
    lshift32(TrustedImm32(address.scale), scr);
    add32(address.base, scr);
    load32(scr, address.offset, scr);
    if (mask.m_value == -1)
        m_assembler.testlRegReg(scr, scr);
    else
        testlImm(mask.m_value, scr);
    releaseScratch(scr);
    if (cond == NonZero) // NotEqual
        return branchFalse();
    return branchTrue();
}
// Unconditional jump to a later-linked target.
Jump jump()
{
    return Jump(m_assembler.jmp());
}

// Indirect jump through |target|.
void jump(RegisterID target)
{
    m_assembler.jmpReg(target);
}

// Jump through a code pointer stored at base + offset.
void jump(Address address)
{
    RegisterID scr = claimScratch();
    if ((address.offset < 0) || (address.offset >= 64)) {
        // Offset outside the displacement-form window: compute the address.
        m_assembler.loadConstant(address.offset, scr);
        m_assembler.addlRegReg(address.base, scr);
        m_assembler.movlMemReg(scr, scr);
    } else if (address.offset)
        // Displacement form encodes offset/4 — presumably 4-byte aligned here.
        m_assembler.movlMemReg(address.offset >> 2, address.base, scr);
    else
        m_assembler.movlMemReg(address.base, scr);
    m_assembler.jmpReg(scr);
    releaseScratch(scr);
}
// Arithmetic control flow operations

// dest += src, branching on the requested outcome of the addition.
Jump branchAdd32(ResultCondition cond, RegisterID src, RegisterID dest)
{
    ASSERT((cond == Overflow) || (cond == Signed) || (cond == PositiveOrZero) || (cond == Zero) || (cond == NonZero));
    if (cond == Overflow) {
        // addv sets T on signed overflow.
        m_assembler.addvlRegReg(src, dest);
        return branchTrue();
    }
    if (cond == Signed) {
        m_assembler.addlRegReg(src, dest);
        // cmp/pz sets T when dest >= 0; branch on T clear, i.e. negative.
        m_assembler.cmppz(dest);
        return branchFalse();
    }
    if (cond == PositiveOrZero) {
        m_assembler.addlRegReg(src, dest);
        m_assembler.cmppz(dest); // T set when dest >= 0
        return branchTrue();
    }
    // Zero / NonZero: compare the sum against 0.
    m_assembler.addlRegReg(src, dest);
    compare32(0, dest, Equal);
    if (cond == NonZero) // NotEqual
        return branchFalse();
    return branchTrue();
}

// dest += imm, staged through the reserved scratchReg3.
Jump branchAdd32(ResultCondition cond, TrustedImm32 imm, RegisterID dest)
{
    ASSERT((cond == Overflow) || (cond == Signed) || (cond == PositiveOrZero) || (cond == Zero) || (cond == NonZero));
    move(imm, scratchReg3);
    return branchAdd32(cond, scratchReg3, dest);
}
// dest = src + imm, branching on the requested outcome.
Jump branchAdd32(ResultCondition cond, RegisterID src, TrustedImm32 imm, RegisterID dest)
{
    ASSERT((cond == Overflow) || (cond == Signed) || (cond == PositiveOrZero) || (cond == Zero) || (cond == NonZero));
    if (src != dest)
        move(src, dest);
    if (cond == Overflow) {
        // Overflow needs addv, which takes a register operand: stage the
        // immediate in the reserved scratchReg3.
        move(imm, scratchReg3);
        m_assembler.addvlRegReg(scratchReg3, dest);
        return branchTrue();
    }
    add32(imm, dest);
    if (cond == Signed) {
        m_assembler.cmppz(dest); // T set when dest >= 0; branch when clear
        return branchFalse();
    }
    if (cond == PositiveOrZero) {
        m_assembler.cmppz(dest);
        return branchTrue();
    }
    compare32(0, dest, Equal);
    if (cond == NonZero) // NotEqual
        return branchFalse();
    return branchTrue();
}
// Read-modify-write: *dest.m_ptr += imm, then branch on the outcome.
// |result| records whether the condition corresponds to the T bit being set
// (branchTrue) or clear (branchFalse) after the compare chosen below.
Jump branchAdd32(ResultCondition cond, TrustedImm32 imm, AbsoluteAddress dest)
{
    ASSERT((cond == Overflow) || (cond == Signed) || (cond == PositiveOrZero) || (cond == Zero) || (cond == NonZero));
    bool result;
    move(imm, scratchReg3);
    RegisterID destptr = claimScratch();
    RegisterID destval = claimScratch();
    m_assembler.loadConstant(reinterpret_cast<uint32_t>(dest.m_ptr), destptr);
    m_assembler.movlMemReg(destptr, destval);
    if (cond == Overflow) {
        m_assembler.addvlRegReg(scratchReg3, destval); // T set on signed overflow
        result = true;
    } else {
        m_assembler.addlRegReg(scratchReg3, destval);
        if (cond == Signed) {
            m_assembler.cmppz(destval); // T set when >= 0, so branch on T clear
            result = false;
        } else if (cond == PositiveOrZero) {
            m_assembler.cmppz(destval);
            result = true;
        } else {
            // Zero / NonZero: compare the new value against 0.
            m_assembler.movImm8(0, scratchReg3);
            m_assembler.cmplRegReg(scratchReg3, destval, SH4Condition(cond));
            result = (cond == Zero);
        }
    }
    // Write the updated value back before emitting the branch.
    m_assembler.movlRegMem(destval, destptr);
    releaseScratch(destval);
    releaseScratch(destptr);
    return result ? branchTrue() : branchFalse();
}
    // Emit "dest = src * dest" and return a Jump taken when the given
    // ResultCondition holds for the multiplication.
    Jump branchMul32(ResultCondition cond, RegisterID src, RegisterID dest)
    {
        ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
        if (cond == Overflow) {
            RegisterID scrsign = claimScratch();
            RegisterID msbres = claimScratch();
            // dmuls.l leaves the full 64-bit signed product in MACH:MACL.
            m_assembler.dmulslRegReg(src, dest);
            m_assembler.stsmacl(dest); // Low 32 bits are the result.
            // Compute what MACH must hold if no overflow occurred: 0 when the
            // result is >= 0, -1 (sign extension) when it is negative.
            m_assembler.cmppz(dest);
            m_assembler.movt(scrsign);
            m_assembler.addlImm8r(-1, scrsign);
            m_assembler.stsmach(msbres);
            // T is set iff MACH matches the expected sign extension, so
            // overflow is the T-clear branch.
            m_assembler.cmplRegReg(msbres, scrsign, SH4Condition(Equal));
            releaseScratch(msbres);
            releaseScratch(scrsign);
            return branchFalse();
        }
        m_assembler.imullRegReg(src, dest);
        m_assembler.stsmacl(dest);
        if (cond == Signed) {
            // Check if dest is negative: cmppz sets T when dest >= 0.
            m_assembler.cmppz(dest);
            return branchFalse();
        }
        compare32(0, dest, static_cast<RelationalCondition>(cond));
        if (cond == NonZero) // NotEqual
            return branchFalse();
        return branchTrue();
    }
  1577. Jump branchMul32(ResultCondition cond, TrustedImm32 imm, RegisterID src, RegisterID dest)
  1578. {
  1579. ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
  1580. move(imm, scratchReg3);
  1581. if (src != dest)
  1582. move(src, dest);
  1583. return branchMul32(cond, scratchReg3, dest);
  1584. }
    // Emit "dest = dest - src" and return a Jump taken when the given
    // ResultCondition holds for the subtraction.
    Jump branchSub32(ResultCondition cond, RegisterID src, RegisterID dest)
    {
        ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
        if (cond == Overflow) {
            // subvl sets the T bit on signed overflow; branch when T is set.
            m_assembler.subvlRegReg(src, dest);
            return branchTrue();
        }
        if (cond == Signed) {
            // Check if dest is negative
            m_assembler.sublRegReg(src, dest);
            compare32(0, dest, LessThan);
            return branchTrue();
        }
        sub32(src, dest);
        compare32(0, dest, static_cast<RelationalCondition>(cond));
        if (cond == NonZero) // NotEqual
            return branchFalse();
        return branchTrue();
    }
    // Emit "dest = dest - imm" and return a Jump taken when the given
    // ResultCondition holds. The immediate is materialized in scratchReg3.
    Jump branchSub32(ResultCondition cond, TrustedImm32 imm, RegisterID dest)
    {
        ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
        move(imm, scratchReg3);
        return branchSub32(cond, scratchReg3, dest);
    }
  1610. Jump branchSub32(ResultCondition cond, RegisterID src, TrustedImm32 imm, RegisterID dest)
  1611. {
  1612. move(imm, scratchReg3);
  1613. if (src != dest)
  1614. move(src, dest);
  1615. return branchSub32(cond, scratchReg3, dest);
  1616. }
  1617. Jump branchSub32(ResultCondition cond, RegisterID src1, RegisterID src2, RegisterID dest)
  1618. {
  1619. if (src1 != dest)
  1620. move(src1, dest);
  1621. return branchSub32(cond, src2, dest);
  1622. }
  1623. Jump branchOr32(ResultCondition cond, RegisterID src, RegisterID dest)
  1624. {
  1625. ASSERT((cond == Signed) || (cond == Zero) || (cond == NonZero));
  1626. if (cond == Signed) {
  1627. or32(src, dest);
  1628. compare32(0, dest, static_cast<RelationalCondition>(LessThan));
  1629. return branchTrue();
  1630. }
  1631. or32(src, dest);
  1632. compare32(0, dest, static_cast<RelationalCondition>(cond));
  1633. if (cond == NonZero) // NotEqual
  1634. return branchFalse();
  1635. return branchTrue();
  1636. }
    // Truncate double src to an int32 in dest. Appends to failureCases the
    // branches taken when the value does not round-trip exactly and,
    // optionally, when the result is zero.
    // NOTE(review): fpTemp is unused here; this port uses the dedicated
    // fscratch FP register instead.
    void branchConvertDoubleToInt32(FPRegisterID src, RegisterID dest, JumpList& failureCases, FPRegisterID fpTemp, bool negZeroCheck = true)
    {
        // ftrc truncates to an integer via FPUL; sts moves it to dest.
        m_assembler.ftrcdrmfpul(src);
        m_assembler.stsfpulReg(dest);
        // Convert back and require exact equality with the original.
        convertInt32ToDouble(dest, fscratch);
        failureCases.append(branchDouble(DoubleNotEqualOrUnordered, fscratch, src));
        if (negZeroCheck) {
            // A zero result could have come from -0.0, which compares equal
            // to +0.0 and so passes the round-trip check above; conservatively
            // treat any zero result as failure.
            if (dest == SH4Registers::r0)
                m_assembler.cmpEqImmR0(0, dest); // cmp/eq #imm form requires r0.
            else {
                m_assembler.movImm8(0, scratchReg3);
                m_assembler.cmplRegReg(scratchReg3, dest, SH4Condition(Equal));
            }
            failureCases.append(branchTrue());
        }
    }
    // Two's-complement negation: dst = -dst.
    void neg32(RegisterID dst)
    {
        m_assembler.neg(dst, dst);
    }
    // Logical (zero-filling) right shift: dest >>= (shiftamount & 0x1f).
    void urshift32(RegisterID shiftamount, RegisterID dest)
    {
        RegisterID shiftTmp = claimScratch();
        // shld shifts left for positive counts and right for negative ones,
        // so mask the amount to 0..31 and negate it.
        m_assembler.loadConstant(0x1f, shiftTmp);
        m_assembler.andlRegReg(shiftamount, shiftTmp);
        m_assembler.neg(shiftTmp, shiftTmp);
        m_assembler.shldRegReg(dest, shiftTmp);
        releaseScratch(shiftTmp);
    }
    // Logical right shift by a constant: dest >>= (imm & 0x1f).
    void urshift32(TrustedImm32 imm, RegisterID dest)
    {
        int immMasked = imm.m_value & 0x1f;
        if (!immMasked) // Shift by zero is a no-op.
            return;
        // These counts have dedicated single-instruction shlr encodings.
        if ((immMasked == 1) || (immMasked == 2) || (immMasked == 8) || (immMasked == 16)) {
            m_assembler.shlrImm8r(immMasked, dest);
            return;
        }
        // Otherwise use shld with a negative count, which shifts right.
        RegisterID shiftTmp = claimScratch();
        m_assembler.loadConstant(-immMasked, shiftTmp);
        m_assembler.shldRegReg(dest, shiftTmp);
        releaseScratch(shiftTmp);
    }
    // Logical right shift of src by a constant, result placed in dest.
    void urshift32(RegisterID src, TrustedImm32 shiftamount, RegisterID dest)
    {
        if (src != dest)
            move(src, dest);
        urshift32(shiftamount, dest);
    }
    // Emit a call whose target is patched in at link time.
    Call call()
    {
        return Call(m_assembler.call(), Call::Linkable);
    }
    // Emit a near call whose target is patched in at link time.
    Call nearCall()
    {
        return Call(m_assembler.call(), Call::LinkableNear);
    }
    // Emit an indirect call through the address already held in target.
    Call call(RegisterID target)
    {
        return Call(m_assembler.call(target), Call::None);
    }
    // Load a code pointer from address into target and call through it.
    void call(Address address, RegisterID target)
    {
        load32(address.base, address.offset, target);
        // Reserve buffer space so the jsr and its delay-slot nop are emitted
        // together.
        m_assembler.ensureSpace(m_assembler.maxInstructionSize + 2);
        m_assembler.branch(JSR_OPCODE, target);
        m_assembler.nop();
    }
    // Emit a breakpoint trap (the trailing nop pads the slot after it).
    void breakpoint()
    {
        m_assembler.bkpt();
        m_assembler.nop();
    }
    // Compare left against a patchable pointer constant (initially
    // initialRightValue); dataLabel marks the constant for later repatching.
    Jump branchPtrWithPatch(RelationalCondition cond, RegisterID left, DataLabelPtr& dataLabel, TrustedImmPtr initialRightValue = TrustedImmPtr(0))
    {
        RegisterID dataTempRegister = claimScratch();
        dataLabel = moveWithPatch(initialRightValue, dataTempRegister);
        m_assembler.cmplRegReg(dataTempRegister, left, SH4Condition(cond));
        releaseScratch(dataTempRegister);
        // The compare sets T when the condition holds, so NotEqual is the
        // T-clear branch.
        if (cond == NotEqual)
            return branchFalse();
        return branchTrue();
    }
    // Compare the 32-bit value loaded from left against a patchable pointer
    // constant (initially initialRightValue); dataLabel marks the constant.
    Jump branchPtrWithPatch(RelationalCondition cond, Address left, DataLabelPtr& dataLabel, TrustedImmPtr initialRightValue = TrustedImmPtr(0))
    {
        RegisterID scr = claimScratch();
        // scr = *(left.base + left.offset)
        m_assembler.loadConstant(left.offset, scr);
        m_assembler.addlRegReg(left.base, scr);
        m_assembler.movlMemReg(scr, scr);
        RegisterID scr1 = claimScratch();
        dataLabel = moveWithPatch(initialRightValue, scr1);
        m_assembler.cmplRegReg(scr1, scr, SH4Condition(cond));
        releaseScratch(scr);
        releaseScratch(scr1);
        // The compare sets T when the condition holds, so NotEqual is the
        // T-clear branch.
        if (cond == NotEqual)
            return branchFalse();
        return branchTrue();
    }
    // Function return: rts with a nop filling its delay slot.
    void ret()
    {
        m_assembler.ret();
        m_assembler.nop();
    }
    // Store a patchable pointer constant (initially initialValue) to address;
    // the returned label marks the constant for later repatching.
    DataLabelPtr storePtrWithPatch(TrustedImmPtr initialValue, ImplicitAddress address)
    {
        RegisterID scr = claimScratch();
        DataLabelPtr label = moveWithPatch(initialValue, scr);
        store32(scr, address);
        releaseScratch(scr);
        return label;
    }
    // Convenience overload: store a patchable pointer that starts out null.
    DataLabelPtr storePtrWithPatch(ImplicitAddress address) { return storePtrWithPatch(TrustedImmPtr(0), address); }
    // Current size of the assembler's out-of-line constant pool.
    int sizeOfConstantPool()
    {
        return m_assembler.sizeOfConstantPool();
    }
    // Emit an indirect jump through a patchable constant (initially 0) and
    // wrap it as a tail call. The constant is loaded via a non-reusable pool
    // slot — presumably so this site can be relinked independently of other
    // loads of the same value (see SH4Assembler::loadConstantUnReusable).
    Call tailRecursiveCall()
    {
        RegisterID scr = claimScratch();
        m_assembler.loadConstantUnReusable(0x0, scr, true);
        Jump m_jump = Jump(m_assembler.jmp(scr));
        releaseScratch(scr);
        return Call::fromTailJump(m_jump);
    }
    // Bind oldJump to the current position and turn it into a tail call.
    Call makeTailRecursiveCall(Jump oldJump)
    {
        oldJump.link(this);
        return tailRecursiveCall();
    }
    // Emit a single no-op instruction.
    void nop()
    {
        m_assembler.nop();
    }
    // Read back the current target of a previously linked call instruction.
    static FunctionPtr readCallTarget(CodeLocationCall call)
    {
        return FunctionPtr(reinterpret_cast<void(*)()>(SH4Assembler::readCallTarget(call.dataLocation())));
    }
    // Overwrite the code at instructionStart with a jump to destination.
    static void replaceWithJump(CodeLocationLabel instructionStart, CodeLocationLabel destination)
    {
        SH4Assembler::replaceWithJump(instructionStart.dataLocation(), destination.dataLocation());
    }
    // Upper bound on the number of bytes replaceWithJump may write.
    static ptrdiff_t maxJumpReplacementSize()
    {
        return SH4Assembler::maxJumpReplacementSize();
    }
    // Address-form patchable branches cannot be jump-replaced on SH4.
    static bool canJumpReplacePatchableBranchPtrWithPatch() { return false; }
    // The patchable branch sequence starts at the constant's label itself.
    static CodeLocationLabel startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr label)
    {
        return label.labelAtOffset(0);
    }
    // Undo a replaceWithJump, restoring a branchPtrWithPatch sequence that
    // compares against initialValue.
    static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel instructionStart, RegisterID, void* initialValue)
    {
        SH4Assembler::revertJump(instructionStart.dataLocation(), initialValue);
    }
    // Not supported on this port: canJumpReplacePatchableBranchPtrWithPatch()
    // returns false, so this must never be reached.
    static CodeLocationLabel startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr)
    {
        UNREACHABLE_FOR_PLATFORM();
        return CodeLocationLabel();
    }
    // Not supported on this port: canJumpReplacePatchableBranchPtrWithPatch()
    // returns false, so this must never be reached.
    static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel instructionStart, Address, void* initialValue)
    {
        UNREACHABLE_FOR_PLATFORM();
    }
  1800. protected:
    // Map a RelationalCondition onto the assembler's condition enum (the
    // enumerator values are assumed to line up; the cast relies on that).
    SH4Assembler::Condition SH4Condition(RelationalCondition cond)
    {
        return static_cast<SH4Assembler::Condition>(cond);
    }
    // Map a ResultCondition onto the assembler's condition enum (the
    // enumerator values are assumed to line up; the cast relies on that).
    SH4Assembler::Condition SH4Condition(ResultCondition cond)
    {
        return static_cast<SH4Assembler::Condition>(cond);
    }
  1809. private:
  1810. friend class LinkBuffer;
  1811. friend class RepatchBuffer;
    // Resolve an unlinked call in freshly generated code to function.
    static void linkCall(void* code, Call call, FunctionPtr function)
    {
        SH4Assembler::linkCall(code, call.m_label, function.value());
    }
    // Repatch an existing call site to target destination.
    static void repatchCall(CodeLocationCall call, CodeLocationLabel destination)
    {
        SH4Assembler::relinkCall(call.dataLocation(), destination.executableAddress());
    }
    // Repatch an existing call site to target destination.
    static void repatchCall(CodeLocationCall call, FunctionPtr destination)
    {
        SH4Assembler::relinkCall(call.dataLocation(), destination.executableAddress());
    }
  1824. };
  1825. } // namespace JSC
  1826. #endif // ENABLE(ASSEMBLER)
  1827. #endif // MacroAssemblerSH4_h