MacroAssemblerARMv7.h 67 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917
  1. /*
  2. * Copyright (C) 2009, 2010 Apple Inc. All rights reserved.
  3. * Copyright (C) 2010 University of Szeged
  4. *
  5. * Redistribution and use in source and binary forms, with or without
  6. * modification, are permitted provided that the following conditions
  7. * are met:
  8. * 1. Redistributions of source code must retain the above copyright
  9. * notice, this list of conditions and the following disclaimer.
  10. * 2. Redistributions in binary form must reproduce the above copyright
  11. * notice, this list of conditions and the following disclaimer in the
  12. * documentation and/or other materials provided with the distribution.
  13. *
  14. * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
  15. * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
  16. * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
  17. * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
  18. * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
  19. * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
  20. * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
  21. * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
  22. * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  23. * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  24. * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  25. */
  26. #ifndef MacroAssemblerARMv7_h
  27. #define MacroAssemblerARMv7_h
  28. #if ENABLE(ASSEMBLER)
  29. #include "ARMv7Assembler.h"
  30. #include "AbstractMacroAssembler.h"
  31. namespace JSC {
class MacroAssemblerARMv7 : public AbstractMacroAssembler<ARMv7Assembler> {
    // FIXME: switch dataTempRegister & addressTempRegister, or possibly use r7?
    //  - dTR is likely used more than aTR, and we'll get better instruction
    //    encoding if it's in the low 8 registers.
    // Scratch registers reserved by the macro assembler. Generated code must
    // not expect their values to survive across macro-assembler operations.
    static const RegisterID dataTempRegister = ARMRegisters::ip;
    static const RegisterID addressTempRegister = ARMRegisters::r3;
    // Scratch VFP double register; d7 is reserved for macro-assembler use.
    static const ARMRegisters::FPDoubleRegisterID fpTempRegister = ARMRegisters::d7;
    // Single-precision alias of fpTempRegister (for float<->double ops).
    inline ARMRegisters::FPSingleRegisterID fpTempRegisterAsSingle() { return ARMRegisters::asSingle(fpTempRegister); }

public:
    // By default jumps are emitted in their compact forms; patchable jumps
    // are only forced when m_makeJumpPatchable is set.
    MacroAssemblerARMv7()
        : m_makeJumpPatchable(false)
    {
    }

    // Types re-exported from the underlying assembler for the branch
    // compaction / link machinery.
    typedef ARMv7Assembler::LinkRecord LinkRecord;
    typedef ARMv7Assembler::JumpType JumpType;
    typedef ARMv7Assembler::JumpLinkType JumpLinkType;
  48. static bool isCompactPtrAlignedAddressOffset(ptrdiff_t value)
  49. {
  50. return value >= -255 && value <= 255;
  51. }
    // Branch-compaction plumbing: all of these forward directly to the
    // underlying ARMv7Assembler, which owns the link records.
    Vector<LinkRecord, 0, UnsafeVectorOverflow>& jumpsToLink() { return m_assembler.jumpsToLink(); }
    void* unlinkedCode() { return m_assembler.unlinkedCode(); }
    bool canCompact(JumpType jumpType) { return m_assembler.canCompact(jumpType); }
    JumpLinkType computeJumpType(JumpType jumpType, const uint8_t* from, const uint8_t* to) { return m_assembler.computeJumpType(jumpType, from, to); }
    JumpLinkType computeJumpType(LinkRecord& record, const uint8_t* from, const uint8_t* to) { return m_assembler.computeJumpType(record, from, to); }
    void recordLinkOffsets(int32_t regionStart, int32_t regionEnd, int32_t offset) {return m_assembler.recordLinkOffsets(regionStart, regionEnd, offset); }
    int jumpSizeDelta(JumpType jumpType, JumpLinkType jumpLinkType) { return m_assembler.jumpSizeDelta(jumpType, jumpLinkType); }
    void link(LinkRecord& record, uint8_t* from, uint8_t* to) { return m_assembler.link(record, from, to); }
    // Internal addressing-mode descriptor: either base + immediate offset
    // (HasOffset) or base + index*scale (HasIndex), mirroring the two
    // operand forms ARMv7 load/store instructions accept.
    struct ArmAddress {
        enum AddressType {
            HasOffset,
            HasIndex,
        } type;
        RegisterID base;
        // Only the member matching 'type' is meaningful.
        union {
            int32_t offset;
            struct {
                RegisterID index;
                Scale scale;
            };
        } u;

        explicit ArmAddress(RegisterID base, int32_t offset = 0)
            : type(HasOffset)
            , base(base)
        {
            u.offset = offset;
        }

        explicit ArmAddress(RegisterID base, RegisterID index, Scale scale = TimesOne)
            : type(HasIndex)
            , base(base)
        {
            u.index = index;
            u.scale = scale;
        }
    };
public:
    typedef ARMRegisters::FPDoubleRegisterID FPRegisterID;

    // Pointer-sized scaling: ARMv7 pointers are 4 bytes.
    static const Scale ScalePtr = TimesFour;

    // Integer comparison conditions, mapped onto ARM condition codes.
    enum RelationalCondition {
        Equal = ARMv7Assembler::ConditionEQ,
        NotEqual = ARMv7Assembler::ConditionNE,
        Above = ARMv7Assembler::ConditionHI,
        AboveOrEqual = ARMv7Assembler::ConditionHS,
        Below = ARMv7Assembler::ConditionLO,
        BelowOrEqual = ARMv7Assembler::ConditionLS,
        GreaterThan = ARMv7Assembler::ConditionGT,
        GreaterThanOrEqual = ARMv7Assembler::ConditionGE,
        LessThan = ARMv7Assembler::ConditionLT,
        LessThanOrEqual = ARMv7Assembler::ConditionLE
    };

    // Conditions on the flags set by a flag-setting arithmetic op.
    enum ResultCondition {
        Overflow = ARMv7Assembler::ConditionVS,
        Signed = ARMv7Assembler::ConditionMI,
        PositiveOrZero = ARMv7Assembler::ConditionPL,
        Zero = ARMv7Assembler::ConditionEQ,
        NonZero = ARMv7Assembler::ConditionNE
    };

    // Floating-point comparison conditions (flags from VFP compare).
    enum DoubleCondition {
        // These conditions will only evaluate to true if the comparison is ordered - i.e. neither operand is NaN.
        DoubleEqual = ARMv7Assembler::ConditionEQ,
        DoubleNotEqual = ARMv7Assembler::ConditionVC, // Not the right flag! check for this & handle differently.
        DoubleGreaterThan = ARMv7Assembler::ConditionGT,
        DoubleGreaterThanOrEqual = ARMv7Assembler::ConditionGE,
        DoubleLessThan = ARMv7Assembler::ConditionLO,
        DoubleLessThanOrEqual = ARMv7Assembler::ConditionLS,
        // If either operand is NaN, these conditions always evaluate to true.
        DoubleEqualOrUnordered = ARMv7Assembler::ConditionVS, // Not the right flag! check for this & handle differently.
        DoubleNotEqualOrUnordered = ARMv7Assembler::ConditionNE,
        DoubleGreaterThanOrUnordered = ARMv7Assembler::ConditionHI,
        DoubleGreaterThanOrEqualOrUnordered = ARMv7Assembler::ConditionHS,
        DoubleLessThanOrUnordered = ARMv7Assembler::ConditionLT,
        DoubleLessThanOrEqualOrUnordered = ARMv7Assembler::ConditionLE,
    };

    static const RegisterID stackPointerRegister = ARMRegisters::sp;
    static const RegisterID linkRegister = ARMRegisters::lr;

    // Integer arithmetic operations:
    //
    // Operations are typically two operand - operation(source, srcDst)
    // For many operations the source may be an TrustedImm32, the srcDst operand
    // may often be a memory location (explictly described using an Address
    // object).
    // dest += src
    void add32(RegisterID src, RegisterID dest)
    {
        m_assembler.add(dest, dest, src);
    }

    // dest += imm
    void add32(TrustedImm32 imm, RegisterID dest)
    {
        add32(imm, dest, dest);
    }

    // dest += *src.m_ptr (clobbers dataTempRegister)
    void add32(AbsoluteAddress src, RegisterID dest)
    {
        load32(src.m_ptr, dataTempRegister);
        add32(dataTempRegister, dest);
    }

    // dest = src + imm. Uses a single ADD when the immediate is encodable;
    // otherwise materializes it in dataTempRegister first.
    void add32(TrustedImm32 imm, RegisterID src, RegisterID dest)
    {
        ARMThumbImmediate armImm = ARMThumbImmediate::makeUInt12OrEncodedImm(imm.m_value);
        if (armImm.isValid())
            m_assembler.add(dest, src, armImm);
        else {
            move(imm, dataTempRegister);
            m_assembler.add(dest, src, dataTempRegister);
        }
    }
  156. void add32(TrustedImm32 imm, Address address)
  157. {
  158. load32(address, dataTempRegister);
  159. ARMThumbImmediate armImm = ARMThumbImmediate::makeUInt12OrEncodedImm(imm.m_value);
  160. if (armImm.isValid())
  161. m_assembler.add(dataTempRegister, dataTempRegister, armImm);
  162. else {
  163. // Hrrrm, since dataTempRegister holds the data loaded,
  164. // use addressTempRegister to hold the immediate.
  165. move(imm, addressTempRegister);
  166. m_assembler.add(dataTempRegister, dataTempRegister, addressTempRegister);
  167. }
  168. store32(dataTempRegister, address);
  169. }
    // dest += *src (clobbers dataTempRegister)
    void add32(Address src, RegisterID dest)
    {
        load32(src, dataTempRegister);
        add32(dataTempRegister, dest);
    }

    // *address.m_ptr += imm (read-modify-write at an absolute address).
    void add32(TrustedImm32 imm, AbsoluteAddress address)
    {
        load32(address.m_ptr, dataTempRegister);
        ARMThumbImmediate armImm = ARMThumbImmediate::makeUInt12OrEncodedImm(imm.m_value);
        if (armImm.isValid())
            m_assembler.add(dataTempRegister, dataTempRegister, armImm);
        else {
            // Hrrrm, since dataTempRegister holds the data loaded,
            // use addressTempRegister to hold the immediate.
            move(imm, addressTempRegister);
            m_assembler.add(dataTempRegister, dataTempRegister, addressTempRegister);
        }
        store32(dataTempRegister, address.m_ptr);
    }

    // 64-bit add of a sign-extended 32-bit immediate to the two words at an
    // absolute address. The low word is added with add_S (sets carry); the
    // high word (at offset 4) then adds the sign extension of imm with
    // carry. imm.m_value >> 31 yields 0 or -1 (relies on arithmetic shift of
    // a negative int, which all supported compilers provide).
    void add64(TrustedImm32 imm, AbsoluteAddress address)
    {
        move(TrustedImmPtr(address.m_ptr), addressTempRegister);
        m_assembler.ldr(dataTempRegister, addressTempRegister, ARMThumbImmediate::makeUInt12(0));
        ARMThumbImmediate armImm = ARMThumbImmediate::makeEncodedImm(imm.m_value);
        if (armImm.isValid())
            m_assembler.add_S(dataTempRegister, dataTempRegister, armImm);
        else {
            move(imm, addressTempRegister);
            m_assembler.add_S(dataTempRegister, dataTempRegister, addressTempRegister);
            // move() clobbered addressTempRegister; re-materialize the base.
            move(TrustedImmPtr(address.m_ptr), addressTempRegister);
        }
        m_assembler.str(dataTempRegister, addressTempRegister, ARMThumbImmediate::makeUInt12(0));
        m_assembler.ldr(dataTempRegister, addressTempRegister, ARMThumbImmediate::makeUInt12(4));
        m_assembler.adc(dataTempRegister, dataTempRegister, ARMThumbImmediate::makeEncodedImm(imm.m_value >> 31));
        m_assembler.str(dataTempRegister, addressTempRegister, ARMThumbImmediate::makeUInt12(4));
    }
    // dest = op1 & op2 (ARM_and avoids colliding with the 'and' keyword).
    void and32(RegisterID op1, RegisterID op2, RegisterID dest)
    {
        m_assembler.ARM_and(dest, op1, op2);
    }
  210. void and32(TrustedImm32 imm, RegisterID src, RegisterID dest)
  211. {
  212. ARMThumbImmediate armImm = ARMThumbImmediate::makeEncodedImm(imm.m_value);
  213. if (armImm.isValid())
  214. m_assembler.ARM_and(dest, src, armImm);
  215. else {
  216. move(imm, dataTempRegister);
  217. m_assembler.ARM_and(dest, src, dataTempRegister);
  218. }
  219. }
    // dest &= src
    void and32(RegisterID src, RegisterID dest)
    {
        and32(dest, src, dest);
    }

    // dest &= imm
    void and32(TrustedImm32 imm, RegisterID dest)
    {
        and32(imm, dest, dest);
    }

    // dest &= *src (clobbers dataTempRegister)
    void and32(Address src, RegisterID dest)
    {
        load32(src, dataTempRegister);
        and32(dataTempRegister, dest);
    }

    // dest = number of leading zero bits in src (CLZ instruction).
    void countLeadingZeros32(RegisterID src, RegisterID dest)
    {
        m_assembler.clz(dest, src);
    }
  237. void lshift32(RegisterID src, RegisterID shiftAmount, RegisterID dest)
  238. {
  239. // Clamp the shift to the range 0..31
  240. ARMThumbImmediate armImm = ARMThumbImmediate::makeEncodedImm(0x1f);
  241. ASSERT(armImm.isValid());
  242. m_assembler.ARM_and(dataTempRegister, shiftAmount, armImm);
  243. m_assembler.lsl(dest, src, dataTempRegister);
  244. }
    // dest = src << (imm & 31)
    void lshift32(RegisterID src, TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.lsl(dest, src, imm.m_value & 0x1f);
    }

    // dest <<= shiftAmount
    void lshift32(RegisterID shiftAmount, RegisterID dest)
    {
        lshift32(dest, shiftAmount, dest);
    }

    // dest <<= imm
    void lshift32(TrustedImm32 imm, RegisterID dest)
    {
        lshift32(dest, imm, dest);
    }
    // dest *= src. SMULL writes a 64-bit product; the low 32 bits land in
    // dest and the high bits are discarded into dataTempRegister (which also
    // leaves them available for overflow checks by callers of branchMul-style
    // helpers elsewhere — NOTE(review): confirmed only for the low-word
    // result here).
    void mul32(RegisterID src, RegisterID dest)
    {
        m_assembler.smull(dest, dataTempRegister, dest, src);
    }

    // dest = src * imm (immediate materialized in dataTempRegister).
    void mul32(TrustedImm32 imm, RegisterID src, RegisterID dest)
    {
        move(imm, dataTempRegister);
        m_assembler.smull(dest, dataTempRegister, src, dataTempRegister);
    }

    // srcDest = -srcDest
    void neg32(RegisterID srcDest)
    {
        m_assembler.neg(srcDest, srcDest);
    }

    // dest |= src
    void or32(RegisterID src, RegisterID dest)
    {
        m_assembler.orr(dest, dest, src);
    }

    // *dest.m_ptr |= src (read-modify-write; clobbers both temp registers).
    void or32(RegisterID src, AbsoluteAddress dest)
    {
        move(TrustedImmPtr(dest.m_ptr), addressTempRegister);
        load32(addressTempRegister, dataTempRegister);
        or32(src, dataTempRegister);
        store32(dataTempRegister, addressTempRegister);
    }

    // dest |= imm
    void or32(TrustedImm32 imm, RegisterID dest)
    {
        or32(imm, dest, dest);
    }

    // dest = op1 | op2
    void or32(RegisterID op1, RegisterID op2, RegisterID dest)
    {
        m_assembler.orr(dest, op1, op2);
    }
  289. void or32(TrustedImm32 imm, RegisterID src, RegisterID dest)
  290. {
  291. ARMThumbImmediate armImm = ARMThumbImmediate::makeEncodedImm(imm.m_value);
  292. if (armImm.isValid())
  293. m_assembler.orr(dest, src, armImm);
  294. else {
  295. move(imm, dataTempRegister);
  296. m_assembler.orr(dest, src, dataTempRegister);
  297. }
  298. }
    // dest = src >> (shiftAmount & 31), arithmetic (sign-preserving).
    void rshift32(RegisterID src, RegisterID shiftAmount, RegisterID dest)
    {
        // Clamp the shift to the range 0..31
        ARMThumbImmediate armImm = ARMThumbImmediate::makeEncodedImm(0x1f);
        ASSERT(armImm.isValid());
        m_assembler.ARM_and(dataTempRegister, shiftAmount, armImm);
        m_assembler.asr(dest, src, dataTempRegister);
    }

    // dest = src >> (imm & 31), arithmetic.
    void rshift32(RegisterID src, TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.asr(dest, src, imm.m_value & 0x1f);
    }

    void rshift32(RegisterID shiftAmount, RegisterID dest)
    {
        rshift32(dest, shiftAmount, dest);
    }

    void rshift32(TrustedImm32 imm, RegisterID dest)
    {
        rshift32(dest, imm, dest);
    }

    // dest = src >>> (shiftAmount & 31), logical (zero-filling).
    void urshift32(RegisterID src, RegisterID shiftAmount, RegisterID dest)
    {
        // Clamp the shift to the range 0..31
        ARMThumbImmediate armImm = ARMThumbImmediate::makeEncodedImm(0x1f);
        ASSERT(armImm.isValid());
        m_assembler.ARM_and(dataTempRegister, shiftAmount, armImm);
        m_assembler.lsr(dest, src, dataTempRegister);
    }

    // dest = src >>> (imm & 31), logical.
    void urshift32(RegisterID src, TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.lsr(dest, src, imm.m_value & 0x1f);
    }

    void urshift32(RegisterID shiftAmount, RegisterID dest)
    {
        urshift32(dest, shiftAmount, dest);
    }

    void urshift32(TrustedImm32 imm, RegisterID dest)
    {
        urshift32(dest, imm, dest);
    }
    // dest -= src
    void sub32(RegisterID src, RegisterID dest)
    {
        m_assembler.sub(dest, dest, src);
    }
  343. void sub32(TrustedImm32 imm, RegisterID dest)
  344. {
  345. ARMThumbImmediate armImm = ARMThumbImmediate::makeUInt12OrEncodedImm(imm.m_value);
  346. if (armImm.isValid())
  347. m_assembler.sub(dest, dest, armImm);
  348. else {
  349. move(imm, dataTempRegister);
  350. m_assembler.sub(dest, dest, dataTempRegister);
  351. }
  352. }
    // *address -= imm (read-modify-write through dataTempRegister).
    void sub32(TrustedImm32 imm, Address address)
    {
        load32(address, dataTempRegister);
        ARMThumbImmediate armImm = ARMThumbImmediate::makeUInt12OrEncodedImm(imm.m_value);
        if (armImm.isValid())
            m_assembler.sub(dataTempRegister, dataTempRegister, armImm);
        else {
            // Hrrrm, since dataTempRegister holds the data loaded,
            // use addressTempRegister to hold the immediate.
            move(imm, addressTempRegister);
            m_assembler.sub(dataTempRegister, dataTempRegister, addressTempRegister);
        }
        store32(dataTempRegister, address);
    }

    // dest -= *src (clobbers dataTempRegister)
    void sub32(Address src, RegisterID dest)
    {
        load32(src, dataTempRegister);
        sub32(dataTempRegister, dest);
    }

    // *address.m_ptr -= imm (read-modify-write at an absolute address).
    void sub32(TrustedImm32 imm, AbsoluteAddress address)
    {
        load32(address.m_ptr, dataTempRegister);
        ARMThumbImmediate armImm = ARMThumbImmediate::makeUInt12OrEncodedImm(imm.m_value);
        if (armImm.isValid())
            m_assembler.sub(dataTempRegister, dataTempRegister, armImm);
        else {
            // Hrrrm, since dataTempRegister holds the data loaded,
            // use addressTempRegister to hold the immediate.
            move(imm, addressTempRegister);
            m_assembler.sub(dataTempRegister, dataTempRegister, addressTempRegister);
        }
        store32(dataTempRegister, address.m_ptr);
    }
    // dest = op1 ^ op2
    void xor32(RegisterID op1, RegisterID op2, RegisterID dest)
    {
        m_assembler.eor(dest, op1, op2);
    }
  390. void xor32(TrustedImm32 imm, RegisterID src, RegisterID dest)
  391. {
  392. if (imm.m_value == -1) {
  393. m_assembler.mvn(dest, src);
  394. return;
  395. }
  396. ARMThumbImmediate armImm = ARMThumbImmediate::makeEncodedImm(imm.m_value);
  397. if (armImm.isValid())
  398. m_assembler.eor(dest, src, armImm);
  399. else {
  400. move(imm, dataTempRegister);
  401. m_assembler.eor(dest, src, dataTempRegister);
  402. }
  403. }
    // dest ^= src
    void xor32(RegisterID src, RegisterID dest)
    {
        xor32(dest, src, dest);
    }

    // dest ^= imm (-1 special-cased to MVN, as in the three-operand form).
    void xor32(TrustedImm32 imm, RegisterID dest)
    {
        if (imm.m_value == -1)
            m_assembler.mvn(dest, dest);
        else
            xor32(imm, dest, dest);
    }

    // Memory access operations:
    //
    // Loads are of the form load(address, destination) and stores of the form
    // store(source, address). The source for a store may be an TrustedImm32. Address
    // operand objects to loads and store will be implicitly constructed if a
    // register is passed.

private:
  422. void load32(ArmAddress address, RegisterID dest)
  423. {
  424. if (address.type == ArmAddress::HasIndex)
  425. m_assembler.ldr(dest, address.base, address.u.index, address.u.scale);
  426. else if (address.u.offset >= 0) {
  427. ARMThumbImmediate armImm = ARMThumbImmediate::makeUInt12(address.u.offset);
  428. ASSERT(armImm.isValid());
  429. m_assembler.ldr(dest, address.base, armImm);
  430. } else {
  431. ASSERT(address.u.offset >= -255);
  432. m_assembler.ldr(dest, address.base, address.u.offset, true, false);
  433. }
  434. }
    // Zero-extending halfword load; same addressing forms as load32.
    void load16(ArmAddress address, RegisterID dest)
    {
        if (address.type == ArmAddress::HasIndex)
            m_assembler.ldrh(dest, address.base, address.u.index, address.u.scale);
        else if (address.u.offset >= 0) {
            ARMThumbImmediate armImm = ARMThumbImmediate::makeUInt12(address.u.offset);
            ASSERT(armImm.isValid());
            m_assembler.ldrh(dest, address.base, armImm);
        } else {
            ASSERT(address.u.offset >= -255);
            m_assembler.ldrh(dest, address.base, address.u.offset, true, false);
        }
    }

    // Sign-extending halfword load; only the base+index form is supported.
    void load16Signed(ArmAddress address, RegisterID dest)
    {
        ASSERT(address.type == ArmAddress::HasIndex);
        m_assembler.ldrsh(dest, address.base, address.u.index, address.u.scale);
    }

    // Zero-extending byte load; same addressing forms as load32.
    void load8(ArmAddress address, RegisterID dest)
    {
        if (address.type == ArmAddress::HasIndex)
            m_assembler.ldrb(dest, address.base, address.u.index, address.u.scale);
        else if (address.u.offset >= 0) {
            ARMThumbImmediate armImm = ARMThumbImmediate::makeUInt12(address.u.offset);
            ASSERT(armImm.isValid());
            m_assembler.ldrb(dest, address.base, armImm);
        } else {
            ASSERT(address.u.offset >= -255);
            m_assembler.ldrb(dest, address.base, address.u.offset, true, false);
        }
    }

    // Sign-extending byte load; only the base+index form is supported.
    void load8Signed(ArmAddress address, RegisterID dest)
    {
        ASSERT(address.type == ArmAddress::HasIndex);
        m_assembler.ldrsb(dest, address.base, address.u.index, address.u.scale);
    }
protected:
    // Word store; same three addressing forms as load32.
    void store32(RegisterID src, ArmAddress address)
    {
        if (address.type == ArmAddress::HasIndex)
            m_assembler.str(src, address.base, address.u.index, address.u.scale);
        else if (address.u.offset >= 0) {
            ARMThumbImmediate armImm = ARMThumbImmediate::makeUInt12(address.u.offset);
            ASSERT(armImm.isValid());
            m_assembler.str(src, address.base, armImm);
        } else {
            ASSERT(address.u.offset >= -255);
            m_assembler.str(src, address.base, address.u.offset, true, false);
        }
    }

private:
    // Byte store; same addressing forms as store32.
    void store8(RegisterID src, ArmAddress address)
    {
        if (address.type == ArmAddress::HasIndex)
            m_assembler.strb(src, address.base, address.u.index, address.u.scale);
        else if (address.u.offset >= 0) {
            ARMThumbImmediate armImm = ARMThumbImmediate::makeUInt12(address.u.offset);
            ASSERT(armImm.isValid());
            m_assembler.strb(src, address.base, armImm);
        } else {
            ASSERT(address.u.offset >= -255);
            m_assembler.strb(src, address.base, address.u.offset, true, false);
        }
    }

    // Halfword store; same addressing forms as store32.
    void store16(RegisterID src, ArmAddress address)
    {
        if (address.type == ArmAddress::HasIndex)
            m_assembler.strh(src, address.base, address.u.index, address.u.scale);
        else if (address.u.offset >= 0) {
            ARMThumbImmediate armImm = ARMThumbImmediate::makeUInt12(address.u.offset);
            ASSERT(armImm.isValid());
            m_assembler.strh(src, address.base, armImm);
        } else {
            ASSERT(address.u.offset >= -255);
            m_assembler.strh(src, address.base, address.u.offset, true, false);
        }
    }
public:
    void load32(ImplicitAddress address, RegisterID dest)
    {
        load32(setupArmAddress(address), dest);
    }

    void load32(BaseIndex address, RegisterID dest)
    {
        load32(setupArmAddress(address), dest);
    }

    // Implemented as a plain word load; assumes the target tolerates
    // unaligned LDR (NOTE(review): standard for ARMv7, but confirm the
    // alignment-checking configuration this runs under).
    void load32WithUnalignedHalfWords(BaseIndex address, RegisterID dest)
    {
        load32(setupArmAddress(address), dest);
    }

    void load16Unaligned(BaseIndex address, RegisterID dest)
    {
        load16(setupArmAddress(address), dest);
    }

    // Load from an absolute address: materialize the pointer in
    // addressTempRegister, then load with a zero offset.
    void load32(const void* address, RegisterID dest)
    {
        move(TrustedImmPtr(address), addressTempRegister);
        m_assembler.ldr(dest, addressTempRegister, ARMThumbImmediate::makeUInt16(0));
    }

    // Emit a load that the linker can later convert in place; the offset
    // must fit the wide 8-bit-immediate encoding used for patching.
    ConvertibleLoadLabel convertibleLoadPtr(Address address, RegisterID dest)
    {
        ConvertibleLoadLabel result(this);
        ASSERT(address.offset >= 0 && address.offset <= 255);
        m_assembler.ldrWide8BitImmediate(dest, address.base, address.offset);
        return result;
    }
    void load8(ImplicitAddress address, RegisterID dest)
    {
        load8(setupArmAddress(address), dest);
    }

    // Signed byte load with immediate offset is not implemented on this
    // back end; only the base+index form exists (see load8Signed(BaseIndex)).
    void load8Signed(ImplicitAddress, RegisterID)
    {
        UNREACHABLE_FOR_PLATFORM();
    }

    void load8(BaseIndex address, RegisterID dest)
    {
        load8(setupArmAddress(address), dest);
    }

    void load8Signed(BaseIndex address, RegisterID dest)
    {
        load8Signed(setupArmAddress(address), dest);
    }
    // Load with a 32-bit offset that can be repatched later; the offset is
    // materialized via moveWithPatch and used as an index register.
    DataLabel32 load32WithAddressOffsetPatch(Address address, RegisterID dest)
    {
        DataLabel32 label = moveWithPatch(TrustedImm32(address.offset), dataTempRegister);
        load32(ArmAddress(address.base, dataTempRegister), dest);
        return label;
    }

    // Compact (single-instruction) patchable load; offset must already be
    // in the compact range (see isCompactPtrAlignedAddressOffset).
    DataLabelCompact load32WithCompactAddressOffsetPatch(Address address, RegisterID dest)
    {
        padBeforePatch();
        RegisterID base = address.base;
        DataLabelCompact label(this);
        ASSERT(isCompactPtrAlignedAddressOffset(address.offset));
        m_assembler.ldr(dest, base, address.offset, true, false);
        return label;
    }

    void load16(BaseIndex address, RegisterID dest)
    {
        m_assembler.ldrh(dest, makeBaseIndexBase(address), address.index, address.scale);
    }

    void load16Signed(BaseIndex address, RegisterID dest)
    {
        load16Signed(setupArmAddress(address), dest);
    }

    // Halfword load with immediate offset; falls back to a register offset
    // when the offset exceeds the 12-bit immediate range.
    void load16(ImplicitAddress address, RegisterID dest)
    {
        ARMThumbImmediate armImm = ARMThumbImmediate::makeUInt12(address.offset);
        if (armImm.isValid())
            m_assembler.ldrh(dest, address.base, armImm);
        else {
            move(TrustedImm32(address.offset), dataTempRegister);
            m_assembler.ldrh(dest, address.base, dataTempRegister);
        }
    }

    // Signed halfword load with immediate offset is not implemented; only
    // the base+index form exists (see load16Signed(BaseIndex)).
    void load16Signed(ImplicitAddress, RegisterID)
    {
        UNREACHABLE_FOR_PLATFORM();
    }
    // Store with a repatchable 32-bit offset (mirror of
    // load32WithAddressOffsetPatch).
    DataLabel32 store32WithAddressOffsetPatch(RegisterID src, Address address)
    {
        DataLabel32 label = moveWithPatch(TrustedImm32(address.offset), dataTempRegister);
        store32(src, ArmAddress(address.base, dataTempRegister));
        return label;
    }

    void store32(RegisterID src, ImplicitAddress address)
    {
        store32(src, setupArmAddress(address));
    }

    void store32(RegisterID src, BaseIndex address)
    {
        store32(src, setupArmAddress(address));
    }

    // Immediates must be materialized in a register before storing.
    void store32(TrustedImm32 imm, ImplicitAddress address)
    {
        move(imm, dataTempRegister);
        store32(dataTempRegister, setupArmAddress(address));
    }

    void store32(TrustedImm32 imm, BaseIndex address)
    {
        move(imm, dataTempRegister);
        store32(dataTempRegister, setupArmAddress(address));
    }

    // Store to an absolute address via addressTempRegister.
    void store32(RegisterID src, const void* address)
    {
        move(TrustedImmPtr(address), addressTempRegister);
        m_assembler.str(src, addressTempRegister, ARMThumbImmediate::makeUInt16(0));
    }

    void store32(TrustedImm32 imm, const void* address)
    {
        move(imm, dataTempRegister);
        store32(dataTempRegister, address);
    }

    void store8(RegisterID src, BaseIndex address)
    {
        store8(src, setupArmAddress(address));
    }

    void store8(RegisterID src, void* address)
    {
        move(TrustedImmPtr(address), addressTempRegister);
        store8(src, ArmAddress(addressTempRegister, 0));
    }

    void store8(TrustedImm32 imm, void* address)
    {
        move(imm, dataTempRegister);
        store8(dataTempRegister, address);
    }

    void store16(RegisterID src, BaseIndex address)
    {
        store16(src, setupArmAddress(address));
    }
    // Split a double FP register into two GPRs (low word in dest1, high word
    // in dest2). Possibly clobbers src, but not on this architecture.
    void moveDoubleToInts(FPRegisterID src, RegisterID dest1, RegisterID dest2)
    {
        m_assembler.vmov(dest1, dest2, src);
    }

    // Assemble a double from two GPRs; VMOV does this directly, so the
    // scratch FP register is unused here.
    void moveIntsToDouble(RegisterID src1, RegisterID src2, FPRegisterID dest, FPRegisterID scratch)
    {
        UNUSED_PARAM(scratch);
        m_assembler.vmov(dest, src1, src2);
    }

#if ENABLE(JIT_CONSTANT_BLINDING)
    // Decide whether an attacker-supplied constant must be blinded before
    // being baked into generated code on this architecture.
    static bool shouldBlindForSpecificArch(uint32_t value)
    {
        ARMThumbImmediate immediate = ARMThumbImmediate::makeEncodedImm(value);
        // Couldn't be encoded as an immediate, so assume it's untrusted.
        if (!immediate.isValid())
            return true;
        // If we can encode the immediate, we have less than 16 attacker
        // controlled bits.
        if (immediate.isEncodedImm())
            return false;
        // Don't let any more than 12 bits of an instruction word
        // be controlled by an attacker.
        return !immediate.isUInt12();
    }
#endif

    // Floating-point operations:
    static bool supportsFloatingPoint() { return true; }
    static bool supportsFloatingPointTruncate() { return true; }
    static bool supportsFloatingPointSqrt() { return true; }
    static bool supportsFloatingPointAbs() { return true; }
    // Load a double from base + offset, rebasing through addressTempRegister
    // when the offset cannot be encoded in a VLDR.
    void loadDouble(ImplicitAddress address, FPRegisterID dest)
    {
        RegisterID base = address.base;
        int32_t offset = address.offset;
        // Arm vfp addresses can be offset by a 9-bit ones-comp immediate, left shifted by 2.
        if ((offset & 3) || (offset > (255 * 4)) || (offset < -(255 * 4))) {
            add32(TrustedImm32(offset), base, addressTempRegister);
            base = addressTempRegister;
            offset = 0;
        }
        m_assembler.vldr(dest, base, offset);
    }

    // Single-precision counterpart of loadDouble(ImplicitAddress, ...).
    void loadFloat(ImplicitAddress address, FPRegisterID dest)
    {
        RegisterID base = address.base;
        int32_t offset = address.offset;
        // Arm vfp addresses can be offset by a 9-bit ones-comp immediate, left shifted by 2.
        if ((offset & 3) || (offset > (255 * 4)) || (offset < -(255 * 4))) {
            add32(TrustedImm32(offset), base, addressTempRegister);
            base = addressTempRegister;
            offset = 0;
        }
        m_assembler.flds(ARMRegisters::asSingle(dest), base, offset);
    }

    // BaseIndex forms compute base + (index << scale) into addressTempRegister
    // and defer to the ImplicitAddress overload for the offset handling.
    void loadDouble(BaseIndex address, FPRegisterID dest)
    {
        move(address.index, addressTempRegister);
        lshift32(TrustedImm32(address.scale), addressTempRegister);
        add32(address.base, addressTempRegister);
        loadDouble(Address(addressTempRegister, address.offset), dest);
    }

    void loadFloat(BaseIndex address, FPRegisterID dest)
    {
        move(address.index, addressTempRegister);
        lshift32(TrustedImm32(address.scale), addressTempRegister);
        add32(address.base, addressTempRegister);
        loadFloat(Address(addressTempRegister, address.offset), dest);
    }

    // Register-to-register double move; elided when src == dest.
    void moveDouble(FPRegisterID src, FPRegisterID dest)
    {
        if (src != dest)
            m_assembler.vmov(dest, src);
    }

    // Load a double from an absolute address.
    void loadDouble(const void* address, FPRegisterID dest)
    {
        move(TrustedImmPtr(address), addressTempRegister);
        m_assembler.vldr(dest, addressTempRegister, 0);
    }

    // Store a double to base + offset; same VFP offset-range rebasing as the
    // load path above.
    void storeDouble(FPRegisterID src, ImplicitAddress address)
    {
        RegisterID base = address.base;
        int32_t offset = address.offset;
        // Arm vfp addresses can be offset by a 9-bit ones-comp immediate, left shifted by 2.
        if ((offset & 3) || (offset > (255 * 4)) || (offset < -(255 * 4))) {
            add32(TrustedImm32(offset), base, addressTempRegister);
            base = addressTempRegister;
            offset = 0;
        }
        m_assembler.vstr(src, base, offset);
    }

    void storeFloat(FPRegisterID src, ImplicitAddress address)
    {
        RegisterID base = address.base;
        int32_t offset = address.offset;
        // Arm vfp addresses can be offset by a 9-bit ones-comp immediate, left shifted by 2.
        if ((offset & 3) || (offset > (255 * 4)) || (offset < -(255 * 4))) {
            add32(TrustedImm32(offset), base, addressTempRegister);
            base = addressTempRegister;
            offset = 0;
        }
        m_assembler.fsts(ARMRegisters::asSingle(src), base, offset);
    }

    // Store a double to an absolute address.
    void storeDouble(FPRegisterID src, const void* address)
    {
        move(TrustedImmPtr(address), addressTempRegister);
        storeDouble(src, addressTempRegister);
    }

    void storeDouble(FPRegisterID src, BaseIndex address)
    {
        move(address.index, addressTempRegister);
        lshift32(TrustedImm32(address.scale), addressTempRegister);
        add32(address.base, addressTempRegister);
        storeDouble(src, Address(addressTempRegister, address.offset));
    }

    void storeFloat(FPRegisterID src, BaseIndex address)
    {
        move(address.index, addressTempRegister);
        lshift32(TrustedImm32(address.scale), addressTempRegister);
        add32(address.base, addressTempRegister);
        storeFloat(src, Address(addressTempRegister, address.offset));
    }
    // dest += src (double).
    void addDouble(FPRegisterID src, FPRegisterID dest)
    {
        m_assembler.vadd(dest, dest, src);
    }

    // Memory operands are staged through fpTempRegister.
    void addDouble(Address src, FPRegisterID dest)
    {
        loadDouble(src, fpTempRegister);
        addDouble(fpTempRegister, dest);
    }

    // dest = op1 + op2 (double).
    void addDouble(FPRegisterID op1, FPRegisterID op2, FPRegisterID dest)
    {
        m_assembler.vadd(dest, op1, op2);
    }

    void addDouble(AbsoluteAddress address, FPRegisterID dest)
    {
        loadDouble(address.m_ptr, fpTempRegister);
        m_assembler.vadd(dest, dest, fpTempRegister);
    }

    // dest /= src (double).
    void divDouble(FPRegisterID src, FPRegisterID dest)
    {
        m_assembler.vdiv(dest, dest, src);
    }

    void divDouble(FPRegisterID op1, FPRegisterID op2, FPRegisterID dest)
    {
        m_assembler.vdiv(dest, op1, op2);
    }

    // dest -= src (double).
    void subDouble(FPRegisterID src, FPRegisterID dest)
    {
        m_assembler.vsub(dest, dest, src);
    }

    void subDouble(Address src, FPRegisterID dest)
    {
        loadDouble(src, fpTempRegister);
        subDouble(fpTempRegister, dest);
    }

    void subDouble(FPRegisterID op1, FPRegisterID op2, FPRegisterID dest)
    {
        m_assembler.vsub(dest, op1, op2);
    }

    // dest *= src (double).
    void mulDouble(FPRegisterID src, FPRegisterID dest)
    {
        m_assembler.vmul(dest, dest, src);
    }

    void mulDouble(Address src, FPRegisterID dest)
    {
        loadDouble(src, fpTempRegister);
        mulDouble(fpTempRegister, dest);
    }

    void mulDouble(FPRegisterID op1, FPRegisterID op2, FPRegisterID dest)
    {
        m_assembler.vmul(dest, op1, op2);
    }

    void sqrtDouble(FPRegisterID src, FPRegisterID dest)
    {
        m_assembler.vsqrt(dest, src);
    }

    void absDouble(FPRegisterID src, FPRegisterID dest)
    {
        m_assembler.vabs(dest, src);
    }

    void negateDouble(FPRegisterID src, FPRegisterID dest)
    {
        m_assembler.vneg(dest, src);
    }

    // int32 -> double conversion; the integer is first moved into the low
    // half of fpTempRegister, then converted in place.
    void convertInt32ToDouble(RegisterID src, FPRegisterID dest)
    {
        m_assembler.vmov(fpTempRegister, src, src);
        m_assembler.vcvt_signedToFloatingPoint(dest, fpTempRegisterAsSingle());
    }

    void convertInt32ToDouble(Address address, FPRegisterID dest)
    {
        // Fixme: load directly into the fpr!
        load32(address, dataTempRegister);
        m_assembler.vmov(fpTempRegister, dataTempRegister, dataTempRegister);
        m_assembler.vcvt_signedToFloatingPoint(dest, fpTempRegisterAsSingle());
    }

    void convertInt32ToDouble(AbsoluteAddress address, FPRegisterID dest)
    {
        // Fixme: load directly into the fpr!
        load32(address.m_ptr, dataTempRegister);
        m_assembler.vmov(fpTempRegister, dataTempRegister, dataTempRegister);
        m_assembler.vcvt_signedToFloatingPoint(dest, fpTempRegisterAsSingle());
    }

    // float <-> double conversions.
    void convertFloatToDouble(FPRegisterID src, FPRegisterID dst)
    {
        m_assembler.vcvtds(dst, ARMRegisters::asSingle(src));
    }

    void convertDoubleToFloat(FPRegisterID src, FPRegisterID dst)
    {
        m_assembler.vcvtsd(ARMRegisters::asSingle(dst), src);
    }
    // Compare two doubles (VCMP then VMRS to transfer the FP flags) and
    // branch on the given double condition, compensating for the way NaNs
    // set the V (unordered) flag.
    Jump branchDouble(DoubleCondition cond, FPRegisterID left, FPRegisterID right)
    {
        m_assembler.vcmp(left, right);
        m_assembler.vmrs();
        if (cond == DoubleNotEqual) {
            // ConditionNE jumps if NotEqual *or* unordered - force the unordered cases not to jump.
            Jump unordered = makeBranch(ARMv7Assembler::ConditionVS);
            Jump result = makeBranch(ARMv7Assembler::ConditionNE);
            unordered.link(this);
            return result;
        }
        if (cond == DoubleEqualOrUnordered) {
            Jump unordered = makeBranch(ARMv7Assembler::ConditionVS);
            Jump notEqual = makeBranch(ARMv7Assembler::ConditionNE);
            unordered.link(this);
            // We get here if either unordered or equal.
            Jump result = jump();
            notEqual.link(this);
            return result;
        }
        return makeBranch(cond);
    }

    enum BranchTruncateType { BranchIfTruncateFailed, BranchIfTruncateSuccessful };

    // Truncate a double to int32 and branch on success/failure of the
    // truncation (failure = the value was out of int32 range).
    Jump branchTruncateDoubleToInt32(FPRegisterID src, RegisterID dest, BranchTruncateType branchType = BranchIfTruncateFailed)
    {
        // Convert into dest.
        m_assembler.vcvt_floatingPointToSigned(fpTempRegisterAsSingle(), src);
        m_assembler.vmov(dest, fpTempRegisterAsSingle());
        // Calculate 2x dest. If the value potentially underflowed, it will have
        // clamped to 0x80000000, so 2x dest is zero in this case. In the case of
        // overflow the result will be equal to -2.
        Jump underflow = branchAdd32(Zero, dest, dest, dataTempRegister);
        Jump noOverflow = branch32(NotEqual, dataTempRegister, TrustedImm32(-2));
        // For BranchIfTruncateSuccessful, we branch if 'noOverflow' jumps.
        underflow.link(this);
        if (branchType == BranchIfTruncateSuccessful)
            return noOverflow;
        // We'll reach the current point in the code on failure, so plant a
        // jump here & link the success case.
        Jump failure = jump();
        noOverflow.link(this);
        return failure;
    }

    // NOTE(review): this uses the *signed* vcvt, so inputs >= 2^31 clamp to
    // 0x7fffffff and are reported as failures, and the GreaterThanOrEqual
    // check rejects negative results - confirm this restricted uint32 range
    // is what callers expect.
    Jump branchTruncateDoubleToUint32(FPRegisterID src, RegisterID dest, BranchTruncateType branchType = BranchIfTruncateFailed)
    {
        m_assembler.vcvt_floatingPointToSigned(fpTempRegisterAsSingle(), src);
        m_assembler.vmov(dest, fpTempRegisterAsSingle());
        Jump overflow = branch32(Equal, dest, TrustedImm32(0x7fffffff));
        Jump success = branch32(GreaterThanOrEqual, dest, TrustedImm32(0));
        overflow.link(this);
        if (branchType == BranchIfTruncateSuccessful)
            return success;
        Jump failure = jump();
        success.link(this);
        return failure;
    }

    // Result is undefined if the value is outside of the integer range.
    void truncateDoubleToInt32(FPRegisterID src, RegisterID dest)
    {
        m_assembler.vcvt_floatingPointToSigned(fpTempRegisterAsSingle(), src);
        m_assembler.vmov(dest, fpTempRegisterAsSingle());
    }

    void truncateDoubleToUint32(FPRegisterID src, RegisterID dest)
    {
        m_assembler.vcvt_floatingPointToUnsigned(fpTempRegisterAsSingle(), src);
        m_assembler.vmov(dest, fpTempRegisterAsSingle());
    }

    // Convert 'src' to an integer, and places the resulting 'dest'.
    // If the result is not representable as a 32 bit value, branch.
    // May also branch for some values that are representable in 32 bits
    // (specifically, in this case, 0).
    void branchConvertDoubleToInt32(FPRegisterID src, RegisterID dest, JumpList& failureCases, FPRegisterID, bool negZeroCheck = true)
    {
        m_assembler.vcvt_floatingPointToSigned(fpTempRegisterAsSingle(), src);
        m_assembler.vmov(dest, fpTempRegisterAsSingle());
        // Convert the integer result back to float & compare to the original value - if not equal or unordered (NaN) then jump.
        m_assembler.vcvt_signedToFloatingPoint(fpTempRegister, fpTempRegisterAsSingle());
        failureCases.append(branchDouble(DoubleNotEqualOrUnordered, src, fpTempRegister));
        // If the result is zero, it might have been -0.0, and the double comparison won't catch this!
        if (negZeroCheck)
            failureCases.append(branchTest32(Zero, dest));
    }

    // Branch if reg != 0.0; NaN (unordered, VS set) is forced not to branch.
    Jump branchDoubleNonZero(FPRegisterID reg, FPRegisterID)
    {
        m_assembler.vcmpz(reg);
        m_assembler.vmrs();
        Jump unordered = makeBranch(ARMv7Assembler::ConditionVS);
        Jump result = makeBranch(ARMv7Assembler::ConditionNE);
        unordered.link(this);
        return result;
    }

    // Branch if reg == 0.0 or reg is NaN.
    Jump branchDoubleZeroOrNaN(FPRegisterID reg, FPRegisterID)
    {
        m_assembler.vcmpz(reg);
        m_assembler.vmrs();
        Jump unordered = makeBranch(ARMv7Assembler::ConditionVS);
        Jump notEqual = makeBranch(ARMv7Assembler::ConditionNE);
        unordered.link(this);
        // We get here if either unordered or equal.
        Jump result = jump();
        notEqual.link(this);
        return result;
    }
    // Stack manipulation operations:
    //
    // The ABI is assumed to provide a stack abstraction to memory,
    // containing machine word sized units of data. Push and pop
    // operations add and remove a single register sized unit of data
    // to or from the stack. Peek and poke operations read or write
    // values on the stack, without moving the current stack position.

    // Pop one word: load from [sp], then sp += sizeof(void*).
    void pop(RegisterID dest)
    {
        // store postindexed with writeback
        m_assembler.ldr(dest, ARMRegisters::sp, sizeof(void*), false, true);
    }

    // Push one word: sp -= sizeof(void*), then store to [sp].
    void push(RegisterID src)
    {
        // store preindexed with writeback
        m_assembler.str(src, ARMRegisters::sp, -sizeof(void*), true, true);
    }

    // Memory and immediate pushes stage the value in dataTempRegister.
    void push(Address address)
    {
        load32(address, dataTempRegister);
        push(dataTempRegister);
    }

    void push(TrustedImm32 imm)
    {
        move(imm, dataTempRegister);
        push(dataTempRegister);
    }
  989. // Register move operations:
  990. //
  991. // Move values in registers.
  992. void move(TrustedImm32 imm, RegisterID dest)
  993. {
  994. uint32_t value = imm.m_value;
  995. ARMThumbImmediate armImm = ARMThumbImmediate::makeEncodedImm(value);
  996. if (armImm.isValid())
  997. m_assembler.mov(dest, armImm);
  998. else if ((armImm = ARMThumbImmediate::makeEncodedImm(~value)).isValid())
  999. m_assembler.mvn(dest, armImm);
  1000. else {
  1001. m_assembler.mov(dest, ARMThumbImmediate::makeUInt16(value));
  1002. if (value & 0xffff0000)
  1003. m_assembler.movt(dest, ARMThumbImmediate::makeUInt16(value >> 16));
  1004. }
  1005. }
    // Register-to-register move; elided when src == dest.
    void move(RegisterID src, RegisterID dest)
    {
        if (src != dest)
            m_assembler.mov(dest, src);
    }

    // Pointers are 32-bit on this architecture, so reuse the imm32 path.
    void move(TrustedImmPtr imm, RegisterID dest)
    {
        move(TrustedImm32(imm), dest);
    }

    // Exchange two registers via dataTempRegister.
    void swap(RegisterID reg1, RegisterID reg2)
    {
        move(reg1, dataTempRegister);
        move(reg2, reg1);
        move(dataTempRegister, reg2);
    }

    // On a 32-bit target, int and pointer widths match, so both extensions
    // reduce to a plain move.
    void signExtend32ToPtr(RegisterID src, RegisterID dest)
    {
        move(src, dest);
    }

    void zeroExtend32ToPtr(RegisterID src, RegisterID dest)
    {
        move(src, dest);
    }

    // Invert a relational condition, e.g. == becomes !=, < becomes >=, etc.
    // Relies on inverse conditions differing only in the low bit of the
    // condition code.
    static RelationalCondition invert(RelationalCondition cond)
    {
        return static_cast<RelationalCondition>(cond ^ 1);
    }

    void nop()
    {
        m_assembler.nop();
    }

    // Overwrite the instruction(s) at instructionStart with a jump to
    // destination.
    static void replaceWithJump(CodeLocationLabel instructionStart, CodeLocationLabel destination)
    {
        ARMv7Assembler::replaceWithJump(instructionStart.dataLocation(), destination.dataLocation());
    }

    // Upper bound, in bytes, on the code replaceWithJump may write.
    static ptrdiff_t maxJumpReplacementSize()
    {
        return ARMv7Assembler::maxJumpReplacementSize();
    }
    // Forwards / external control flow operations:
    //
    // This set of jump and conditional branch operations return a Jump
    // object which may linked at a later point, allow forwards jump,
    // or jumps that will require external linkage (after the code has been
    // relocated).
    //
    // For branches, signed <, >, <= and >= are denoted as l, g, le, and ge
    // respectively, for unsigned comparisons the names b, a, be, and ae are
    // used (representing the names 'below' and 'above').
    //
    // Operands to the comparison are provided in the expected order, e.g.
    // jle32(reg1, TrustedImm32(5)) will branch if the value held in reg1, when
    // treated as a signed 32bit value, is less than or equal to 5.
    //
    // jz and jnz test whether the first operand is equal to zero, and take
    // an optional second operand of a mask under which to perform the test.
private:

    // Set the flags for a register/immediate comparison, choosing the
    // cheapest encoding: TST for zero, CMP, CMN of the negated immediate,
    // or CMP against the immediate materialized in dataTempRegister.
    // Should we be using TEQ for equal/not-equal?
    void compare32(RegisterID left, TrustedImm32 right)
    {
        int32_t imm = right.m_value;
        if (!imm)
            m_assembler.tst(left, left);
        else {
            ARMThumbImmediate armImm = ARMThumbImmediate::makeEncodedImm(imm);
            if (armImm.isValid())
                m_assembler.cmp(left, armImm);
            else if ((armImm = ARMThumbImmediate::makeEncodedImm(-imm)).isValid())
                m_assembler.cmn(left, armImm);
            else {
                move(TrustedImm32(imm), dataTempRegister);
                m_assembler.cmp(left, dataTempRegister);
            }
        }
    }

    // Set the flags for reg & mask; a mask of -1 degenerates to testing the
    // register against itself.
    void test32(RegisterID reg, TrustedImm32 mask)
    {
        int32_t imm = mask.m_value;
        if (imm == -1)
            m_assembler.tst(reg, reg);
        else {
            ARMThumbImmediate armImm = ARMThumbImmediate::makeEncodedImm(imm);
            if (armImm.isValid())
                m_assembler.tst(reg, armImm);
            else {
                move(mask, dataTempRegister);
                m_assembler.tst(reg, dataTempRegister);
            }
        }
    }

public:
    // Compare left against right and return a branch on the condition.
    Jump branch32(RelationalCondition cond, RegisterID left, RegisterID right)
    {
        m_assembler.cmp(left, right);
        return Jump(makeBranch(cond));
    }

    Jump branch32(RelationalCondition cond, RegisterID left, TrustedImm32 right)
    {
        compare32(left, right);
        return Jump(makeBranch(cond));
    }

    Jump branch32(RelationalCondition cond, RegisterID left, Address right)
    {
        load32(right, dataTempRegister);
        return branch32(cond, left, dataTempRegister);
    }

    Jump branch32(RelationalCondition cond, Address left, RegisterID right)
    {
        load32(left, dataTempRegister);
        return branch32(cond, dataTempRegister, right);
    }

    Jump branch32(RelationalCondition cond, Address left, TrustedImm32 right)
    {
        // use addressTempRegister in case the branch32 we call uses dataTempRegister. :-/
        load32(left, addressTempRegister);
        return branch32(cond, addressTempRegister, right);
    }

    Jump branch32(RelationalCondition cond, BaseIndex left, TrustedImm32 right)
    {
        // use addressTempRegister in case the branch32 we call uses dataTempRegister. :-/
        load32(left, addressTempRegister);
        return branch32(cond, addressTempRegister, right);
    }

    // As above, but the load tolerates a half-word-aligned address.
    Jump branch32WithUnalignedHalfWords(RelationalCondition cond, BaseIndex left, TrustedImm32 right)
    {
        // use addressTempRegister in case the branch32 we call uses dataTempRegister. :-/
        load32WithUnalignedHalfWords(left, addressTempRegister);
        return branch32(cond, addressTempRegister, right);
    }

    Jump branch32(RelationalCondition cond, AbsoluteAddress left, RegisterID right)
    {
        load32(left.m_ptr, dataTempRegister);
        return branch32(cond, dataTempRegister, right);
    }

    Jump branch32(RelationalCondition cond, AbsoluteAddress left, TrustedImm32 right)
    {
        // use addressTempRegister in case the branch32 we call uses dataTempRegister. :-/
        load32(left.m_ptr, addressTempRegister);
        return branch32(cond, addressTempRegister, right);
    }

    // 8-bit comparisons: the loaded byte is compared as a 32-bit value, so
    // the immediate must fit in the low 8 bits.
    Jump branch8(RelationalCondition cond, RegisterID left, TrustedImm32 right)
    {
        compare32(left, right);
        return Jump(makeBranch(cond));
    }

    Jump branch8(RelationalCondition cond, Address left, TrustedImm32 right)
    {
        ASSERT(!(0xffffff00 & right.m_value));
        // use addressTempRegister in case the branch8 we call uses dataTempRegister. :-/
        load8(left, addressTempRegister);
        return branch8(cond, addressTempRegister, right);
    }

    Jump branch8(RelationalCondition cond, BaseIndex left, TrustedImm32 right)
    {
        ASSERT(!(0xffffff00 & right.m_value));
        // use addressTempRegister in case the branch32 we call uses dataTempRegister. :-/
        load8(left, addressTempRegister);
        return branch32(cond, addressTempRegister, right);
    }

    // Branch on the result of reg & mask.
    Jump branchTest32(ResultCondition cond, RegisterID reg, RegisterID mask)
    {
        m_assembler.tst(reg, mask);
        return Jump(makeBranch(cond));
    }

    Jump branchTest32(ResultCondition cond, RegisterID reg, TrustedImm32 mask = TrustedImm32(-1))
    {
        test32(reg, mask);
        return Jump(makeBranch(cond));
    }

    Jump branchTest32(ResultCondition cond, Address address, TrustedImm32 mask = TrustedImm32(-1))
    {
        // use addressTempRegister in case the branchTest32 we call uses dataTempRegister. :-/
        load32(address, addressTempRegister);
        return branchTest32(cond, addressTempRegister, mask);
    }

    Jump branchTest32(ResultCondition cond, BaseIndex address, TrustedImm32 mask = TrustedImm32(-1))
    {
        // use addressTempRegister in case the branchTest32 we call uses dataTempRegister. :-/
        load32(address, addressTempRegister);
        return branchTest32(cond, addressTempRegister, mask);
    }

    Jump branchTest8(ResultCondition cond, Address address, TrustedImm32 mask = TrustedImm32(-1))
    {
        // use addressTempRegister in case the branchTest8 we call uses dataTempRegister. :-/
        load8(address, addressTempRegister);
        return branchTest32(cond, addressTempRegister, mask);
    }

    Jump branchTest8(ResultCondition cond, AbsoluteAddress address, TrustedImm32 mask = TrustedImm32(-1))
    {
        // use addressTempRegister in case the branchTest8 we call uses dataTempRegister. :-/
        move(TrustedImmPtr(address.m_ptr), addressTempRegister);
        load8(Address(addressTempRegister), addressTempRegister);
        return branchTest32(cond, addressTempRegister, mask);
    }

    // Unconditional indirect jump to the address in target.
    void jump(RegisterID target)
    {
        m_assembler.bx(target);
    }

    // Address is a memory location containing the address to jump to
    void jump(Address address)
    {
        load32(address, dataTempRegister);
        m_assembler.bx(dataTempRegister);
    }

    void jump(AbsoluteAddress address)
    {
        move(TrustedImmPtr(address.m_ptr), dataTempRegister);
        load32(Address(dataTempRegister), dataTempRegister);
        m_assembler.bx(dataTempRegister);
    }
    // Arithmetic control flow operations:
    //
    // This set of conditional branch operations branch based
    // on the result of an arithmetic operation. The operation
    // is performed as normal, storing the result.
    //
    // * jz operations branch if the result is zero.
    // * jo operations branch if the (signed) arithmetic
    //   operation caused an overflow to occur.

    // dest = op1 + op2, flag-setting; branch on cond.
    Jump branchAdd32(ResultCondition cond, RegisterID op1, RegisterID op2, RegisterID dest)
    {
        m_assembler.add_S(dest, op1, op2);
        return Jump(makeBranch(cond));
    }

    Jump branchAdd32(ResultCondition cond, RegisterID op1, TrustedImm32 imm, RegisterID dest)
    {
        // Use the immediate form when the value encodes; otherwise stage it
        // in dataTempRegister.
        ARMThumbImmediate armImm = ARMThumbImmediate::makeEncodedImm(imm.m_value);
        if (armImm.isValid())
            m_assembler.add_S(dest, op1, armImm);
        else {
            move(imm, dataTempRegister);
            m_assembler.add_S(dest, op1, dataTempRegister);
        }
        return Jump(makeBranch(cond));
    }

    Jump branchAdd32(ResultCondition cond, RegisterID src, RegisterID dest)
    {
        return branchAdd32(cond, dest, src, dest);
    }

    Jump branchAdd32(ResultCondition cond, TrustedImm32 imm, RegisterID dest)
    {
        return branchAdd32(cond, dest, imm, dest);
    }

    // Read-modify-write add on a value in memory, branching on cond.
    Jump branchAdd32(ResultCondition cond, TrustedImm32 imm, AbsoluteAddress dest)
    {
        // Move the high bits of the address into addressTempRegister,
        // and load the value into dataTempRegister.
        move(TrustedImmPtr(dest.m_ptr), addressTempRegister);
        m_assembler.ldr(dataTempRegister, addressTempRegister, ARMThumbImmediate::makeUInt16(0));
        // Do the add.
        ARMThumbImmediate armImm = ARMThumbImmediate::makeEncodedImm(imm.m_value);
        if (armImm.isValid())
            m_assembler.add_S(dataTempRegister, dataTempRegister, armImm);
        else {
            // If the operand does not fit into an immediate then load it temporarily
            // into addressTempRegister; since we're overwriting addressTempRegister
            // we'll need to reload it with the high bits of the address afterwards.
            move(imm, addressTempRegister);
            m_assembler.add_S(dataTempRegister, dataTempRegister, addressTempRegister);
            move(TrustedImmPtr(dest.m_ptr), addressTempRegister);
        }
        // Store the result.
        m_assembler.str(dataTempRegister, addressTempRegister, ARMThumbImmediate::makeUInt16(0));
        return Jump(makeBranch(cond));
    }

    // dest = src1 * src2; overflow is detected by comparing the high 32 bits
    // of the 64-bit SMULL product with the sign-extension of the low half.
    Jump branchMul32(ResultCondition cond, RegisterID src1, RegisterID src2, RegisterID dest)
    {
        m_assembler.smull(dest, dataTempRegister, src1, src2);
        if (cond == Overflow) {
            m_assembler.asr(addressTempRegister, dest, 31);
            return branch32(NotEqual, addressTempRegister, dataTempRegister);
        }
        return branchTest32(cond, dest);
    }

    Jump branchMul32(ResultCondition cond, RegisterID src, RegisterID dest)
    {
        return branchMul32(cond, src, dest, dest);
    }

    Jump branchMul32(ResultCondition cond, TrustedImm32 imm, RegisterID src, RegisterID dest)
    {
        move(imm, dataTempRegister);
        return branchMul32(cond, dataTempRegister, src, dest);
    }

    // srcDest = 0 - srcDest, flag-setting; branch on cond.
    Jump branchNeg32(ResultCondition cond, RegisterID srcDest)
    {
        ARMThumbImmediate zero = ARMThumbImmediate::makeUInt12(0);
        m_assembler.sub_S(srcDest, zero, srcDest);
        return Jump(makeBranch(cond));
    }

    // dest |= src, flag-setting; branch on cond.
    Jump branchOr32(ResultCondition cond, RegisterID src, RegisterID dest)
    {
        m_assembler.orr_S(dest, dest, src);
        return Jump(makeBranch(cond));
    }

    // dest = op1 - op2, flag-setting; branch on cond.
    Jump branchSub32(ResultCondition cond, RegisterID op1, RegisterID op2, RegisterID dest)
    {
        m_assembler.sub_S(dest, op1, op2);
        return Jump(makeBranch(cond));
    }

    Jump branchSub32(ResultCondition cond, RegisterID op1, TrustedImm32 imm, RegisterID dest)
    {
        ARMThumbImmediate armImm = ARMThumbImmediate::makeEncodedImm(imm.m_value);
        if (armImm.isValid())
            m_assembler.sub_S(dest, op1, armImm);
        else {
            move(imm, dataTempRegister);
            m_assembler.sub_S(dest, op1, dataTempRegister);
        }
        return Jump(makeBranch(cond));
    }

    Jump branchSub32(ResultCondition cond, RegisterID src, RegisterID dest)
    {
        return branchSub32(cond, dest, src, dest);
    }

    Jump branchSub32(ResultCondition cond, TrustedImm32 imm, RegisterID dest)
    {
        return branchSub32(cond, dest, imm, dest);
    }
    // Jump pc-relative by (index << scale): used for jump tables laid out
    // immediately after the jump.
    void relativeTableJump(RegisterID index, int scale)
    {
        ASSERT(scale >= 0 && scale <= 31);
        // dataTempRegister will point after the jump if index register contains zero
        move(ARMRegisters::pc, dataTempRegister);
        // NOTE(review): the 9 compensates for the pc-read offset of the
        // preceding move plus the Thumb bit - confirm against the emitted
        // instruction sizes before changing any instruction in this sequence.
        m_assembler.add(dataTempRegister, dataTempRegister, ARMThumbImmediate::makeEncodedImm(9));
        ShiftTypeAndAmount shift(SRType_LSL, scale);
        m_assembler.add(dataTempRegister, dataTempRegister, index, shift);
        jump(dataTempRegister);
    }

    // Miscellaneous operations:

    // Emit a BKPT instruction with the given immediate.
    void breakpoint(uint8_t imm = 0)
    {
        m_assembler.bkpt(imm);
    }

    // Calls are emitted as a fixed-width move of a (to-be-patched) target
    // into dataTempRegister followed by BLX, so the linker can rewrite the
    // destination later.
    ALWAYS_INLINE Call nearCall()
    {
        moveFixedWidthEncoding(TrustedImm32(0), dataTempRegister);
        return Call(m_assembler.blx(dataTempRegister), Call::LinkableNear);
    }

    ALWAYS_INLINE Call call()
    {
        moveFixedWidthEncoding(TrustedImm32(0), dataTempRegister);
        return Call(m_assembler.blx(dataTempRegister), Call::Linkable);
    }

    ALWAYS_INLINE Call call(RegisterID target)
    {
        return Call(m_assembler.blx(target), Call::None);
    }

    ALWAYS_INLINE Call call(Address address)
    {
        load32(address, dataTempRegister);
        return Call(m_assembler.blx(dataTempRegister), Call::None);
    }

    // Return via the link register.
    ALWAYS_INLINE void ret()
    {
        m_assembler.bx(linkRegister);
    }
  1363. void compare32(RelationalCondition cond, RegisterID left, RegisterID right, RegisterID dest)
  1364. {
  1365. m_assembler.cmp(left, right);
  1366. m_assembler.it(armV7Condition(cond), false);
  1367. m_assembler.mov(dest, ARMThumbImmediate::makeUInt16(1));
  1368. m_assembler.mov(dest, ARMThumbImmediate::makeUInt16(0));
  1369. }
  1370. void compare32(RelationalCondition cond, Address left, RegisterID right, RegisterID dest)
  1371. {
  1372. load32(left, dataTempRegister);
  1373. compare32(cond, dataTempRegister, right, dest);
  1374. }
  1375. void compare8(RelationalCondition cond, Address left, TrustedImm32 right, RegisterID dest)
  1376. {
  1377. load8(left, addressTempRegister);
  1378. compare32(cond, addressTempRegister, right, dest);
  1379. }
  1380. void compare32(RelationalCondition cond, RegisterID left, TrustedImm32 right, RegisterID dest)
  1381. {
  1382. compare32(left, right);
  1383. m_assembler.it(armV7Condition(cond), false);
  1384. m_assembler.mov(dest, ARMThumbImmediate::makeUInt16(1));
  1385. m_assembler.mov(dest, ARMThumbImmediate::makeUInt16(0));
  1386. }
  1387. // FIXME:
  1388. // The mask should be optional... paerhaps the argument order should be
  1389. // dest-src, operations always have a dest? ... possibly not true, considering
  1390. // asm ops like test, or pseudo ops like pop().
// Set dest to 1/0 according to whether (mem32[address] & mask) satisfies
// 'cond'. The two-operand test32 (defined elsewhere in this file) sets the
// flags; the IT block then selects dest = 1 (condition holds) or dest = 0
// (inverted condition). Clobbers dataTempRegister.
void test32(ResultCondition cond, Address address, TrustedImm32 mask, RegisterID dest)
{
    load32(address, dataTempRegister);
    test32(dataTempRegister, mask);
    m_assembler.it(armV7Condition(cond), false);
    m_assembler.mov(dest, ARMThumbImmediate::makeUInt16(1));
    m_assembler.mov(dest, ARMThumbImmediate::makeUInt16(0));
}
// Byte variant of test32 above: loads a single byte (widened by load8),
// masks it, and materializes the condition into dest as 1/0.
// Clobbers dataTempRegister.
void test8(ResultCondition cond, Address address, TrustedImm32 mask, RegisterID dest)
{
    load8(address, dataTempRegister);
    test32(dataTempRegister, mask);
    m_assembler.it(armV7Condition(cond), false);
    m_assembler.mov(dest, ARMThumbImmediate::makeUInt16(1));
    m_assembler.mov(dest, ARMThumbImmediate::makeUInt16(0));
}
// Load 'imm' into dst using the fixed-width movT3+movt encoding so the
// constant can be located and rewritten later; the returned DataLabel32
// identifies the patch point. padBeforePatch() (defined outside this chunk)
// presumably pads/aligns ahead of the patchable sequence.
ALWAYS_INLINE DataLabel32 moveWithPatch(TrustedImm32 imm, RegisterID dst)
{
    padBeforePatch();
    moveFixedWidthEncoding(imm, dst);
    return DataLabel32(this);
}
// Pointer flavour of moveWithPatch: on this 32-bit target a pointer fits in
// a TrustedImm32, so the immediate overload's fixed-width sequence is reused.
ALWAYS_INLINE DataLabelPtr moveWithPatch(TrustedImmPtr imm, RegisterID dst)
{
    padBeforePatch();
    moveFixedWidthEncoding(TrustedImm32(imm), dst);
    return DataLabelPtr(this);
}
// Branch comparing 'left' against a patchable pointer constant. The constant
// (initially initialRightValue) is emitted into dataTempRegister first and its
// patch point returned through dataLabel; the comparison follows immediately,
// which the patching code relies on.
ALWAYS_INLINE Jump branchPtrWithPatch(RelationalCondition cond, RegisterID left, DataLabelPtr& dataLabel, TrustedImmPtr initialRightValue = TrustedImmPtr(0))
{
    dataLabel = moveWithPatch(initialRightValue, dataTempRegister);
    return branch32(cond, left, dataTempRegister);
}
// Memory-operand variant: the left value is loaded into addressTempRegister
// (not dataTempRegister, which must stay free to hold the patchable constant)
// before the patchable constant and comparison are emitted.
ALWAYS_INLINE Jump branchPtrWithPatch(RelationalCondition cond, Address left, DataLabelPtr& dataLabel, TrustedImmPtr initialRightValue = TrustedImmPtr(0))
{
    load32(left, addressTempRegister);
    dataLabel = moveWithPatch(initialRightValue, dataTempRegister);
    return branch32(cond, addressTempRegister, dataTempRegister);
}
  1430. PatchableJump patchableBranchPtr(RelationalCondition cond, Address left, TrustedImmPtr right = TrustedImmPtr(0))
  1431. {
  1432. m_makeJumpPatchable = true;
  1433. Jump result = branch32(cond, left, TrustedImm32(right));
  1434. m_makeJumpPatchable = false;
  1435. return PatchableJump(result);
  1436. }
  1437. PatchableJump patchableBranchTest32(ResultCondition cond, RegisterID reg, TrustedImm32 mask = TrustedImm32(-1))
  1438. {
  1439. m_makeJumpPatchable = true;
  1440. Jump result = branchTest32(cond, reg, mask);
  1441. m_makeJumpPatchable = false;
  1442. return PatchableJump(result);
  1443. }
  1444. PatchableJump patchableBranch32(RelationalCondition cond, RegisterID reg, TrustedImm32 imm)
  1445. {
  1446. m_makeJumpPatchable = true;
  1447. Jump result = branch32(cond, reg, imm);
  1448. m_makeJumpPatchable = false;
  1449. return PatchableJump(result);
  1450. }
  1451. PatchableJump patchableBranchPtrWithPatch(RelationalCondition cond, Address left, DataLabelPtr& dataLabel, TrustedImmPtr initialRightValue = TrustedImmPtr(0))
  1452. {
  1453. m_makeJumpPatchable = true;
  1454. Jump result = branchPtrWithPatch(cond, left, dataLabel, initialRightValue);
  1455. m_makeJumpPatchable = false;
  1456. return PatchableJump(result);
  1457. }
  1458. PatchableJump patchableJump()
  1459. {
  1460. padBeforePatch();
  1461. m_makeJumpPatchable = true;
  1462. Jump result = jump();
  1463. m_makeJumpPatchable = false;
  1464. return PatchableJump(result);
  1465. }
// Store a patchable pointer constant to 'address': the constant is first
// materialized into dataTempRegister via the fixed-width sequence (whose
// patch point is returned), then written to memory. Clobbers dataTempRegister.
ALWAYS_INLINE DataLabelPtr storePtrWithPatch(TrustedImmPtr initialValue, ImplicitAddress address)
{
    DataLabelPtr label = moveWithPatch(initialValue, dataTempRegister);
    store32(dataTempRegister, address);
    return label;
}
// Convenience overload: store a patchable pointer that starts out null.
ALWAYS_INLINE DataLabelPtr storePtrWithPatch(ImplicitAddress address) { return storePtrWithPatch(TrustedImmPtr(0), address); }
// Emit a tail call: a fixed-width move of a placeholder (0) into
// dataTempRegister followed by BX — i.e. a jump rather than BLX, so no
// return address is pushed. Marked Call::Linkable so the real target is
// filled in at link time.
ALWAYS_INLINE Call tailRecursiveCall()
{
    // Like a normal call, but don't link.
    moveFixedWidthEncoding(TrustedImm32(0), dataTempRegister);
    return Call(m_assembler.bx(dataTempRegister), Call::Linkable);
}
// Convert a previously-emitted jump into a tail call: bind the old jump to
// the current position, then emit the tail-call sequence there.
ALWAYS_INLINE Call makeTailRecursiveCall(Jump oldJump)
{
    oldJump.link(this);
    return tailRecursiveCall();
}
// Thin forwarder to the assembler's mapping from a buffer offset to its
// offset in the final executable copy.
int executableOffsetFor(int location)
{
    return m_assembler.executableOffsetFor(location);
}
// Decode the destination of an already-emitted call instruction and wrap it
// as a FunctionPtr (via a function-pointer-typed cast of the raw address).
static FunctionPtr readCallTarget(CodeLocationCall call)
{
    return FunctionPtr(reinterpret_cast<void(*)()>(ARMv7Assembler::readCallTarget(call.dataLocation())));
}
// ARMv7 does not support jump-replacing the address form of
// branchPtrWithPatch; the corresponding *OnAddress helpers below are
// therefore unreachable on this port.
static bool canJumpReplacePatchableBranchPtrWithPatch() { return false; }
  1493. static CodeLocationLabel startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr label)
  1494. {
  1495. const unsigned twoWordOpSize = 4;
  1496. return label.labelAtOffset(-twoWordOpSize * 2);
  1497. }
// Undo a jump replacement, restoring the original branchPtrWithPatch
// instruction sequence at 'instructionStart' with 'initialValue' as the
// comparison constant. The instruction sequence rewritten differs by
// platform, hence the #if: Linux/QNX/PSP2 restore a movT3+movt+cmp triple,
// other targets restore only the movT3 (low halfword of the constant).
static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel instructionStart, RegisterID rd, void* initialValue)
{
#if OS(LINUX) || OS(QNX) || OS(PSP2)
    ARMv7Assembler::revertJumpTo_movT3movtcmpT2(instructionStart.dataLocation(), rd, dataTempRegister, reinterpret_cast<uintptr_t>(initialValue));
#else
    UNUSED_PARAM(rd);
    ARMv7Assembler::revertJumpTo_movT3(instructionStart.dataLocation(), dataTempRegister, ARMThumbImmediate::makeUInt16(reinterpret_cast<uintptr_t>(initialValue) & 0xffff));
#endif
}
// Unreachable on ARMv7: canJumpReplacePatchableBranchPtrWithPatch() returns
// false, so callers must never take this path.
static CodeLocationLabel startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr)
{
    UNREACHABLE_FOR_PLATFORM();
    return CodeLocationLabel();
}
// Unreachable on ARMv7 for the same reason as
// startOfPatchableBranchPtrWithPatchOnAddress above.
static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel, Address, void*)
{
    UNREACHABLE_FOR_PLATFORM();
}
  1516. protected:
// Emit an unconditional jump as a fixed-width constant load (placeholder 0)
// plus BX, so the target can be linked or repatched later. When
// m_makeJumpPatchable is set the jump is tagged with the fixed-size variant
// so the link-time optimizer cannot shrink it.
ALWAYS_INLINE Jump jump()
{
    m_assembler.label(); // Force nop-padding if we're in the middle of a watchpoint.
    moveFixedWidthEncoding(TrustedImm32(0), dataTempRegister);
    return Jump(m_assembler.bx(dataTempRegister), m_makeJumpPatchable ? ARMv7Assembler::JumpNoConditionFixedSize : ARMv7Assembler::JumpNoCondition);
}
// Emit a conditional jump. The it(cond, true, true) forms an ITTT block so
// all three following instructions — the two-instruction fixed-width move
// and the BX — execute only when 'cond' holds; do not reorder or insert
// instructions inside this sequence.
ALWAYS_INLINE Jump makeBranch(ARMv7Assembler::Condition cond)
{
    m_assembler.label(); // Force nop-padding if we're in the middle of a watchpoint.
    m_assembler.it(cond, true, true);
    moveFixedWidthEncoding(TrustedImm32(0), dataTempRegister);
    return Jump(m_assembler.bx(dataTempRegister), m_makeJumpPatchable ? ARMv7Assembler::JumpConditionFixedSize : ARMv7Assembler::JumpCondition, cond);
}
// Convenience overloads: translate each MacroAssembler condition enum to the
// underlying ARMv7 condition code and emit the conditional jump.
ALWAYS_INLINE Jump makeBranch(RelationalCondition cond) { return makeBranch(armV7Condition(cond)); }
ALWAYS_INLINE Jump makeBranch(ResultCondition cond) { return makeBranch(armV7Condition(cond)); }
ALWAYS_INLINE Jump makeBranch(DoubleCondition cond) { return makeBranch(armV7Condition(cond)); }
  1533. ArmAddress setupArmAddress(BaseIndex address)
  1534. {
  1535. if (address.offset) {
  1536. ARMThumbImmediate imm = ARMThumbImmediate::makeUInt12OrEncodedImm(address.offset);
  1537. if (imm.isValid())
  1538. m_assembler.add(addressTempRegister, address.base, imm);
  1539. else {
  1540. move(TrustedImm32(address.offset), addressTempRegister);
  1541. m_assembler.add(addressTempRegister, addressTempRegister, address.base);
  1542. }
  1543. return ArmAddress(addressTempRegister, address.index, address.scale);
  1544. } else
  1545. return ArmAddress(address.base, address.index, address.scale);
  1546. }
  1547. ArmAddress setupArmAddress(Address address)
  1548. {
  1549. if ((address.offset >= -0xff) && (address.offset <= 0xfff))
  1550. return ArmAddress(address.base, address.offset);
  1551. move(TrustedImm32(address.offset), addressTempRegister);
  1552. return ArmAddress(address.base, addressTempRegister);
  1553. }
  1554. ArmAddress setupArmAddress(ImplicitAddress address)
  1555. {
  1556. if ((address.offset >= -0xff) && (address.offset <= 0xfff))
  1557. return ArmAddress(address.base, address.offset);
  1558. move(TrustedImm32(address.offset), addressTempRegister);
  1559. return ArmAddress(address.base, addressTempRegister);
  1560. }
  1561. RegisterID makeBaseIndexBase(BaseIndex address)
  1562. {
  1563. if (!address.offset)
  1564. return address.base;
  1565. ARMThumbImmediate imm = ARMThumbImmediate::makeUInt12OrEncodedImm(address.offset);
  1566. if (imm.isValid())
  1567. m_assembler.add(addressTempRegister, address.base, imm);
  1568. else {
  1569. move(TrustedImm32(address.offset), addressTempRegister);
  1570. m_assembler.add(addressTempRegister, addressTempRegister, address.base);
  1571. }
  1572. return addressTempRegister;
  1573. }
// Load a 32-bit immediate using exactly two instructions — movT3 for the low
// halfword, movt for the high halfword — regardless of the value. The
// constant width is what makes the sequence locatable and repatchable; do
// not "optimize" it to a shorter encoding.
void moveFixedWidthEncoding(TrustedImm32 imm, RegisterID dst)
{
    uint32_t value = imm.m_value;
    m_assembler.movT3(dst, ARMThumbImmediate::makeUInt16(value & 0xffff));
    m_assembler.movt(dst, ARMThumbImmediate::makeUInt16(value >> 16));
}
// Convert the MacroAssembler condition enums to ARMv7 condition codes.
// The plain casts assume each enum's values were defined (earlier in this
// file, outside this chunk) to match ARMv7Assembler::Condition numerically —
// keep those definitions in sync.
ARMv7Assembler::Condition armV7Condition(RelationalCondition cond)
{
    return static_cast<ARMv7Assembler::Condition>(cond);
}
ARMv7Assembler::Condition armV7Condition(ResultCondition cond)
{
    return static_cast<ARMv7Assembler::Condition>(cond);
}
ARMv7Assembler::Condition armV7Condition(DoubleCondition cond)
{
    return static_cast<ARMv7Assembler::Condition>(cond);
}
  1592. private:
  1593. friend class LinkBuffer;
  1594. friend class RepatchBuffer;
// LinkBuffer hook: resolve a recorded call in the finalized code buffer to
// its real target.
static void linkCall(void* code, Call call, FunctionPtr function)
{
    ARMv7Assembler::linkCall(code, call.m_label, function.value());
}
// RepatchBuffer hook: redirect an already-linked call to a new code label.
static void repatchCall(CodeLocationCall call, CodeLocationLabel destination)
{
    ARMv7Assembler::relinkCall(call.dataLocation(), destination.executableAddress());
}
// RepatchBuffer hook: redirect an already-linked call to a new C function.
static void repatchCall(CodeLocationCall call, FunctionPtr destination)
{
    ARMv7Assembler::relinkCall(call.dataLocation(), destination.executableAddress());
}
  1607. bool m_makeJumpPatchable;
  1608. };
  1609. } // namespace JSC
  1610. #endif // ENABLE(ASSEMBLER)
  1611. #endif // MacroAssemblerARMv7_h