JITInlines.h

/*
 * Copyright (C) 2008, 2012 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef JITInlines_h
#define JITInlines_h

#if ENABLE(JIT)

namespace JSC {

ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
}

ALWAYS_INLINE JSValue JIT::getConstantOperand(unsigned src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}

ALWAYS_INLINE void JIT::emitPutIntToCallFrameHeader(RegisterID from, JSStack::CallFrameHeaderEntry entry)
{
#if USE(JSVALUE32_64)
    store32(TrustedImm32(Int32Tag), intTagFor(entry, callFrameRegister));
    store32(from, intPayloadFor(entry, callFrameRegister));
#else
    store64(from, addressFor(entry, callFrameRegister));
#endif
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeaderPtr(JSStack::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    loadPtr(Address(from, entry * sizeof(Register)), to);
#if USE(JSVALUE64)
    killLastResultRegister();
#endif
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(JSStack::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load32(Address(from, entry * sizeof(Register)), to);
#if USE(JSVALUE64)
    killLastResultRegister();
#endif
}

#if USE(JSVALUE64)
ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader64(JSStack::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load64(Address(from, entry * sizeof(Register)), to);
    killLastResultRegister();
}
#endif
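
// Fast path for reading the character of a one-character JSString: take the caller's
// failure list unless the cell is a string of length 1 with a non-null StringImpl
// (a null value pointer here suggests the contents are not yet materialized, e.g. a
// rope), then load the character with an 8-bit or 16-bit load depending on the
// StringImpl's is8Bit flag.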
ALWAYS_INLINE void JIT::emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures)
{
    failures.append(branchPtr(NotEqual, Address(src, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    failures.append(branch32(NotEqual, MacroAssembler::Address(src, ThunkHelpers::jsStringLengthOffset()), TrustedImm32(1)));
    loadPtr(MacroAssembler::Address(src, ThunkHelpers::jsStringValueOffset()), dst);
    failures.append(branchTest32(Zero, dst));
    loadPtr(MacroAssembler::Address(dst, StringImpl::flagsOffset()), regT1);
    loadPtr(MacroAssembler::Address(dst, StringImpl::dataOffset()), dst);

    JumpList is16Bit;
    JumpList cont8Bit;
    is16Bit.append(branchTest32(Zero, regT1, TrustedImm32(StringImpl::flagIs8Bit())));
    load8(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.append(jump());
    is16Bit.link(this);
    load16(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.link(this);
}

ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, function.executableAddress()));
    return nakedCall;
}

ALWAYS_INLINE bool JIT::atJumpTarget()
{
    while (m_jumpTargetsPosition < m_codeBlock->numberOfJumpTargets() && m_codeBlock->jumpTarget(m_jumpTargetsPosition) <= m_bytecodeOffset) {
        if (m_codeBlock->jumpTarget(m_jumpTargetsPosition) == m_bytecodeOffset)
            return true;
        ++m_jumpTargetsPosition;
    }
    return false;
}
#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL

ALWAYS_INLINE void JIT::beginUninterruptedSequence(int insnSpace, int constSpace)
{
#if CPU(ARM_TRADITIONAL)
#ifndef NDEBUG
    // Ensure the label after the sequence can also fit
    insnSpace += sizeof(ARMWord);
    constSpace += sizeof(uint64_t);
#endif
    ensureSpace(insnSpace, constSpace);
#elif CPU(SH4)
#ifndef NDEBUG
    insnSpace += sizeof(SH4Word);
    constSpace += sizeof(uint64_t);
#endif
    m_assembler.ensureSpace(insnSpace + m_assembler.maxInstructionSize + 2, constSpace + 8);
#endif

#ifndef NDEBUG
    m_uninterruptedInstructionSequenceBegin = label();
    m_uninterruptedConstantSequenceBegin = sizeOfConstantPool();
#endif
}
ALWAYS_INLINE void JIT::endUninterruptedSequence(int insnSpace, int constSpace, int dst)
{
#ifndef NDEBUG
    /* There are several cases in which the uninterrupted sequence is larger than the
     * maximum offset required for patching the same sequence. E.g., if the last
     * macroassembler instruction in an uninterrupted sequence is a stub call, it
     * emits store instruction(s) that should not be included when calculating the
     * length of the uninterrupted sequence. So insnSpace and constSpace should be
     * treated as upper bounds rather than hard limits.
     */
#if CPU(SH4)
    if ((dst > 15) || (dst < -16)) {
        insnSpace += 8;
        constSpace += 2;
    }

    if (((dst >= -16) && (dst < 0)) || ((dst > 7) && (dst <= 15)))
        insnSpace += 8;
#else
    UNUSED_PARAM(dst);
#endif

    ASSERT(differenceBetween(m_uninterruptedInstructionSequenceBegin, label()) <= insnSpace);
    ASSERT(sizeOfConstantPool() - m_uninterruptedConstantSequenceBegin <= constSpace);
#else
    UNUSED_PARAM(insnSpace);
    UNUSED_PARAM(constSpace);
    UNUSED_PARAM(dst);
#endif
}
#endif // ASSEMBLER_HAS_CONSTANT_POOL
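
// Keep the runtime's view of execution in sync with JIT code: the current bytecode
// location is stashed in the tag half of the ArgumentCount call frame header slot
// (an instruction pointer on JSVALUE32_64, the bytecode offset plus one otherwise),
// and the current call frame is published to vm->topCallFrame so that C++ code called
// from here can see the active frame.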
ALWAYS_INLINE void JIT::updateTopCallFrame()
{
    ASSERT(static_cast<int>(m_bytecodeOffset) >= 0);
    if (m_bytecodeOffset) {
#if USE(JSVALUE32_64)
        storePtr(TrustedImmPtr(m_codeBlock->instructions().begin() + m_bytecodeOffset + 1), intTagFor(JSStack::ArgumentCount));
#else
        store32(TrustedImm32(m_bytecodeOffset + 1), intTagFor(JSStack::ArgumentCount));
#endif
    }
    storePtr(callFrameRegister, &m_vm->topCallFrame);
}

ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline()
{
#if CPU(X86)
    // Within a trampoline the return address will be on the stack at this point.
    addPtr(TrustedImm32(sizeof(void*)), stackPointerRegister, firstArgumentRegister);
#elif CPU(ARM)
    move(stackPointerRegister, firstArgumentRegister);
#elif CPU(SH4)
    move(stackPointerRegister, firstArgumentRegister);
#endif
    // In the trampoline on x86-64, the first argument register is not overwritten.
}
ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchPtr(NotEqual, Address(reg, JSCell::structureOffset()), TrustedImmPtr(structure));
}

ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}

ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    const JumpList::JumpVector& jumpVector = jumpList.jumps();
    size_t size = jumpVector.size();
    for (size_t i = 0; i < size; ++i)
        m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addSlowCase()
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    Jump emptyJump; // Doing it this way to make Windows happy.
    m_slowCases.append(SlowCaseEntry(emptyJump, m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeOffset + relativeOffset));
}

ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    jump.linkTo(m_labels[m_bytecodeOffset + relativeOffset], this);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotObject(RegisterID structureReg)
{
    return branch8(Below, Address(structureReg, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
}

#if ENABLE(SAMPLING_FLAGS)
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(TrustedImm32(1u << (flag - 1)), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}

ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(TrustedImm32(~(1u << (flag - 1))), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}
#endif

#if ENABLE(SAMPLING_COUNTERS)
ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, int32_t count)
{
    add64(TrustedImm32(count), AbsoluteAddress(counter.addressOfCounter()));
}
#endif

#if ENABLE(OPCODE_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(TrustedImmPtr(m_interpreter->sampler()->sampleSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
#endif
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(TrustedImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(codeBlock), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(TrustedImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
#endif
#endif

ALWAYS_INLINE bool JIT::isOperandConstantImmediateChar(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isString() && asString(getConstantOperand(src).asCell())->length() == 1;
}
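
// Inline allocation fast path: pop the head of the MarkedAllocator's free list into
// 'result' (taking the slow case when the list is empty), then initialize the new cell
// with its Structure and a null butterfly. The slow case recorded here is presumably
// linked by the caller to an out-of-line allocation path.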
template<typename StructureType>
inline void JIT::emitAllocateJSObject(RegisterID allocator, StructureType structure, RegisterID result, RegisterID scratch)
{
    loadPtr(Address(allocator, MarkedAllocator::offsetOfFreeListHead()), result);
    addSlowCase(branchTestPtr(Zero, result));

    // remove the object from the free list
    loadPtr(Address(result), scratch);
    storePtr(scratch, Address(allocator, MarkedAllocator::offsetOfFreeListHead()));

    // initialize the object's structure
    storePtr(structure, Address(result, JSCell::structureOffset()));

    // initialize the object's property storage pointer
    storePtr(TrustedImmPtr(0), Address(result, JSObject::butterflyOffset()));
}
#if ENABLE(VALUE_PROFILER)
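// Record the value currently in regT0 (and its tag in regT1 on JSVALUE32_64) into the
// profile's buckets. With a single bucket the store goes straight to it; otherwise the
// bucket cursor is advanced by a pseudo-random step of 1 or 3 and wrapped with
// ValueProfile::bucketIndexMask so successive samples are spread across the buckets.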
inline void JIT::emitValueProfilingSite(ValueProfile* valueProfile)
{
    ASSERT(shouldEmitProfiling());
    ASSERT(valueProfile);

    const RegisterID value = regT0;
#if USE(JSVALUE32_64)
    const RegisterID valueTag = regT1;
#endif
    const RegisterID scratch = regT3;

    if (ValueProfile::numberOfBuckets == 1) {
        // We're in a simple configuration: only one bucket, so we can just do a direct
        // store.
#if USE(JSVALUE64)
        store64(value, valueProfile->m_buckets);
#else
        EncodedValueDescriptor* descriptor = bitwise_cast<EncodedValueDescriptor*>(valueProfile->m_buckets);
        store32(value, &descriptor->asBits.payload);
        store32(valueTag, &descriptor->asBits.tag);
#endif
        return;
    }

    if (m_randomGenerator.getUint32() & 1)
        add32(TrustedImm32(1), bucketCounterRegister);
    else
        add32(TrustedImm32(3), bucketCounterRegister);
    and32(TrustedImm32(ValueProfile::bucketIndexMask), bucketCounterRegister);
    move(TrustedImmPtr(valueProfile->m_buckets), scratch);
#if USE(JSVALUE64)
    store64(value, BaseIndex(scratch, bucketCounterRegister, TimesEight));
#elif USE(JSVALUE32_64)
    store32(value, BaseIndex(scratch, bucketCounterRegister, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(valueTag, BaseIndex(scratch, bucketCounterRegister, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
#endif
}

inline void JIT::emitValueProfilingSite(unsigned bytecodeOffset)
{
    if (!shouldEmitProfiling())
        return;
    emitValueProfilingSite(m_codeBlock->valueProfileForBytecodeOffset(bytecodeOffset));
}

inline void JIT::emitValueProfilingSite()
{
    emitValueProfilingSite(m_bytecodeOffset);
}
#endif // ENABLE(VALUE_PROFILER)
inline void JIT::emitArrayProfilingSite(RegisterID structureAndIndexingType, RegisterID scratch, ArrayProfile* arrayProfile)
{
    UNUSED_PARAM(scratch); // We have found this scratch register useful here before, so we will keep it for now.

    RegisterID structure = structureAndIndexingType;
    RegisterID indexingType = structureAndIndexingType;

    if (shouldEmitProfiling())
        storePtr(structure, arrayProfile->addressOfLastSeenStructure());

    load8(Address(structure, Structure::indexingTypeOffset()), indexingType);
}
inline void JIT::emitArrayProfilingSiteForBytecodeIndex(RegisterID structureAndIndexingType, RegisterID scratch, unsigned bytecodeIndex)
{
#if ENABLE(VALUE_PROFILER)
    emitArrayProfilingSite(structureAndIndexingType, scratch, m_codeBlock->getOrAddArrayProfile(bytecodeIndex));
#else
    UNUSED_PARAM(bytecodeIndex);
    emitArrayProfilingSite(structureAndIndexingType, scratch, 0);
#endif
}

inline void JIT::emitArrayProfileStoreToHoleSpecialCase(ArrayProfile* arrayProfile)
{
#if ENABLE(VALUE_PROFILER)
    store8(TrustedImm32(1), arrayProfile->addressOfMayStoreToHole());
#else
    UNUSED_PARAM(arrayProfile);
#endif
}

inline void JIT::emitArrayProfileOutOfBoundsSpecialCase(ArrayProfile* arrayProfile)
{
#if ENABLE(VALUE_PROFILER)
    store8(TrustedImm32(1), arrayProfile->addressOfOutOfBounds());
#else
    UNUSED_PARAM(arrayProfile);
#endif
}

static inline bool arrayProfileSaw(ArrayModes arrayModes, IndexingType capability)
{
#if ENABLE(VALUE_PROFILER)
    return arrayModesInclude(arrayModes, capability);
#else
    UNUSED_PARAM(arrayModes);
    UNUSED_PARAM(capability);
    return false;
#endif
}
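
// Pick the array specialization for this op from the profile's observed array modes,
// preferring DoubleShape, then Int32Shape, then ArrayStorageShape, and falling back to
// JITContiguous when profiling is disabled or nothing more specific has been observed.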
inline JITArrayMode JIT::chooseArrayMode(ArrayProfile* profile)
{
#if ENABLE(VALUE_PROFILER)
    profile->computeUpdatedPrediction(m_codeBlock);
    ArrayModes arrayModes = profile->observedArrayModes();
    if (arrayProfileSaw(arrayModes, DoubleShape))
        return JITDouble;
    if (arrayProfileSaw(arrayModes, Int32Shape))
        return JITInt32;
    if (arrayProfileSaw(arrayModes, ArrayStorageShape))
        return JITArrayStorage;
    return JITContiguous;
#else
    UNUSED_PARAM(profile);
    return JITContiguous;
#endif
}

#if USE(JSVALUE32_64)
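// JSVALUE32_64 value representation: each JSValue occupies two 32-bit words, a tag and
// a payload, so values are loaded and stored as separate tag/payload accesses. The
// emitLoadTag/emitLoadPayload helpers first consult the map() cache (see below) so a
// value that was just stored from registers need not be reloaded from the stack.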
inline void JIT::emitLoadTag(int index, RegisterID tag)
{
    RegisterID mappedTag;
    if (getMappedTag(index, mappedTag)) {
        move(mappedTag, tag);
        unmap(tag);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).tag()), tag);
        unmap(tag);
        return;
    }

    load32(tagFor(index), tag);
    unmap(tag);
}

inline void JIT::emitLoadPayload(int index, RegisterID payload)
{
    RegisterID mappedPayload;
    if (getMappedPayload(index, mappedPayload)) {
        move(mappedPayload, payload);
        unmap(payload);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).payload()), payload);
        unmap(payload);
        return;
    }

    load32(payloadFor(index), payload);
    unmap(payload);
}

inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}

inline void JIT::emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    RELEASE_ASSERT(tag != payload);

    if (base == callFrameRegister) {
        RELEASE_ASSERT(payload != base);
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}

inline void JIT::emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2)
{
    if (isMapped(index1)) {
        emitLoad(index1, tag1, payload1);
        emitLoad(index2, tag2, payload2);
        return;
    }
    emitLoad(index2, tag2, payload2);
    emitLoad(index1, tag1, payload1);
}

inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}

inline void JIT::emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}

inline void JIT::emitStoreInt32(int index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreAndMapInt32(int index, RegisterID tag, RegisterID payload, bool indexIsInt32, size_t opcodeLength)
{
    emitStoreInt32(index, payload, indexIsInt32);
    map(m_bytecodeOffset + opcodeLength, index, tag, payload);
}

inline void JIT::emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreCell(int index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(TrustedImm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreBool(int index, RegisterID payload, bool indexIsBool)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsBool)
        store32(TrustedImm32(JSValue::BooleanTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreDouble(int index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}

inline void JIT::emitStore(int index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    emitStore(dst, jsUndefined());
}

inline bool JIT::isLabeled(unsigned bytecodeOffset)
{
    for (size_t numberOfJumpTargets = m_codeBlock->numberOfJumpTargets(); m_jumpTargetIndex != numberOfJumpTargets; ++m_jumpTargetIndex) {
        unsigned jumpTarget = m_codeBlock->jumpTarget(m_jumpTargetIndex);
        if (jumpTarget == bytecodeOffset)
            return true;
        if (jumpTarget > bytecodeOffset)
            return false;
    }
    return false;
}
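
// Remember that 'tag' and 'payload' hold the value of 'virtualRegisterIndex' as of
// 'bytecodeOffset', so later loads at that offset can reuse the registers instead of
// hitting the stack. Mapping is skipped at jump targets, where control flow can merge
// and the cached registers may no longer be valid.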
inline void JIT::map(unsigned bytecodeOffset, int virtualRegisterIndex, RegisterID tag, RegisterID payload)
{
    if (isLabeled(bytecodeOffset))
        return;

    m_mappedBytecodeOffset = bytecodeOffset;
    m_mappedVirtualRegisterIndex = virtualRegisterIndex;
    m_mappedTag = tag;
    m_mappedPayload = payload;

    ASSERT(!canBeOptimizedOrInlined() || m_mappedPayload == regT0);
    ASSERT(!canBeOptimizedOrInlined() || m_mappedTag == regT1);
}

inline void JIT::unmap(RegisterID registerID)
{
    if (m_mappedTag == registerID)
        m_mappedTag = (RegisterID)-1;
    else if (m_mappedPayload == registerID)
        m_mappedPayload = (RegisterID)-1;
}

inline void JIT::unmap()
{
    m_mappedBytecodeOffset = (unsigned)-1;
    m_mappedVirtualRegisterIndex = JSStack::ReturnPC;
    m_mappedTag = (RegisterID)-1;
    m_mappedPayload = (RegisterID)-1;
}

inline bool JIT::isMapped(int virtualRegisterIndex)
{
    if (m_mappedBytecodeOffset != m_bytecodeOffset)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    return true;
}

inline bool JIT::getMappedPayload(int virtualRegisterIndex, RegisterID& payload)
{
    if (m_mappedBytecodeOffset != m_bytecodeOffset)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedPayload == (RegisterID)-1)
        return false;
    payload = m_mappedPayload;
    return true;
}

inline bool JIT::getMappedTag(int virtualRegisterIndex, RegisterID& tag)
{
    if (m_mappedBytecodeOffset != m_bytecodeOffset)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedTag == (RegisterID)-1)
        return false;
    tag = m_mappedTag;
    return true;
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(emitJumpIfNotJSCell(virtualRegisterIndex));
    }
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(branch32(NotEqual, tag, TrustedImm32(JSValue::CellTag)));
    }
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant)
{
    if (isOperandConstantImmediateInt(op1)) {
        constant = getConstantOperand(op1).asInt32();
        op = op2;
        return true;
    }

    if (isOperandConstantImmediateInt(op2)) {
        constant = getConstantOperand(op2).asInt32();
        op = op1;
        return true;
    }

    return false;
}

#else // USE(JSVALUE32_64)
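// JSVALUE64 value representation: a JSValue fits in a single 64-bit register, with
// cells and boxed numbers distinguished by the high tag bits compared against
// tagMaskRegister and tagTypeNumberRegister. The helpers below also track which
// virtual register's value is live in cachedResultRegister (via
// m_lastResultBytecodeRegister) so that back-to-back uses can skip a reload.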
/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitGetJITStubArg(unsigned argumentNumber, RegisterID dst)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    peek64(dst, argumentStackOffset);
}

ALWAYS_INLINE void JIT::killLastResultRegister()
{
    m_lastResultBytecodeRegister = std::numeric_limits<int>::max();
}

// get arg puts an arg from the SF register array into a h/w register
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        if (!value.isNumber())
            move(TrustedImm64(JSValue::encode(value)), dst);
        else
            move(Imm64(JSValue::encode(value)), dst);
        killLastResultRegister();
        return;
    }

    if (src == m_lastResultBytecodeRegister && m_codeBlock->isTemporaryRegisterIndex(src) && !atJumpTarget()) {
        // The argument we want is already stored in eax
        if (dst != cachedResultRegister)
            move(cachedResultRegister, dst);
        killLastResultRegister();
        return;
    }

    load64(Address(callFrameRegister, src * sizeof(Register)), dst);
    killLastResultRegister();
}

ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    if (src2 == m_lastResultBytecodeRegister) {
        emitGetVirtualRegister(src2, dst2);
        emitGetVirtualRegister(src1, dst1);
    } else {
        emitGetVirtualRegister(src1, dst1);
        emitGetVirtualRegister(src2, dst2);
    }
}

ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(unsigned src)
{
    return getConstantOperand(src).asInt32();
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(unsigned dst, RegisterID from)
{
    store64(from, Address(callFrameRegister, dst * sizeof(Register)));
    m_lastResultBytecodeRegister = (from == cachedResultRegister) ? static_cast<int>(dst) : std::numeric_limits<int>::max();
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}
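
// In the 64-bit encoding a value is a cell exactly when none of the tag bits in
// tagMaskRegister are set, so a zero-test against that register distinguishes cells
// from immediates; or'ing two values into a scratch lets both be checked with one test.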
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
    return branchTest64(Zero, reg, tagMaskRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    or64(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}

inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        ASSERT(isOperandConstantImmediateInt(index));
        convertInt32ToDouble(Imm32(getConstantOperand(index).asInt32()), value);
    } else
        convertInt32ToDouble(addressFor(index), value);
}
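
// Immediate int32s carry the full TagTypeNumber pattern in their high bits, so an
// unsigned comparison against tagTypeNumberRegister suffices: values at or above it
// are int32 immediates, while values below it are doubles, cells, or other immediates.
// And'ing two values into a scratch checks that both are int32s with a single compare.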
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
    return branch64(AboveOrEqual, reg, tagTypeNumberRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
    return branch64(Below, reg, tagTypeNumberRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    and64(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateNumber(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateNumber(reg));
}

ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
    emitFastArithIntToImmNoCheck(src, dest);
}

ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
{
    or32(TrustedImm32(static_cast<int32_t>(ValueFalse)), reg);
}

#endif // USE(JSVALUE32_64)

} // namespace JSC

#endif // ENABLE(JIT)

#endif // JITInlines_h