// MacroAssemblerCodeRef.h
  1. /*
  2. * Copyright (C) 2009, 2012 Apple Inc. All rights reserved.
  3. *
  4. * Redistribution and use in source and binary forms, with or without
  5. * modification, are permitted provided that the following conditions
  6. * are met:
  7. * 1. Redistributions of source code must retain the above copyright
  8. * notice, this list of conditions and the following disclaimer.
  9. * 2. Redistributions in binary form must reproduce the above copyright
  10. * notice, this list of conditions and the following disclaimer in the
  11. * documentation and/or other materials provided with the distribution.
  12. *
  13. * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
  14. * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
  15. * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
  16. * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
  17. * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
  18. * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
  19. * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
  20. * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
  21. * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  22. * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  23. * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  24. */
  25. #ifndef MacroAssemblerCodeRef_h
  26. #define MacroAssemblerCodeRef_h
  27. #include "Disassembler.h"
  28. #include "ExecutableAllocator.h"
  29. #include "LLIntData.h"
  30. #include "RemotePointerWrapper.h"
  31. #include <wtf/DataLog.h>
  32. #include <wtf/PassRefPtr.h>
  33. #include <wtf/RefPtr.h>
  34. // ASSERT_VALID_CODE_POINTER checks that ptr is a non-null pointer, and that it is a valid
  35. // instruction address on the platform (for example, check any alignment requirements).
  36. #if CPU(ARM_THUMB2) && !ENABLE(LLINT_C_LOOP)
  37. // ARM/thumb instructions must be 16-bit aligned, but all code pointers to be loaded
  38. // into the processor are decorated with the bottom bit set, indicating that this is
  39. // thumb code (as oposed to 32-bit traditional ARM). The first test checks for both
  40. // decorated and undectorated null, and the second test ensures that the pointer is
  41. // decorated.
  42. #define ASSERT_VALID_CODE_POINTER(ptr) \
  43. ASSERT(reinterpret_cast<intptr_t>(ptr) & ~1); \
  44. ASSERT(reinterpret_cast<intptr_t>(ptr) & 1)
  45. #define ASSERT_VALID_CODE_OFFSET(offset) \
  46. ASSERT(!(offset & 1)) // Must be multiple of 2.
  47. #else
  48. #define ASSERT_VALID_CODE_POINTER(ptr) \
  49. ASSERT(ptr)
  50. #define ASSERT_VALID_CODE_OFFSET(offset) // Anything goes!
  51. #endif
  52. #if CPU(X86) && OS(WINDOWS)
  53. #define CALLING_CONVENTION_IS_STDCALL 1
  54. #ifndef CDECL
  55. #if COMPILER(MSVC)
  56. #define CDECL __cdecl
  57. #else
  58. #define CDECL __attribute__ ((__cdecl))
  59. #endif // COMPILER(MSVC)
  60. #endif // CDECL
  61. #else
  62. #define CALLING_CONVENTION_IS_STDCALL 0
  63. #endif
  64. #if CPU(X86)
  65. #define HAS_FASTCALL_CALLING_CONVENTION 1
  66. #ifndef FASTCALL
  67. #if COMPILER(MSVC)
  68. #define FASTCALL __fastcall
  69. #else
  70. #define FASTCALL __attribute__ ((fastcall))
  71. #endif // COMPILER(MSVC)
  72. #endif // FASTCALL
  73. #else
  74. #define HAS_FASTCALL_CALLING_CONVENTION 0
  75. #endif // CPU(X86)
  76. namespace JSC {
// FunctionPtr:
//
// FunctionPtr should be used to wrap pointers to C/C++ functions in JSC
// (particularly, the stub functions). The templated constructors accept
// function pointers of any arity up to five so that conversion to the stored
// void* stays implicit at call sites; the pointer is validated with
// ASSERT_VALID_CODE_POINTER on construction.
class FunctionPtr {
public:
    // Null function pointer.
    FunctionPtr()
        : m_value(0)
    {
    }

    // Default-calling-convention overloads, arity 0 through 5.
    template<typename returnType>
    FunctionPtr(returnType(*value)())
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    template<typename returnType, typename argType1>
    FunctionPtr(returnType(*value)(argType1))
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    template<typename returnType, typename argType1, typename argType2>
    FunctionPtr(returnType(*value)(argType1, argType2))
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    template<typename returnType, typename argType1, typename argType2, typename argType3>
    FunctionPtr(returnType(*value)(argType1, argType2, argType3))
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    template<typename returnType, typename argType1, typename argType2, typename argType3, typename argType4>
    FunctionPtr(returnType(*value)(argType1, argType2, argType3, argType4))
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    template<typename returnType, typename argType1, typename argType2, typename argType3, typename argType4, typename argType5>
    FunctionPtr(returnType(*value)(argType1, argType2, argType3, argType4, argType5))
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

#if ENABLE(DETACHED_JIT) && BUILDING_DETACHED_JIT
    // Wrappers for pointers that refer to the remote (VM) process when the JIT
    // runs detached — presumably convertible to the remote address; confirm
    // against RemotePointerWrapper.h.
    FunctionPtr(RemotePointerWrapper value)
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    template<class T>
    FunctionPtr(RemoteFunctionWrapper<T> value)
        : m_value(static_cast<void*>(value))
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }
#endif

    // MSVC doesn't seem to treat functions with different calling conventions as
    // different types; these methods already defined for fastcall, below.
#if CALLING_CONVENTION_IS_STDCALL && !OS(WINDOWS)
    // Explicit CDECL overloads, arity 0 through 4.
    template<typename returnType>
    FunctionPtr(returnType (CDECL *value)())
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    template<typename returnType, typename argType1>
    FunctionPtr(returnType (CDECL *value)(argType1))
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    template<typename returnType, typename argType1, typename argType2>
    FunctionPtr(returnType (CDECL *value)(argType1, argType2))
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    template<typename returnType, typename argType1, typename argType2, typename argType3>
    FunctionPtr(returnType (CDECL *value)(argType1, argType2, argType3))
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    template<typename returnType, typename argType1, typename argType2, typename argType3, typename argType4>
    FunctionPtr(returnType (CDECL *value)(argType1, argType2, argType3, argType4))
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }
#endif

#if HAS_FASTCALL_CALLING_CONVENTION
    // FASTCALL overloads, arity 0 through 4.
    template<typename returnType>
    FunctionPtr(returnType (FASTCALL *value)())
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    template<typename returnType, typename argType1>
    FunctionPtr(returnType (FASTCALL *value)(argType1))
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    template<typename returnType, typename argType1, typename argType2>
    FunctionPtr(returnType (FASTCALL *value)(argType1, argType2))
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    template<typename returnType, typename argType1, typename argType2, typename argType3>
    FunctionPtr(returnType (FASTCALL *value)(argType1, argType2, argType3))
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    template<typename returnType, typename argType1, typename argType2, typename argType3, typename argType4>
    FunctionPtr(returnType (FASTCALL *value)(argType1, argType2, argType3, argType4))
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }
#endif

    // Catch-all for function types not matched above; explicit because the
    // conversion is unchecked beyond the pointer-validity assert.
    template<typename FunctionType>
    explicit FunctionPtr(FunctionType* value)
        // Using a C-style cast here to avoid compiler error on RVTC:
        // Error: #694: reinterpret_cast cannot cast away const or other type qualifiers
        // (I guess on RVTC function pointers have a different constness to GCC/MSVC?)
        : m_value((void*)value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    // The raw function address; value() and executableAddress() are synonyms.
    void* value() const { return m_value; }
    void* executableAddress() const { return m_value; }

private:
    void* m_value;
};
  216. // ReturnAddressPtr:
  217. //
  218. // ReturnAddressPtr should be used to wrap return addresses generated by processor
  219. // 'call' instructions exectued in JIT code. We use return addresses to look up
  220. // exception and optimization information, and to repatch the call instruction
  221. // that is the source of the return address.
  222. class ReturnAddressPtr {
  223. public:
  224. ReturnAddressPtr()
  225. : m_value(0)
  226. {
  227. }
  228. explicit ReturnAddressPtr(void* value)
  229. : m_value(value)
  230. {
  231. ASSERT_VALID_CODE_POINTER(m_value);
  232. }
  233. explicit ReturnAddressPtr(FunctionPtr function)
  234. : m_value(function.value())
  235. {
  236. ASSERT_VALID_CODE_POINTER(m_value);
  237. }
  238. void* value() const { return m_value; }
  239. private:
  240. void* m_value;
  241. };
// MacroAssemblerCodePtr:
//
// MacroAssemblerCodePtr should be used to wrap pointers to JIT generated code.
// On ARM/Thumb-2 the stored value is "decorated" (low bit set) so it can be
// branched to directly; dataLocation() strips the decoration again.
class MacroAssemblerCodePtr {
#if ENABLE(DETACHED_JIT)
    // NOTE(review): presumably overrides operator new/delete to place instances
    // in memory shared with the detached JIT process — confirm against the
    // DETACHED_JIT_MAKE_SHARED_DATA_ALLOCATED definition.
    DETACHED_JIT_MAKE_SHARED_DATA_ALLOCATED;
#endif
public:
    // Null code pointer.
    MacroAssemblerCodePtr()
        : m_value(0)
    {
    }

    // Wrap a raw (undecorated) pointer to generated code.
    explicit MacroAssemblerCodePtr(void* value)
#if CPU(ARM_THUMB2)
        // Decorate the pointer as a thumb code pointer.
        : m_value(reinterpret_cast<char*>(value) + 1)
#else
        : m_value(value)
#endif
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    // Wrap a pointer that is ALREADY in executable (decorated) form; bypasses
    // the thumb decoration applied by the constructor above.
    static MacroAssemblerCodePtr createFromExecutableAddress(void* value)
    {
        ASSERT_VALID_CODE_POINTER(value);
        MacroAssemblerCodePtr result;
        result.m_value = value;
        return result;
    }

#if ENABLE(LLINT)
    // Code pointer for an LLInt opcode/trampoline; LLInt entry points are
    // already in executable form.
    static MacroAssemblerCodePtr createLLIntCodePtr(LLIntCode codeId)
    {
        return createFromExecutableAddress(LLInt::getCodePtr(codeId));
    }
#endif

    // A return address into JIT code is itself an executable address.
    explicit MacroAssemblerCodePtr(ReturnAddressPtr ra)
        : m_value(ra.value())
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    // Address suitable for branching to (decorated on thumb).
    void* executableAddress() const { return m_value; }
#if CPU(ARM_THUMB2)
    // To use this pointer as a data address remove the decoration.
    void* dataLocation() const { ASSERT_VALID_CODE_POINTER(m_value); return reinterpret_cast<char*>(m_value) - 1; }
#else
    void* dataLocation() const { ASSERT_VALID_CODE_POINTER(m_value); return m_value; }
#endif

    // True for a null code pointer.
    bool operator!() const
    {
        return !m_value;
    }

private:
    void* m_value;
};
// MacroAssemblerCodeRef:
//
// A reference to a section of JIT generated code. A CodeRef consists of a
// pointer to the code, and a ref pointer to the pool from within which it
// was allocated. The RefPtr keeps the executable memory alive for as long as
// any CodeRef (or other owner) references it.
class MacroAssemblerCodeRef {
private:
    // This is private because it's dangerous enough that we want uses of it
    // to be easy to find - hence the static create method below.
    explicit MacroAssemblerCodeRef(MacroAssemblerCodePtr codePtr)
        : m_codePtr(codePtr)
    {
        ASSERT(m_codePtr);
    }

public:
    // Null code ref: no code pointer, no backing memory.
    MacroAssemblerCodeRef()
    {
    }

    // Take shared ownership of an executable memory allocation; the code
    // pointer refers to the start of that allocation.
    MacroAssemblerCodeRef(PassRefPtr<ExecutableMemoryHandle> executableMemory)
        : m_codePtr(executableMemory->start())
        , m_executableMemory(executableMemory)
    {
        ASSERT(m_executableMemory->isManaged());
        ASSERT(m_executableMemory->start());
        ASSERT(m_codePtr);
    }

    // Use this only when you know that the codePtr refers to code that is
    // already being kept alive through some other means. Typically this means
    // that codePtr is immortal.
    static MacroAssemblerCodeRef createSelfManagedCodeRef(MacroAssemblerCodePtr codePtr)
    {
        return MacroAssemblerCodeRef(codePtr);
    }

#if ENABLE(LLINT)
    // Helper for creating self-managed code refs from LLInt.
    static MacroAssemblerCodeRef createLLIntCodeRef(LLIntCode codeId)
    {
        return createSelfManagedCodeRef(MacroAssemblerCodePtr::createFromExecutableAddress(LLInt::getCodePtr(codeId)));
    }
#endif

    // Backing allocation, or null for a self-managed/null code ref.
    ExecutableMemoryHandle* executableMemory() const
    {
        return m_executableMemory.get();
    }

    // Pointer to the referenced code.
    MacroAssemblerCodePtr code() const
    {
        return m_codePtr;
    }

    // Size of the backing allocation in bytes; 0 when self-managed or null,
    // since the size is unknown in those cases.
    size_t size() const
    {
        if (!m_executableMemory)
            return 0;
        return m_executableMemory->sizeInBytes();
    }

    // Best-effort disassembly to the WTF data log; returns false if no
    // disassembler is available for this build/platform.
    bool tryToDisassemble(const char* prefix) const
    {
        return JSC::tryToDisassemble(m_codePtr, size(), prefix, WTF::dataFile());
    }

    // True for a null code ref.
    bool operator!() const { return !m_codePtr; }

private:
    MacroAssemblerCodePtr m_codePtr;
    RefPtr<ExecutableMemoryHandle> m_executableMemory;
};
  359. } // namespace JSC
  360. #endif // MacroAssemblerCodeRef_h