/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef MacroAssemblerCodeRef_h
#define MacroAssemblerCodeRef_h

#include <wtf/Platform.h>

#include "ExecutableAllocator.h"
#include "PassRefPtr.h"
#include "RefPtr.h"
#include "UnusedParam.h"

#if ENABLE(ASSEMBLER)

// ASSERT_VALID_CODE_POINTER checks that ptr is a non-null pointer, and that it is a valid
// instruction address on the platform (for example, that it meets any alignment requirements).
#if CPU(ARM_THUMB2)
// ARM/thumb instructions must be 16-bit aligned, but all code pointers to be loaded
// into the processor are decorated with the bottom bit set, indicating that this is
// thumb code (as opposed to 32-bit traditional ARM).  The first test checks for both
// decorated and undecorated null, and the second test ensures that the pointer is
// decorated.
#define ASSERT_VALID_CODE_POINTER(ptr) \
    ASSERT(reinterpret_cast<intptr_t>(ptr) & ~1); \
    ASSERT(reinterpret_cast<intptr_t>(ptr) & 1)
#define ASSERT_VALID_CODE_OFFSET(offset) \
    ASSERT(!(offset & 1)) // Must be a multiple of 2.
#else
#define ASSERT_VALID_CODE_POINTER(ptr) \
    ASSERT(ptr)
#define ASSERT_VALID_CODE_OFFSET(offset) // Anything goes!
#endif
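
// For illustration only (a minimal sketch; the addresses below are made up, and
// only the low bit is significant): on ARM_THUMB2 a code pointer such as 0x8001
// is accepted because bit 0 is set, while 0x8000 would trip the second assert,
// since an undecorated address does not identify thumb code.
//
//     ASSERT_VALID_CODE_POINTER(reinterpret_cast<void*>(0x8001)); // passes: non-null and decorated
//     ASSERT_VALID_CODE_OFFSET(4);                                // passes: even offset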

namespace JSC {

// FunctionPtr:
//
// FunctionPtr should be used to wrap pointers to C/C++ functions in JSC
// (particularly, the stub functions).
class FunctionPtr {
public:
    FunctionPtr()
        : m_value(0)
    {
    }

    template<typename FunctionType>
    explicit FunctionPtr(FunctionType* value)
#if COMPILER(RVCT)
        // The RVCT compiler needs a C-style cast here, as it fails with the following error:
        // Error:  #694: reinterpret_cast cannot cast away const or other type qualifiers
        : m_value((void*)(value))
#else
        : m_value(reinterpret_cast<void*>(value))
#endif
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    void* value() const { return m_value; }
    void* executableAddress() const { return m_value; }

private:
    void* m_value;
};
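
// A minimal usage sketch (hypothetical caller code, not part of this header):
// wrap a plain C/C++ function so its raw address can be handed to the JIT as a
// call target.  `hypotheticalStub` is an invented name used only for illustration.
//
//     void hypotheticalStub();
//     FunctionPtr stub(hypotheticalStub);
//     void* target = stub.executableAddress(); // raw address for the assembler to call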

// ReturnAddressPtr:
//
// ReturnAddressPtr should be used to wrap return addresses generated by processor
// 'call' instructions executed in JIT code.  We use return addresses to look up
// exception and optimization information, and to repatch the call instruction
// that is the source of the return address.
class ReturnAddressPtr {
public:
    ReturnAddressPtr()
        : m_value(0)
    {
    }

    explicit ReturnAddressPtr(void* value)
        : m_value(value)
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    explicit ReturnAddressPtr(FunctionPtr function)
        : m_value(function.value())
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    void* value() const { return m_value; }

private:
    void* m_value;
};
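
// A minimal usage sketch (hypothetical caller code; `grabReturnAddress` is an
// invented helper standing in for wherever the JIT glue reads the return address
// off the stack):
//
//     extern void* grabReturnAddress();       // hypothetical helper in JIT glue code
//     ReturnAddressPtr ra(grabReturnAddress());
//     void* callSiteKey = ra.value();         // key for repatch / exception-info lookup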

// MacroAssemblerCodePtr:
//
// MacroAssemblerCodePtr should be used to wrap pointers to JIT generated code.
class MacroAssemblerCodePtr {
public:
    MacroAssemblerCodePtr()
        : m_value(0)
    {
    }

    explicit MacroAssemblerCodePtr(void* value)
#if CPU(ARM_THUMB2)
        // Decorate the pointer as a thumb code pointer.
        : m_value(reinterpret_cast<char*>(value) + 1)
#else
        : m_value(value)
#endif
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    explicit MacroAssemblerCodePtr(ReturnAddressPtr ra)
        : m_value(ra.value())
    {
        ASSERT_VALID_CODE_POINTER(m_value);
    }

    void* executableAddress() const { return m_value; }
#if CPU(ARM_THUMB2)
    // To use this pointer as a data address, remove the decoration.
    void* dataLocation() const { ASSERT_VALID_CODE_POINTER(m_value); return reinterpret_cast<char*>(m_value) - 1; }
#else
    void* dataLocation() const { ASSERT_VALID_CODE_POINTER(m_value); return m_value; }
#endif

    bool operator!() const
    {
        return !m_value;
    }

private:
    void* m_value;
};
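
// A minimal usage sketch (the allocation call is hypothetical): on ARM_THUMB2 the
// constructor sets the low bit, so executableAddress() returns the decorated
// address while dataLocation() returns the undecorated one; on other CPUs the
// two are identical.
//
//     void* buffer = allocateSomeJITBuffer();    // hypothetical allocator call
//     MacroAssemblerCodePtr code(buffer);
//     void* entry = code.executableAddress();    // address to jump/call to
//     void* bytes = code.dataLocation();         // address to read or patch the bytes at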

// MacroAssemblerCodeRef:
//
// A reference to a section of JIT generated code.  A CodeRef consists of a
// pointer to the code, and a ref pointer to the pool from within which it
// was allocated.
class MacroAssemblerCodeRef {
public:
    MacroAssemblerCodeRef()
        : m_size(0)
    {
    }

    MacroAssemblerCodeRef(void* code, PassRefPtr<ExecutablePool> executablePool, size_t size)
        : m_code(code)
        , m_executablePool(executablePool)
        , m_size(size)
    {
    }

    MacroAssemblerCodePtr m_code;
    RefPtr<ExecutablePool> m_executablePool;
    size_t m_size;
};
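
// A minimal construction sketch (the three inputs come from a hypothetical
// code-generation step; the helper names are illustrative only):
//
//     RefPtr<ExecutablePool> pool = poolForGeneratedCode();   // hypothetical
//     void* start = startOfGeneratedCode();                   // hypothetical
//     size_t bytes = sizeOfGeneratedCode();                   // hypothetical
//     MacroAssemblerCodeRef ref(start, pool.release(), bytes);
//     // ref.m_executablePool keeps the pool (and thus the memory) alive while the code is in use.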

} // namespace JSC

#endif // ENABLE(ASSEMBLER)

#endif // MacroAssemblerCodeRef_h