/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if !USE(JSVALUE32_64)
#include "JIT.h"

#include "CodeBlock.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "Interpreter.h"
#include "ResultType.h"
#include "SamplingTool.h"

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

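// Fills in the ArgumentCount, Callee, and ScopeChain slots of the call frame being set up.
// Expects the callee JSFunction in regT0 and the argument count in regT1.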
void JIT::compileOpCallInitializeCallFrame()
{
    store32(regT1, Address(callFrameRegister, RegisterFile::ArgumentCount * static_cast<int>(sizeof(Register))));
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain) + OBJECT_OFFSETOF(ScopeChain, m_node)), regT3); // newScopeChain
    storePtr(regT0, Address(callFrameRegister, RegisterFile::Callee * static_cast<int>(sizeof(Register))));
    storePtr(regT3, Address(callFrameRegister, RegisterFile::ScopeChain * static_cast<int>(sizeof(Register))));
}

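// Writes the result of the preceding call, left in the return value register by the callee,
// into the destination virtual register named by the instruction's first operand.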
void JIT::emit_op_call_put_result(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    emitPutVirtualRegister(dst);
}

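// Fast path for op_call_varargs. The argument count is not known at compile time, so it is
// read from a virtual register (into regT1) and used to roll the call frame dynamically.
// Callees that are not JSFunctions are deferred to the slow case.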
void JIT::compileOpCallVarargs(Instruction* instruction)
{
    int callee = instruction[1].u.operand;
    int argCountRegister = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    emitGetVirtualRegister(argCountRegister, regT1);
    emitGetVirtualRegister(callee, regT0);
    addPtr(Imm32(registerOffset), regT1, regT2);

    // Check for JSFunctions.
    emitJumpSlowCaseIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    mul32(Imm32(sizeof(Register)), regT2, regT2);
    intptr_t offset = (intptr_t)sizeof(Register) * (intptr_t)RegisterFile::CallerFrame;
    addPtr(Imm32((int32_t)offset), regT2, regT3);
    addPtr(callFrameRegister, regT3);
    storePtr(callFrameRegister, regT3);
    addPtr(regT2, callFrameRegister);
    emitNakedCall(m_globalData->jitStubs->ctiVirtualCall());

    sampleCodeBlock(m_codeBlock);
}

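// Slow path for op_call_varargs: reached when the callee is not a JSCell or does not have
// the JSFunction vptr. The call is handed off to the cti_op_call_NotJSFunction stub.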
void JIT::compileOpCallVarargsSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_call_NotJSFunction);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT2);
    stubCall.addArgument(regT1);
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}

#if !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

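// With call linking disabled there is no patchable callee check: every call rolls the
// call frame and dispatches through the generic ctiVirtualCall/ctiVirtualConstruct
// trampolines.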
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    // Handle eval
    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee, regT0);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue())));
    }

    emitGetVirtualRegister(callee, regT0);

    // Check for JSFunctions.
    emitJumpSlowCaseIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    emitNakedCall(opcodeID == op_construct ? m_globalData->jitStubs->ctiVirtualConstruct() : m_globalData->jitStubs->ctiVirtualCall());

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    sampleCodeBlock(m_codeBlock);
}

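// Slow path for the generic call above: the callee was not a JSCell, or not a JSFunction,
// so the call is handled entirely by the NotJSFunction/NotJSConstruct stubs.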
void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned, OpcodeID opcodeID)
{
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.addArgument(regT0);
    stubCall.addArgument(JIT::Imm32(registerOffset));
    stubCall.addArgument(JIT::Imm32(argCount));
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}

#else // !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

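// When call linking is enabled, the hot path compares the callee against a patchable
// pointer (initialized to the empty JSValue, so the first execution always takes the slow
// path). Once the call site has been linked, a matching callee falls through to an inline
// call frame set-up and a direct call whose target has been patched to the callee's code.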
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    // Handle eval
    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee, regT0);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue())));
    }

    // This plants a check for a cached JSFunction value, so we can plant a fast link to the callee.
    // This deliberately leaves the callee in regT0, used when setting up the stack frame below.
    emitGetVirtualRegister(callee, regT0);
    DataLabelPtr addressOfLinkedFunctionCheck;

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    Jump jumpToSlow = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, ImmPtr(JSValue::encode(JSValue())));

    END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    addSlowCase(jumpToSlow);
    ASSERT_JIT_OFFSET(differenceBetween(addressOfLinkedFunctionCheck, jumpToSlow), patchOffsetOpCallCompareToJump);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;

    // The following is the fast case, only used when a callee can be linked.

    // Fast version of stack frame initialization, directly relative to callFrameRegister.
    // Note that this omits to set up RegisterFile::CodeBlock, which is set in the callee.

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain) + OBJECT_OFFSETOF(ScopeChain, m_node)), regT1); // newScopeChain

    store32(Imm32(argCount), Address(callFrameRegister, (registerOffset + RegisterFile::ArgumentCount) * static_cast<int>(sizeof(Register))));
    storePtr(callFrameRegister, Address(callFrameRegister, (registerOffset + RegisterFile::CallerFrame) * static_cast<int>(sizeof(Register))));
    storePtr(regT0, Address(callFrameRegister, (registerOffset + RegisterFile::Callee) * static_cast<int>(sizeof(Register))));
    storePtr(regT1, Address(callFrameRegister, (registerOffset + RegisterFile::ScopeChain) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * sizeof(Register)), callFrameRegister);

    // Call to the callee
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    sampleCodeBlock(m_codeBlock);
}

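// Slow path for a linkable call: reached when the patchable callee check above fails.
// If the callee is still a JSFunction, the call goes through the virtual call/construct
// link trampoline, which can link this call site for future fast-path hits; otherwise it
// falls through to the NotJSFunction/NotJSConstruct stub call below.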
void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID)
{
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    linkSlowCase(iter);

    // Fast check for JS function.
    Jump callLinkFailNotObject = emitJumpIfNotJSCell(regT0);
    Jump callLinkFailNotJSFunction = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_globalData->jitStubs->ctiVirtualConstructLink() : m_globalData->jitStubs->ctiVirtualCallLink());

    // Done! - return back to the hot path.
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_eval));
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_call));

    // This handles host functions
    callLinkFailNotObject.link(this);
    callLinkFailNotJSFunction.link(this);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.addArgument(regT0);
    stubCall.addArgument(JIT::Imm32(registerOffset));
    stubCall.addArgument(JIT::Imm32(argCount));
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}

/* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

#endif // !ENABLE(JIT_OPTIMIZE_CALL)

} // namespace JSC

#endif // !USE(JSVALUE32_64)
#endif // ENABLE(JIT)