/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
|
25 |
|
26 #include "config.h" |
|
27 |
|
28 #if ENABLE(JIT) |
|
29 #include "JIT.h" |
|
30 |
|
// This probably does not belong here; adding here for now as a quick Windows build fix.
#if ENABLE(ASSEMBLER) && CPU(X86) && !OS(MAC_OS_X)
#include "MacroAssembler.h"
// Out-of-line definition of the static SSE2-availability flag declared in
// MacroAssemblerX86Common; starts in the "not yet probed" state.
JSC::MacroAssemblerX86Common::SSE2CheckState JSC::MacroAssemblerX86Common::s_sse2CheckState = NotCheckedSSE2;
#endif
|
36 |
|
37 #include "CodeBlock.h" |
|
38 #include "Interpreter.h" |
|
39 #include "JITInlineMethods.h" |
|
40 #include "JITStubCall.h" |
|
41 #include "JSArray.h" |
|
42 #include "JSFunction.h" |
|
43 #include "LinkBuffer.h" |
|
44 #include "RepatchBuffer.h" |
|
45 #include "ResultType.h" |
|
46 #include "SamplingTool.h" |
|
47 |
|
48 #ifndef NDEBUG |
|
49 #include <stdio.h> |
|
50 #endif |
|
51 |
|
52 using namespace std; |
|
53 |
|
54 namespace JSC { |
|
55 |
|
56 void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction) |
|
57 { |
|
58 RepatchBuffer repatchBuffer(codeblock); |
|
59 repatchBuffer.relinkNearCallerToTrampoline(returnAddress, newCalleeFunction); |
|
60 } |
|
61 |
|
62 void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction) |
|
63 { |
|
64 RepatchBuffer repatchBuffer(codeblock); |
|
65 repatchBuffer.relinkCallerToTrampoline(returnAddress, newCalleeFunction); |
|
66 } |
|
67 |
|
68 void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction) |
|
69 { |
|
70 RepatchBuffer repatchBuffer(codeblock); |
|
71 repatchBuffer.relinkCallerToFunction(returnAddress, newCalleeFunction); |
|
72 } |
|
73 |
|
// Construct a JIT for |codeBlock|. The null checks below show codeBlock may
// be null; in that case the per-opcode bookkeeping vectors are left empty.
// The vectors are pre-sized from the code block so that indices assigned
// during compilation line up with the code block's stub/link-info records.
JIT::JIT(JSGlobalData* globalData, CodeBlock* codeBlock)
    : m_interpreter(globalData->interpreter)
    , m_globalData(globalData)
    , m_codeBlock(codeBlock)
    , m_labels(codeBlock ? codeBlock->instructions().size() : 0) // one Label slot per bytecode instruction
    , m_propertyAccessCompilationInfo(codeBlock ? codeBlock->numberOfStructureStubInfos() : 0)
    , m_callStructureStubCompilationInfo(codeBlock ? codeBlock->numberOfCallLinkInfos() : 0)
    , m_bytecodeOffset((unsigned)-1) // (unsigned)-1 is the "not compiling an opcode" sentinel, guarded by ASSERTs elsewhere
#if USE(JSVALUE32_64)
    , m_jumpTargetIndex(0)
    , m_mappedBytecodeOffset((unsigned)-1)
    , m_mappedVirtualRegisterIndex((unsigned)-1)
    , m_mappedTag((RegisterID)-1)
    , m_mappedPayload((RegisterID)-1)
#else
    , m_lastResultBytecodeRegister(std::numeric_limits<int>::max())
    , m_jumpTargetsPosition(0)
#endif
{
}
|
94 |
|
#if USE(JSVALUE32_64)
// Emit the per-loop timeout check: decrement timeoutCheckRegister and, only
// when it reaches zero, call the cti_timeout_check stub. The stub's return
// value becomes the new counter. On JSVALUE32_64 the cached last-result
// value lives in the regT1:regT0 pair, so it is spilled as a stub argument
// and reloaded after the call.
void JIT::emitTimeoutCheck()
{
    Jump skipTimeout = branchSub32(NonZero, Imm32(1), timeoutCheckRegister);
    JITStubCall stubCall(this, cti_timeout_check);
    stubCall.addArgument(regT1, regT0); // save last result registers.
    stubCall.call(timeoutCheckRegister);
    stubCall.getArgument(0, regT1, regT0); // reload last result registers.
    skipTimeout.link(this);
}
#else
// Same decrement-and-call-stub pattern as above, but here the cached
// last-result register is simply invalidated after the stub call rather
// than saved and restored across it.
void JIT::emitTimeoutCheck()
{
    Jump skipTimeout = branchSub32(NonZero, Imm32(1), timeoutCheckRegister);
    JITStubCall(this, cti_timeout_check).call(timeoutCheckRegister);
    skipTimeout.link(this);

    killLastResultRegister();
}
#endif
|
115 |
|
// Advance m_bytecodeOffset past the current opcode and leave the dispatch switch.
#define NEXT_OPCODE(name) \
    m_bytecodeOffset += OPCODE_LENGTH(name); \
    break;

#if USE(JSVALUE32_64)
// Expand to a switch case that compiles a binary operator as a call to the
// cti_<name> stub: operands come from instruction slots 2 and 3, and the
// result is written to the virtual register in slot 1.
#define DEFINE_BINARY_OP(name) \
    case name: { \
        JITStubCall stubCall(this, cti_##name); \
        stubCall.addArgument(currentInstruction[2].u.operand); \
        stubCall.addArgument(currentInstruction[3].u.operand); \
        stubCall.call(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }

// Unary flavor of the above: one operand (slot 2), result in slot 1.
#define DEFINE_UNARY_OP(name) \
    case name: { \
        JITStubCall stubCall(this, cti_##name); \
        stubCall.addArgument(currentInstruction[2].u.operand); \
        stubCall.call(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }

#else // USE(JSVALUE32_64)

// Same stub-call expansion as the JSVALUE32_64 versions, but on this value
// representation addArgument takes a scratch register (regT2) to load the
// operand through.
#define DEFINE_BINARY_OP(name) \
    case name: { \
        JITStubCall stubCall(this, cti_##name); \
        stubCall.addArgument(currentInstruction[2].u.operand, regT2); \
        stubCall.addArgument(currentInstruction[3].u.operand, regT2); \
        stubCall.call(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }

#define DEFINE_UNARY_OP(name) \
    case name: { \
        JITStubCall stubCall(this, cti_##name); \
        stubCall.addArgument(currentInstruction[2].u.operand, regT2); \
        stubCall.call(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }
#endif // USE(JSVALUE32_64)

// Expand to a switch case that dispatches to the fast-path emitter emit_<name>.
#define DEFINE_OP(name) \
    case name: { \
        emit_##name(currentInstruction); \
        NEXT_OPCODE(name); \
    }

// Expand to a switch case that dispatches to the slow-path emitter
// emitSlow_<name>, advancing |iter| through the recorded slow cases.
#define DEFINE_SLOWCASE_OP(name) \
    case name: { \
        emitSlow_##name(currentInstruction, iter); \
        NEXT_OPCODE(name); \
    }
|
// First compilation pass: walk the bytecode from start to end, record a
// Label for every instruction in m_labels, and emit fast-path code for each
// opcode through the DEFINE_* dispatch macros. Opcodes listed only as bare
// cases at the bottom are polymorphic specializations that must never appear
// in an unlinked instruction stream.
void JIT::privateCompileMainPass()
{
    Instruction* instructionsBegin = m_codeBlock->instructions().begin();
    unsigned instructionCount = m_codeBlock->instructions().size();

    // These indices are replayed from zero by privateCompileSlowCases so the
    // same opcode maps to the same stub/link-info record in both passes.
    m_propertyAccessInstructionIndex = 0;
    m_globalResolveInfoIndex = 0;
    m_callLinkInfoIndex = 0;

    // NEXT_OPCODE advances m_bytecodeOffset, so the loop has no increment.
    for (m_bytecodeOffset = 0; m_bytecodeOffset < instructionCount; ) {
        Instruction* currentInstruction = instructionsBegin + m_bytecodeOffset;
        ASSERT_WITH_MESSAGE(m_interpreter->isOpcode(currentInstruction->u.opcode), "privateCompileMainPass gone bad @ %d", m_bytecodeOffset);

#if ENABLE(OPCODE_SAMPLING)
        if (m_bytecodeOffset > 0) // Avoid the overhead of sampling op_enter twice.
            sampleInstruction(currentInstruction);
#endif

#if !USE(JSVALUE32_64)
        // If this label has already been used as a jump target, control can
        // arrive here from elsewhere, so the cached last-result register
        // cannot be trusted.
        if (m_labels[m_bytecodeOffset].isUsed())
            killLastResultRegister();
#endif

        m_labels[m_bytecodeOffset] = label();

        switch (m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
        DEFINE_BINARY_OP(op_del_by_val)
#if USE(JSVALUE32)
        DEFINE_BINARY_OP(op_div)
#endif
        DEFINE_BINARY_OP(op_in)
        DEFINE_BINARY_OP(op_less)
        DEFINE_BINARY_OP(op_lesseq)
        DEFINE_UNARY_OP(op_is_boolean)
        DEFINE_UNARY_OP(op_is_function)
        DEFINE_UNARY_OP(op_is_number)
        DEFINE_UNARY_OP(op_is_object)
        DEFINE_UNARY_OP(op_is_string)
        DEFINE_UNARY_OP(op_is_undefined)
#if !USE(JSVALUE32_64)
        DEFINE_UNARY_OP(op_negate)
#endif
        DEFINE_UNARY_OP(op_typeof)

        DEFINE_OP(op_add)
        DEFINE_OP(op_bitand)
        DEFINE_OP(op_bitnot)
        DEFINE_OP(op_bitor)
        DEFINE_OP(op_bitxor)
        DEFINE_OP(op_call)
        DEFINE_OP(op_call_eval)
        DEFINE_OP(op_call_varargs)
        DEFINE_OP(op_catch)
        DEFINE_OP(op_construct)
        DEFINE_OP(op_get_callee)
        DEFINE_OP(op_create_this)
        DEFINE_OP(op_convert_this)
        DEFINE_OP(op_init_arguments)
        DEFINE_OP(op_create_arguments)
        DEFINE_OP(op_debug)
        DEFINE_OP(op_del_by_id)
#if !USE(JSVALUE32)
        DEFINE_OP(op_div)
#endif
        DEFINE_OP(op_end)
        DEFINE_OP(op_enter)
        DEFINE_OP(op_enter_with_activation)
        DEFINE_OP(op_eq)
        DEFINE_OP(op_eq_null)
        DEFINE_OP(op_get_by_id)
        DEFINE_OP(op_get_by_val)
        DEFINE_OP(op_get_by_pname)
        DEFINE_OP(op_get_global_var)
        DEFINE_OP(op_get_pnames)
        DEFINE_OP(op_get_scoped_var)
        DEFINE_OP(op_instanceof)
        DEFINE_OP(op_jeq_null)
        DEFINE_OP(op_jfalse)
        DEFINE_OP(op_jmp)
        DEFINE_OP(op_jmp_scopes)
        DEFINE_OP(op_jneq_null)
        DEFINE_OP(op_jneq_ptr)
        DEFINE_OP(op_jnless)
        DEFINE_OP(op_jless)
        DEFINE_OP(op_jlesseq)
        DEFINE_OP(op_jnlesseq)
        DEFINE_OP(op_jsr)
        DEFINE_OP(op_jtrue)
        DEFINE_OP(op_load_varargs)
        DEFINE_OP(op_loop)
        DEFINE_OP(op_loop_if_less)
        DEFINE_OP(op_loop_if_lesseq)
        DEFINE_OP(op_loop_if_true)
        DEFINE_OP(op_loop_if_false)
        DEFINE_OP(op_lshift)
        DEFINE_OP(op_method_check)
        DEFINE_OP(op_mod)
        DEFINE_OP(op_mov)
        DEFINE_OP(op_mul)
#if USE(JSVALUE32_64)
        DEFINE_OP(op_negate)
#endif
        DEFINE_OP(op_neq)
        DEFINE_OP(op_neq_null)
        DEFINE_OP(op_new_array)
        DEFINE_OP(op_new_error)
        DEFINE_OP(op_new_func)
        DEFINE_OP(op_new_func_exp)
        DEFINE_OP(op_new_object)
        DEFINE_OP(op_new_regexp)
        DEFINE_OP(op_next_pname)
        DEFINE_OP(op_not)
        DEFINE_OP(op_nstricteq)
        DEFINE_OP(op_pop_scope)
        DEFINE_OP(op_post_dec)
        DEFINE_OP(op_post_inc)
        DEFINE_OP(op_pre_dec)
        DEFINE_OP(op_pre_inc)
        DEFINE_OP(op_profile_did_call)
        DEFINE_OP(op_profile_will_call)
        DEFINE_OP(op_push_new_scope)
        DEFINE_OP(op_push_scope)
        DEFINE_OP(op_put_by_id)
        DEFINE_OP(op_put_by_index)
        DEFINE_OP(op_put_by_val)
        DEFINE_OP(op_put_getter)
        DEFINE_OP(op_put_global_var)
        DEFINE_OP(op_put_scoped_var)
        DEFINE_OP(op_put_setter)
        DEFINE_OP(op_resolve)
        DEFINE_OP(op_resolve_base)
        DEFINE_OP(op_resolve_global)
        DEFINE_OP(op_resolve_global_dynamic)
        DEFINE_OP(op_resolve_skip)
        DEFINE_OP(op_resolve_with_base)
        DEFINE_OP(op_ret)
        DEFINE_OP(op_call_put_result)
        DEFINE_OP(op_ret_object_or_this)
        DEFINE_OP(op_rshift)
        DEFINE_OP(op_urshift)
        DEFINE_OP(op_sret)
        DEFINE_OP(op_strcat)
        DEFINE_OP(op_stricteq)
        DEFINE_OP(op_sub)
        DEFINE_OP(op_switch_char)
        DEFINE_OP(op_switch_imm)
        DEFINE_OP(op_switch_string)
        DEFINE_OP(op_tear_off_activation)
        DEFINE_OP(op_tear_off_arguments)
        DEFINE_OP(op_throw)
        DEFINE_OP(op_to_jsnumber)
        DEFINE_OP(op_to_primitive)

        // The following are runtime specializations installed by repatching;
        // they must never be present in the bytecode handed to the JIT.
        case op_get_array_length:
        case op_get_by_id_chain:
        case op_get_by_id_generic:
        case op_get_by_id_proto:
        case op_get_by_id_proto_list:
        case op_get_by_id_self:
        case op_get_by_id_self_list:
        case op_get_by_id_getter_chain:
        case op_get_by_id_getter_proto:
        case op_get_by_id_getter_proto_list:
        case op_get_by_id_getter_self:
        case op_get_by_id_getter_self_list:
        case op_get_by_id_custom_chain:
        case op_get_by_id_custom_proto:
        case op_get_by_id_custom_proto_list:
        case op_get_by_id_custom_self:
        case op_get_by_id_custom_self_list:
        case op_get_string_length:
        case op_put_by_id_generic:
        case op_put_by_id_replace:
        case op_put_by_id_transition:
            ASSERT_NOT_REACHED();
        }
    }

    // Every pre-sized record must have been consumed exactly once.
    ASSERT(m_propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
    ASSERT(m_callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());

#ifndef NDEBUG
    // Reset this, in order to guard its use with ASSERTs.
    m_bytecodeOffset = (unsigned)-1;
#endif
}
|
356 |
|
357 |
|
358 void JIT::privateCompileLinkPass() |
|
359 { |
|
360 unsigned jmpTableCount = m_jmpTable.size(); |
|
361 for (unsigned i = 0; i < jmpTableCount; ++i) |
|
362 m_jmpTable[i].from.linkTo(m_labels[m_jmpTable[i].toBytecodeOffset], this); |
|
363 m_jmpTable.clear(); |
|
364 } |
|
365 |
|
// Third compilation pass: emit out-of-line slow-path code for every slow
// case recorded during the main pass. Each DEFINE_SLOWCASE_OP handler
// consumes (via |iter|) exactly the slow-case jumps its fast path recorded
// for one bytecode offset; the ASSERTs after the switch enforce that.
void JIT::privateCompileSlowCases()
{
    Instruction* instructionsBegin = m_codeBlock->instructions().begin();

    // Replay the same index sequence as privateCompileMainPass so slow paths
    // attach to the same stub/link-info records as their fast paths.
    m_propertyAccessInstructionIndex = 0;
    m_globalResolveInfoIndex = 0;
    m_callLinkInfoIndex = 0;

    for (Vector<SlowCaseEntry>::iterator iter = m_slowCases.begin(); iter != m_slowCases.end();) {
#if !USE(JSVALUE32_64)
        // Slow paths are reached by jumps, so the cached result register is invalid.
        killLastResultRegister();
#endif

        m_bytecodeOffset = iter->to;
#ifndef NDEBUG
        unsigned firstTo = m_bytecodeOffset;
#endif
        Instruction* currentInstruction = instructionsBegin + m_bytecodeOffset;

        switch (m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
        DEFINE_SLOWCASE_OP(op_add)
        DEFINE_SLOWCASE_OP(op_bitand)
        DEFINE_SLOWCASE_OP(op_bitnot)
        DEFINE_SLOWCASE_OP(op_bitor)
        DEFINE_SLOWCASE_OP(op_bitxor)
        DEFINE_SLOWCASE_OP(op_call)
        DEFINE_SLOWCASE_OP(op_call_eval)
        DEFINE_SLOWCASE_OP(op_call_varargs)
        DEFINE_SLOWCASE_OP(op_construct)
        DEFINE_SLOWCASE_OP(op_convert_this)
#if !USE(JSVALUE32)
        DEFINE_SLOWCASE_OP(op_div)
#endif
        DEFINE_SLOWCASE_OP(op_eq)
        DEFINE_SLOWCASE_OP(op_get_by_id)
        DEFINE_SLOWCASE_OP(op_get_by_val)
        DEFINE_SLOWCASE_OP(op_get_by_pname)
        DEFINE_SLOWCASE_OP(op_instanceof)
        DEFINE_SLOWCASE_OP(op_jfalse)
        DEFINE_SLOWCASE_OP(op_jnless)
        DEFINE_SLOWCASE_OP(op_jless)
        DEFINE_SLOWCASE_OP(op_jlesseq)
        DEFINE_SLOWCASE_OP(op_jnlesseq)
        DEFINE_SLOWCASE_OP(op_jtrue)
        DEFINE_SLOWCASE_OP(op_loop_if_less)
        DEFINE_SLOWCASE_OP(op_loop_if_lesseq)
        DEFINE_SLOWCASE_OP(op_loop_if_true)
        DEFINE_SLOWCASE_OP(op_loop_if_false)
        DEFINE_SLOWCASE_OP(op_lshift)
        DEFINE_SLOWCASE_OP(op_method_check)
        DEFINE_SLOWCASE_OP(op_mod)
        DEFINE_SLOWCASE_OP(op_mul)
#if USE(JSVALUE32_64)
        DEFINE_SLOWCASE_OP(op_negate)
#endif
        DEFINE_SLOWCASE_OP(op_neq)
        DEFINE_SLOWCASE_OP(op_not)
        DEFINE_SLOWCASE_OP(op_nstricteq)
        DEFINE_SLOWCASE_OP(op_post_dec)
        DEFINE_SLOWCASE_OP(op_post_inc)
        DEFINE_SLOWCASE_OP(op_pre_dec)
        DEFINE_SLOWCASE_OP(op_pre_inc)
        DEFINE_SLOWCASE_OP(op_put_by_id)
        DEFINE_SLOWCASE_OP(op_put_by_val)
        DEFINE_SLOWCASE_OP(op_resolve_global)
        DEFINE_SLOWCASE_OP(op_resolve_global_dynamic)
        DEFINE_SLOWCASE_OP(op_rshift)
        DEFINE_SLOWCASE_OP(op_urshift)
        DEFINE_SLOWCASE_OP(op_stricteq)
        DEFINE_SLOWCASE_OP(op_sub)
        DEFINE_SLOWCASE_OP(op_to_jsnumber)
        DEFINE_SLOWCASE_OP(op_to_primitive)
        default:
            ASSERT_NOT_REACHED();
        }

        // The handler must have advanced |iter| past all (and only) the slow
        // cases belonging to this bytecode offset.
        ASSERT_WITH_MESSAGE(iter == m_slowCases.end() || firstTo != iter->to,"Not enough jumps linked in slow case codegen.");
        ASSERT_WITH_MESSAGE(firstTo == (iter - 1)->to, "Too many jumps linked in slow case codegen.");

        // Rejoin the fast path after the slow-path code.
        emitJumpSlowToHot(jump(), 0);
    }

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    ASSERT(m_propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
#endif
    ASSERT(m_callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());

#ifndef NDEBUG
    // Reset this, in order to guard its use with ASSERTs.
    m_bytecodeOffset = (unsigned)-1;
#endif
}
|
458 |
|
// Top-level compilation driver: emits the function prologue (return-PC
// spill, register-file overflow check, arity check for function code), runs
// the three compilation passes, then links everything into executable
// memory and back-patches the code block's metadata (switch tables,
// exception handlers, call records, stub infos, call-link infos).
// |functionEntryArityCheck|, when non-null, receives the entry point that
// performs the arity check before falling into the normal entry.
JITCode JIT::privateCompile(CodePtr* functionEntryArityCheck)
{
    // Could use a pop_m, but would need to offset the following instruction if so.
    preserveReturnAddressAfterCall(regT2);
    emitPutToCallFrameHeader(regT2, RegisterFile::ReturnPC);

    Label beginLabel(this);

    sampleCodeBlock(m_codeBlock);
#if ENABLE(OPCODE_SAMPLING)
    sampleInstruction(m_codeBlock->instructions().begin());
#endif

    Jump registerFileCheck;
    if (m_codeBlock->codeType() == FunctionCode) {
        // In the case of a fast linked call, we do not set this up in the caller.
        emitPutImmediateToCallFrameHeader(m_codeBlock, RegisterFile::CodeBlock);

        // Branch to the out-of-line stub call if the frame plus this
        // function's callee registers would overflow the register file.
        addPtr(Imm32(m_codeBlock->m_numCalleeRegisters * sizeof(Register)), callFrameRegister, regT1);
        registerFileCheck = branchPtr(Below, AbsoluteAddress(&m_globalData->interpreter->registerFile().m_end), regT1);
    }

    Label functionBody = label();

    privateCompileMainPass();
    privateCompileLinkPass();
    privateCompileSlowCases();

    Label arityCheck;
    Call callArityCheck;
    if (m_codeBlock->codeType() == FunctionCode) {
        // Out-of-line register-file grow path; rejoins the function body.
        registerFileCheck.link(this);
        m_bytecodeOffset = 0;
        JITStubCall(this, cti_register_file_check).call();
#ifndef NDEBUG
        m_bytecodeOffset = (unsigned)-1; // Reset this, in order to guard its use with ASSERTs.
#endif
        jump(functionBody);

        // Alternate entry point that verifies the caller passed the expected
        // number of arguments; on mismatch it calls the arity-check stub and
        // adopts the (possibly moved) call frame it returns in regT0.
        arityCheck = label();
        preserveReturnAddressAfterCall(regT2);
        emitPutToCallFrameHeader(regT2, RegisterFile::ReturnPC);
        branch32(Equal, regT1, Imm32(m_codeBlock->m_numParameters)).linkTo(beginLabel, this);
        restoreArgumentReference();
        callArityCheck = call();
        move(regT0, callFrameRegister);
        jump(beginLabel);
    }

    // privateCompileLinkPass must have consumed every recorded jump.
    ASSERT(m_jmpTable.isEmpty());

    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));

    // Translate vPC offsets into addresses in JIT generated code, for switch tables.
    for (unsigned i = 0; i < m_switches.size(); ++i) {
        SwitchRecord record = m_switches[i];
        unsigned bytecodeOffset = record.bytecodeOffset;

        if (record.type != SwitchRecord::String) {
            ASSERT(record.type == SwitchRecord::Immediate || record.type == SwitchRecord::Character);
            ASSERT(record.jumpTable.simpleJumpTable->branchOffsets.size() == record.jumpTable.simpleJumpTable->ctiOffsets.size());

            record.jumpTable.simpleJumpTable->ctiDefault = patchBuffer.locationOf(m_labels[bytecodeOffset + record.defaultOffset]);

            for (unsigned j = 0; j < record.jumpTable.simpleJumpTable->branchOffsets.size(); ++j) {
                unsigned offset = record.jumpTable.simpleJumpTable->branchOffsets[j];
                // A zero branch offset means "no case here": fall to default.
                record.jumpTable.simpleJumpTable->ctiOffsets[j] = offset ? patchBuffer.locationOf(m_labels[bytecodeOffset + offset]) : record.jumpTable.simpleJumpTable->ctiDefault;
            }
        } else {
            ASSERT(record.type == SwitchRecord::String);

            record.jumpTable.stringJumpTable->ctiDefault = patchBuffer.locationOf(m_labels[bytecodeOffset + record.defaultOffset]);

            StringJumpTable::StringOffsetTable::iterator end = record.jumpTable.stringJumpTable->offsetTable.end();
            for (StringJumpTable::StringOffsetTable::iterator it = record.jumpTable.stringJumpTable->offsetTable.begin(); it != end; ++it) {
                unsigned offset = it->second.branchOffset;
                it->second.ctiOffset = offset ? patchBuffer.locationOf(m_labels[bytecodeOffset + offset]) : record.jumpTable.stringJumpTable->ctiDefault;
            }
        }
    }

    // Resolve exception-handler targets to machine-code addresses.
    for (size_t i = 0; i < m_codeBlock->numberOfExceptionHandlers(); ++i) {
        HandlerInfo& handler = m_codeBlock->exceptionHandler(i);
        handler.nativeCode = patchBuffer.locationOf(m_labels[handler.target]);
    }

    // Link direct calls to their C-function targets (calls with a null |to|
    // are linked elsewhere, e.g. by call linking).
    for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
        if (iter->to)
            patchBuffer.link(iter->from, FunctionPtr(iter->to));
    }

    // Record return-address -> bytecode-offset mappings for exception handling.
    if (m_codeBlock->hasExceptionInfo()) {
        m_codeBlock->callReturnIndexVector().reserveCapacity(m_calls.size());
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter)
            m_codeBlock->callReturnIndexVector().append(CallReturnOffsetToBytecodeOffset(patchBuffer.returnAddressOffset(iter->from), iter->bytecodeOffset));
    }

    // Link absolute addresses for jsr
    for (Vector<JSRInfo>::iterator iter = m_jsrSites.begin(); iter != m_jsrSites.end(); ++iter)
        patchBuffer.patch(iter->storeLocation, patchBuffer.locationOf(iter->target).executableAddress());

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // Fill in the code locations the property-access repatching code needs.
    for (unsigned i = 0; i < m_codeBlock->numberOfStructureStubInfos(); ++i) {
        StructureStubInfo& info = m_codeBlock->structureStubInfo(i);
        info.callReturnLocation = patchBuffer.locationOf(m_propertyAccessCompilationInfo[i].callReturnLocation);
        info.hotPathBegin = patchBuffer.locationOf(m_propertyAccessCompilationInfo[i].hotPathBegin);
    }
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    // Fill in the code locations the call-linking code needs.
    for (unsigned i = 0; i < m_codeBlock->numberOfCallLinkInfos(); ++i) {
        CallLinkInfo& info = m_codeBlock->callLinkInfo(i);
        info.ownerCodeBlock = m_codeBlock;
        info.callReturnLocation = patchBuffer.locationOfNearCall(m_callStructureStubCompilationInfo[i].callReturnLocation);
        info.hotPathBegin = patchBuffer.locationOf(m_callStructureStubCompilationInfo[i].hotPathBegin);
        info.hotPathOther = patchBuffer.locationOfNearCall(m_callStructureStubCompilationInfo[i].hotPathOther);
    }
#endif
    unsigned methodCallCount = m_methodCallCompilationInfo.size();
    m_codeBlock->addMethodCallLinkInfos(methodCallCount);
    for (unsigned i = 0; i < methodCallCount; ++i) {
        MethodCallLinkInfo& info = m_codeBlock->methodCallLinkInfo(i);
        info.structureLabel = patchBuffer.locationOf(m_methodCallCompilationInfo[i].structureToCompare);
        info.callReturnLocation = m_codeBlock->structureStubInfo(m_methodCallCompilationInfo[i].propertyAccessIndex).callReturnLocation;
    }

    if (m_codeBlock->codeType() == FunctionCode && functionEntryArityCheck) {
        patchBuffer.link(callArityCheck, FunctionPtr(m_codeBlock->m_isConstructor ? cti_op_construct_arityCheck : cti_op_call_arityCheck));
        *functionEntryArityCheck = patchBuffer.locationOf(arityCheck);
    }

    return patchBuffer.finalizeCode();
}
|
592 |
|
#if !USE(JSVALUE32_64)
// Load the value stored in slot |index| of |variableObject|'s registers
// array into |dst|, chasing JSVariableObject::d then d->registers. |dst|
// is used as scratch for the intermediate pointers.
void JIT::emitGetVariableObjectRegister(RegisterID variableObject, int index, RegisterID dst)
{
    loadPtr(Address(variableObject, OBJECT_OFFSETOF(JSVariableObject, d)), dst);
    loadPtr(Address(dst, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), dst);
    loadPtr(Address(dst, index * sizeof(Register)), dst);
}

// Store |src| into slot |index| of |variableObject|'s registers array.
// Note: clobbers |variableObject|, which is reused to hold the registers
// base pointer.
void JIT::emitPutVariableObjectRegister(RegisterID src, RegisterID variableObject, int index)
{
    loadPtr(Address(variableObject, OBJECT_OFFSETOF(JSVariableObject, d)), variableObject);
    loadPtr(Address(variableObject, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), variableObject);
    storePtr(src, Address(variableObject, index * sizeof(Register)));
}
#endif
|
608 |
|
609 #if ENABLE(JIT_OPTIMIZE_CALL) |
|
// Undo a previously linked call site so it no longer fast-paths to a
// (possibly dead) JSFunction.
void JIT::unlinkCallOrConstruct(CallLinkInfo* callLinkInfo)
{
    // When the JSFunction is deleted the pointer embedded in the instruction stream will no longer be valid
    // (and, if a new JSFunction happened to be constructed at the same location, we could get a false positive
    // match). Reset the check so it no longer matches.
    RepatchBuffer repatchBuffer(callLinkInfo->ownerCodeBlock);
#if USE(JSVALUE32_64)
    // On this representation the embedded check value is a plain pointer.
    repatchBuffer.repatch(callLinkInfo->hotPathBegin, 0);
#else
    // Otherwise the check value is an encoded JSValue; reset to the empty value.
    repatchBuffer.repatch(callLinkInfo->hotPathBegin, JSValue::encode(JSValue()));
#endif
}
|
622 |
|
623 void JIT::linkCall(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JIT::CodePtr code, CallLinkInfo* callLinkInfo, int callerArgCount, JSGlobalData* globalData) |
|
624 { |
|
625 RepatchBuffer repatchBuffer(callerCodeBlock); |
|
626 |
|
627 // Currently we only link calls with the exact number of arguments. |
|
628 // If this is a native call calleeCodeBlock is null so the number of parameters is unimportant |
|
629 if (!calleeCodeBlock || (callerArgCount == calleeCodeBlock->m_numParameters)) { |
|
630 ASSERT(!callLinkInfo->isLinked()); |
|
631 |
|
632 if (calleeCodeBlock) |
|
633 calleeCodeBlock->addCaller(callLinkInfo); |
|
634 |
|
635 repatchBuffer.repatch(callLinkInfo->hotPathBegin, callee); |
|
636 repatchBuffer.relink(callLinkInfo->hotPathOther, code); |
|
637 } |
|
638 |
|
639 // patch the call so we do not continue to try to link. |
|
640 repatchBuffer.relink(callLinkInfo->callReturnLocation, globalData->jitStubs->ctiVirtualCall()); |
|
641 } |
|
642 |
|
643 void JIT::linkConstruct(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JIT::CodePtr code, CallLinkInfo* callLinkInfo, int callerArgCount, JSGlobalData* globalData) |
|
644 { |
|
645 RepatchBuffer repatchBuffer(callerCodeBlock); |
|
646 |
|
647 // Currently we only link calls with the exact number of arguments. |
|
648 // If this is a native call calleeCodeBlock is null so the number of parameters is unimportant |
|
649 if (!calleeCodeBlock || (callerArgCount == calleeCodeBlock->m_numParameters)) { |
|
650 ASSERT(!callLinkInfo->isLinked()); |
|
651 |
|
652 if (calleeCodeBlock) |
|
653 calleeCodeBlock->addCaller(callLinkInfo); |
|
654 |
|
655 repatchBuffer.repatch(callLinkInfo->hotPathBegin, callee); |
|
656 repatchBuffer.relink(callLinkInfo->hotPathOther, code); |
|
657 } |
|
658 |
|
659 // patch the call so we do not continue to try to link. |
|
660 repatchBuffer.relink(callLinkInfo->callReturnLocation, globalData->jitStubs->ctiVirtualConstruct()); |
|
661 } |
|
662 #endif // ENABLE(JIT_OPTIMIZE_CALL) |
|
663 |
|
664 } // namespace JSC |
|
665 |
|
666 #endif // ENABLE(JIT) |