|
1 /* |
|
2 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved. |
|
3 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca> |
|
4 * |
|
5 * Redistribution and use in source and binary forms, with or without |
|
6 * modification, are permitted provided that the following conditions |
|
7 * are met: |
|
8 * |
|
9 * 1. Redistributions of source code must retain the above copyright |
|
10 * notice, this list of conditions and the following disclaimer. |
|
11 * 2. Redistributions in binary form must reproduce the above copyright |
|
12 * notice, this list of conditions and the following disclaimer in the |
|
13 * documentation and/or other materials provided with the distribution. |
|
14 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of |
|
15 * its contributors may be used to endorse or promote products derived |
|
16 * from this software without specific prior written permission. |
|
17 * |
|
18 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY |
|
19 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED |
|
20 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE |
|
21 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY |
|
22 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES |
|
23 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; |
|
24 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND |
|
25 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
|
26 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
|
27 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
|
28 */ |
|
29 |
|
30 #include "config.h" |
|
31 #include "BytecodeGenerator.h" |
|
32 |
|
33 #include "BatchedTransitionOptimizer.h" |
|
34 #include "PrototypeFunction.h" |
|
35 #include "JSFunction.h" |
|
36 #include "Interpreter.h" |
|
37 #include "UString.h" |
|
38 |
|
39 using namespace std; |
|
40 |
|
41 namespace JSC { |
|
42 |
|
43 /* |
|
44 The layout of a register frame looks like this: |
|
45 |
|
46 For |
|
47 |
|
48 function f(x, y) { |
|
49 var v1; |
|
50 function g() { } |
|
51 var v2; |
|
52 return (x) * (y); |
|
53 } |
|
54 |
|
55 assuming (x) and (y) generated temporaries t1 and t2, you would have |
|
56 |
|
57 ------------------------------------ |
|
58 | x | y | g | v2 | v1 | t1 | t2 | <-- value held |
|
59 ------------------------------------ |
|
60 | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index |
|
61 ------------------------------------ |
|
62 | params->|<-locals | temps-> |
|
63 |
|
64 Because temporary registers are allocated in a stack-like fashion, we |
|
65 can reclaim them with a simple popping algorithm. The same goes for labels. |
|
66 (We never reclaim parameter or local registers, because parameters and |
|
67 locals are DontDelete.) |
|
68 |
|
69 The register layout before a function call looks like this: |
|
70 |
|
71 For |
|
72 |
|
73 function f(x, y) |
|
74 { |
|
75 } |
|
76 |
|
77 f(1); |
|
78 |
|
79 > <------------------------------ |
|
80 < > reserved: call frame | 1 | <-- value held |
|
81 > >snip< <------------------------------ |
|
82 < > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index |
|
83 > <------------------------------ |
|
84 | params->|<-locals | temps-> |
|
85 |
|
86 The call instruction fills in the "call frame" registers. It also pads |
|
87 missing arguments at the end of the call: |
|
88 |
|
89 > <----------------------------------- |
|
90 < > reserved: call frame | 1 | ? | <-- value held ("?" stands for "undefined") |
|
91 > >snip< <----------------------------------- |
|
92 < > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index |
|
93 > <----------------------------------- |
|
94 | params->|<-locals | temps-> |
|
95 |
|
96 After filling in missing arguments, the call instruction sets up the new |
|
97 stack frame to overlap the end of the old stack frame: |
|
98 |
|
99 |----------------------------------> < |
|
100 | reserved: call frame | 1 | ? < > <-- value held ("?" stands for "undefined") |
|
101 |----------------------------------> >snip< < |
|
102 | -7 | -6 | -5 | -4 | -3 | -2 | -1 < > <-- register index |
|
103 |----------------------------------> < |
|
104 | | params->|<-locals | temps-> |
|
105 |
|
106 That way, arguments are "copied" into the callee's stack frame for free. |
|
107 |
|
108 If the caller supplies too many arguments, this trick doesn't work. The |
|
109 extra arguments protrude into space reserved for locals and temporaries. |
|
110 In that case, the call instruction makes a real copy of the call frame header, |
|
111 along with just the arguments expected by the callee, leaving the original |
|
112 call frame header and arguments behind. (The call instruction can't just discard |
|
113 extra arguments, because the "arguments" object may access them later.) |
|
114 This copying strategy ensures that all named values will be at the indices |
|
115 expected by the callee. |
|
116 */ |
|
117 |
|
#ifndef NDEBUG
// Debug-only switch: when true, every generated code block is dumped to the
// console after generation completes (see BytecodeGenerator::generate()).
// Release builds compile this flag out entirely.
static bool s_dumpsGeneratedCode = false;
#endif
|
121 |
|
// Turns bytecode dumping on or off for subsequently generated code blocks.
// In release builds (NDEBUG) the backing flag does not exist, so the request
// is silently ignored.
void BytecodeGenerator::setDumpsGeneratedCode(bool dumpsGeneratedCode)
{
#ifndef NDEBUG
    s_dumpsGeneratedCode = dumpsGeneratedCode;
#else
    UNUSED_PARAM(dumpsGeneratedCode);
#endif
}
|
130 |
|
// Returns whether generated bytecode will be dumped. Always false in release
// builds, where the flag is compiled out.
bool BytecodeGenerator::dumpsGeneratedCode()
{
#ifndef NDEBUG
    return s_dumpsGeneratedCode;
#else
    return false;
#endif
}
|
139 |
|
// Drives bytecode generation: emits the bytecode for the whole scope node
// into the code block, then releases bookkeeping that is only needed during
// generation.
void BytecodeGenerator::generate()
{
    m_codeBlock->setThisRegister(m_thisRegister.index());

    m_scopeNode->emitBytecode(*this);

#ifndef NDEBUG
    m_codeBlock->setInstructionCount(m_codeBlock->instructions().size());

    if (s_dumpsGeneratedCode)
        m_codeBlock->dump(m_scopeChain->globalObject()->globalExec());
#endif

    // Once registers are assigned, the symbol table is only needed by code
    // that resolves variables dynamically (a full scope chain or 'arguments');
    // otherwise it can be cleared now.
    if ((m_codeType == FunctionCode && !m_codeBlock->needsFullScopeChain() && !m_codeBlock->usesArguments()) || m_codeType == EvalCode)
        symbolTable().clear();

#if !ENABLE(OPCODE_SAMPLING)
    // If nothing in this code can throw (and we aren't regenerating the info
    // on demand), drop the exception bookkeeping to save memory.
    if (!m_regeneratingForExceptionInfo && !m_usesExceptions && (m_codeType == FunctionCode || m_codeType == EvalCode))
        m_codeBlock->clearExceptionInfo();
#endif

    m_codeBlock->shrinkToFit();
}
|
163 |
|
164 bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0) |
|
165 { |
|
166 int index = m_calleeRegisters.size(); |
|
167 SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0); |
|
168 pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.ustring().rep(), newEntry); |
|
169 |
|
170 if (!result.second) { |
|
171 r0 = ®isterFor(result.first->second.getIndex()); |
|
172 return false; |
|
173 } |
|
174 |
|
175 r0 = addVar(); |
|
176 return true; |
|
177 } |
|
178 |
|
179 bool BytecodeGenerator::addGlobalVar(const Identifier& ident, bool isConstant, RegisterID*& r0) |
|
180 { |
|
181 int index = m_nextGlobalIndex; |
|
182 SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0); |
|
183 pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.ustring().rep(), newEntry); |
|
184 |
|
185 if (!result.second) |
|
186 index = result.first->second.getIndex(); |
|
187 else { |
|
188 --m_nextGlobalIndex; |
|
189 m_globals.append(index + m_globalVarStorageOffset); |
|
190 } |
|
191 |
|
192 r0 = ®isterFor(index); |
|
193 return result.second; |
|
194 } |
|
195 |
|
196 void BytecodeGenerator::preserveLastVar() |
|
197 { |
|
198 if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0) |
|
199 m_lastVar = &m_calleeRegisters.last(); |
|
200 } |
|
201 |
|
// Generates bytecode for global (program) code. Unlike function code, global
// variables and functions live as properties of the global object rather than
// purely in the register file, so this constructor eagerly declares them
// there — either as register-backed symbols (the fast path) or as ordinary
// properties when there are too many to register-allocate.
BytecodeGenerator::BytecodeGenerator(ProgramNode* programNode, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, ProgramCodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(programNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(GlobalCode)
    , m_nextGlobalIndex(-1)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_usesExceptions(false)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    // The debugger needs an activation it can inspect, so force a full scope chain.
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);

    // FIXME: Move code that modifies the global object to Interpreter::execute.

    m_codeBlock->m_numParameters = 1; // Allocate space for "this"

    JSGlobalObject* globalObject = scopeChain.globalObject();
    ExecState* exec = globalObject->globalExec();
    RegisterFile* registerFile = &exec->globalData().interpreter->registerFile();

    // Shift register indexes in generated code to elide registers allocated by intermediate stack frames.
    m_globalVarStorageOffset = -RegisterFile::CallFrameHeaderSize - m_codeBlock->m_numParameters - registerFile->size();

    // Add previously defined symbols to bookkeeping.
    m_globals.grow(symbolTable->size());
    SymbolTable::iterator end = symbolTable->end();
    for (SymbolTable::iterator it = symbolTable->begin(); it != end; ++it)
        registerFor(it->second.getIndex()).setIndex(it->second.getIndex() + m_globalVarStorageOffset);

    // Batch the property additions below into one structure transition.
    BatchedTransitionOptimizer optimizer(globalObject);

    const VarStack& varStack = programNode->varStack();
    const FunctionStack& functionStack = programNode->functionStack();
    bool canOptimizeNewGlobals = symbolTable->size() + functionStack.size() + varStack.size() < registerFile->maxGlobals();
    if (canOptimizeNewGlobals) {
        // Shift new symbols so they get stored prior to existing symbols.
        m_nextGlobalIndex -= symbolTable->size();

        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            globalObject->removeDirect(function->ident()); // Make sure our new function is not shadowed by an old property.
            emitNewFunction(addGlobalVar(function->ident(), false), function);
        }

        // Only register vars that don't already exist as properties.
        Vector<RegisterID*, 32> newVars;
        for (size_t i = 0; i < varStack.size(); ++i)
            if (!globalObject->hasProperty(exec, *varStack[i].first))
                newVars.append(addGlobalVar(*varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant));

        preserveLastVar();

        // New globals start out undefined.
        for (size_t i = 0; i < newVars.size(); ++i)
            emitLoad(newVars[i], jsUndefined());
    } else {
        // Too many globals to register-allocate: fall back to creating them
        // as ordinary properties on the global object.
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            globalObject->putWithAttributes(exec, function->ident(), new (exec) JSFunction(exec, makeFunction(exec, function), scopeChain.node()), DontDelete);
        }
        for (size_t i = 0; i < varStack.size(); ++i) {
            if (globalObject->hasProperty(exec, *varStack[i].first))
                continue;
            int attributes = DontDelete;
            if (varStack[i].second & DeclarationStacks::IsConstant)
                attributes |= ReadOnly;
            globalObject->putWithAttributes(exec, *varStack[i].first, jsUndefined(), attributes);
        }

        preserveLastVar();
    }
}
|
288 |
|
// Generates bytecode for a single function body. Parameters, declared vars
// and nested function declarations are given named registers (see the
// register-layout diagram at the top of this file); temporaries grow upward
// from there.
BytecodeGenerator::BytecodeGenerator(FunctionBodyNode* functionBody, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, CodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(functionBody)
    , m_codeBlock(codeBlock)
    , m_activationRegister(0)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(FunctionCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_usesExceptions(false)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    // The debugger needs an activation it can inspect, so force a full scope chain.
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    codeBlock->setGlobalData(m_globalData);

    // A function needing a full scope chain allocates an activation object as
    // part of entry; otherwise a plain op_enter suffices.
    if (m_codeBlock->needsFullScopeChain()) {
        m_activationRegister = addVar();
        emitOpcode(op_enter_with_activation);
        instructions().append(m_activationRegister->index());
    } else
        emitOpcode(op_enter);

    // Both op_tear_off_activation and op_tear_off_arguments tear off the 'arguments'
    // object, if created.
    if (m_codeBlock->needsFullScopeChain() || functionBody->usesArguments()) {
        RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
        RegisterID* argumentsRegister = addVar(propertyNames().arguments, false); // Can be changed by assigning to 'arguments'.

        // We can save a little space by hard-coding the knowledge that the two
        // 'arguments' values are stored in consecutive registers, and storing
        // only the index of the assignable one.
        codeBlock->setArgumentsRegister(argumentsRegister->index());
        ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->index() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));

        emitOpcode(op_init_arguments);
        instructions().append(argumentsRegister->index());

        // The debugger currently retrieves the arguments object from an activation rather than pulling
        // it from a call frame. In the long-term it should stop doing that (<rdar://problem/6911886>),
        // but for now we force eager creation of the arguments object when debugging.
        if (m_shouldEmitDebugHooks) {
            emitOpcode(op_create_arguments);
            instructions().append(argumentsRegister->index());
        }
    }

    // Declare each nested function, remembering its name so a like-named
    // parameter won't clobber it in addParameter() below.
    const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        const Identifier& ident = function->ident();
        m_functions.add(ident.ustring().rep());
        emitNewFunction(addVar(ident, false), function);
    }

    // Allocate a local register for every declared var.
    const DeclarationStacks::VarStack& varStack = functionBody->varStack();
    for (size_t i = 0; i < varStack.size(); ++i)
        addVar(*varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant);

    // Parameters (and "this") live at negative indices just below the call
    // frame header.
    FunctionParameters& parameters = *functionBody->parameters();
    size_t parameterCount = parameters.size();
    int nextParameterIndex = -RegisterFile::CallFrameHeaderSize - parameterCount - 1;
    m_parameters.grow(1 + parameterCount); // reserve space for "this"

    // Add "this" as a parameter
    m_thisRegister.setIndex(nextParameterIndex);
    ++m_codeBlock->m_numParameters;

    for (size_t i = 0; i < parameterCount; ++i)
        addParameter(parameters[i], ++nextParameterIndex);

    preserveLastVar();

    if (isConstructor()) {
        // Constructors synthesize 'this' as a fresh object whose prototype
        // comes from the callee's .prototype property.
        RefPtr<RegisterID> func = newTemporary();
        RefPtr<RegisterID> funcProto = newTemporary();

        emitOpcode(op_get_callee);
        instructions().append(func->index());
        // Load prototype.
        emitGetByIdExceptionInfo(op_create_this);
        emitGetById(funcProto.get(), func.get(), globalData()->propertyNames->prototype);

        emitOpcode(op_create_this);
        instructions().append(m_thisRegister.index());
        instructions().append(funcProto->index());
    } else if (functionBody->usesThis() || m_shouldEmitDebugHooks) {
        // Ordinary calls may receive a non-object 'this'; convert it on entry.
        emitOpcode(op_convert_this);
        instructions().append(m_thisRegister.index());
    }
}
|
390 |
|
// Generates bytecode for eval code. Eval variables cannot be bound to
// registers here (the correct variable object is only known at runtime), so
// their names are recorded on the code block and the interpreter declares
// them when the eval actually executes.
BytecodeGenerator::BytecodeGenerator(EvalNode* evalNode, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, EvalCodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(evalNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(codeBlock->baseScopeDepth())
    , m_codeType(EvalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_usesExceptions(false)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    // A debugger, or an eval nested inside dynamic scope (m_baseScopeDepth),
    // requires a full scope chain.
    if (m_shouldEmitDebugHooks || m_baseScopeDepth)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);
    m_codeBlock->m_numParameters = 1; // Allocate space for "this"

    // Function declarations are recorded on the code block; the interpreter
    // instantiates them before the eval body runs.
    const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i)
        m_codeBlock->addFunctionDecl(makeFunction(m_globalData, functionStack[i]));

    // Hand the variable names to the code block so the interpreter can declare
    // them on the variable object at execution time.
    const DeclarationStacks::VarStack& varStack = evalNode->varStack();
    unsigned numVariables = varStack.size();
    Vector<Identifier> variables;
    variables.reserveCapacity(numVariables);
    for (size_t i = 0; i < numVariables; ++i)
        variables.append(*varStack[i].first);
    codeBlock->adoptVariables(variables);

    preserveLastVar();
}
|
433 |
|
434 void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex) |
|
435 { |
|
436 // Parameters overwrite var declarations, but not function declarations. |
|
437 UString::Rep* rep = ident.ustring().rep(); |
|
438 if (!m_functions.contains(rep)) { |
|
439 symbolTable().set(rep, parameterIndex); |
|
440 RegisterID& parameter = registerFor(parameterIndex); |
|
441 parameter.setIndex(parameterIndex); |
|
442 } |
|
443 |
|
444 // To maintain the calling convention, we have to allocate unique space for |
|
445 // each parameter, even if the parameter doesn't make it into the symbol table. |
|
446 ++m_codeBlock->m_numParameters; |
|
447 } |
|
448 |
|
449 RegisterID* BytecodeGenerator::registerFor(const Identifier& ident) |
|
450 { |
|
451 if (ident == propertyNames().thisIdentifier) |
|
452 return &m_thisRegister; |
|
453 |
|
454 if (!shouldOptimizeLocals()) |
|
455 return 0; |
|
456 |
|
457 SymbolTableEntry entry = symbolTable().get(ident.ustring().rep()); |
|
458 if (entry.isNull()) |
|
459 return 0; |
|
460 |
|
461 if (ident == propertyNames().arguments) |
|
462 createArgumentsIfNecessary(); |
|
463 |
|
464 return ®isterFor(entry.getIndex()); |
|
465 } |
|
466 |
|
467 bool BytecodeGenerator::willResolveToArguments(const Identifier& ident) |
|
468 { |
|
469 if (ident != propertyNames().arguments) |
|
470 return false; |
|
471 |
|
472 if (!shouldOptimizeLocals()) |
|
473 return false; |
|
474 |
|
475 SymbolTableEntry entry = symbolTable().get(ident.ustring().rep()); |
|
476 if (entry.isNull()) |
|
477 return false; |
|
478 |
|
479 if (m_codeBlock->usesArguments() && m_codeType == FunctionCode) |
|
480 return true; |
|
481 |
|
482 return false; |
|
483 } |
|
484 |
|
485 RegisterID* BytecodeGenerator::uncheckedRegisterForArguments() |
|
486 { |
|
487 ASSERT(willResolveToArguments(propertyNames().arguments)); |
|
488 |
|
489 SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.ustring().rep()); |
|
490 ASSERT(!entry.isNull()); |
|
491 return ®isterFor(entry.getIndex()); |
|
492 } |
|
493 |
|
494 RegisterID* BytecodeGenerator::constRegisterFor(const Identifier& ident) |
|
495 { |
|
496 if (m_codeType == EvalCode) |
|
497 return 0; |
|
498 |
|
499 SymbolTableEntry entry = symbolTable().get(ident.ustring().rep()); |
|
500 if (entry.isNull()) |
|
501 return 0; |
|
502 |
|
503 return ®isterFor(entry.getIndex()); |
|
504 } |
|
505 |
|
506 bool BytecodeGenerator::isLocal(const Identifier& ident) |
|
507 { |
|
508 if (ident == propertyNames().thisIdentifier) |
|
509 return true; |
|
510 |
|
511 return shouldOptimizeLocals() && symbolTable().contains(ident.ustring().rep()); |
|
512 } |
|
513 |
|
514 bool BytecodeGenerator::isLocalConstant(const Identifier& ident) |
|
515 { |
|
516 return symbolTable().get(ident.ustring().rep()).isReadOnly(); |
|
517 } |
|
518 |
|
519 RegisterID* BytecodeGenerator::newRegister() |
|
520 { |
|
521 m_calleeRegisters.append(m_calleeRegisters.size()); |
|
522 m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size()); |
|
523 return &m_calleeRegisters.last(); |
|
524 } |
|
525 |
|
526 RegisterID* BytecodeGenerator::newTemporary() |
|
527 { |
|
528 // Reclaim free register IDs. |
|
529 while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount()) |
|
530 m_calleeRegisters.removeLast(); |
|
531 |
|
532 RegisterID* result = newRegister(); |
|
533 result->setTemporary(); |
|
534 return result; |
|
535 } |
|
536 |
|
537 RegisterID* BytecodeGenerator::highestUsedRegister() |
|
538 { |
|
539 size_t count = m_codeBlock->m_numCalleeRegisters; |
|
540 while (m_calleeRegisters.size() < count) |
|
541 newRegister(); |
|
542 return &m_calleeRegisters.last(); |
|
543 } |
|
544 |
|
// Creates a label scope used to resolve break/continue targets. Scopes are
// allocated stack-like: dead (unreferenced) scopes at the top of the list are
// reclaimed first.
PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    // Allocate new label scope.
    LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
    m_labelScopes.append(scope);
    return &m_labelScopes.last();
}
|
556 |
|
// Allocates a fresh label. Labels, like temporaries, are reclaimed in
// stack-like fashion: unreferenced labels at the end of the list are popped
// before a new one is appended.
PassRefPtr<Label> BytecodeGenerator::newLabel()
{
    // Reclaim free label IDs.
    while (m_labels.size() && !m_labels.last().refCount())
        m_labels.removeLast();

    // Allocate new label ID, constructed in place against this code block.
    m_labels.append(m_codeBlock);
    return &m_labels.last();
}
|
567 |
|
568 PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0) |
|
569 { |
|
570 unsigned newLabelIndex = instructions().size(); |
|
571 l0->setLocation(newLabelIndex); |
|
572 |
|
573 if (m_codeBlock->numberOfJumpTargets()) { |
|
574 unsigned lastLabelIndex = m_codeBlock->lastJumpTarget(); |
|
575 ASSERT(lastLabelIndex <= newLabelIndex); |
|
576 if (newLabelIndex == lastLabelIndex) { |
|
577 // Peephole optimizations have already been disabled by emitting the last label |
|
578 return l0; |
|
579 } |
|
580 } |
|
581 |
|
582 m_codeBlock->addJumpTarget(newLabelIndex); |
|
583 |
|
584 // This disables peephole optimizations when an instruction is a jump target |
|
585 m_lastOpcodeID = op_end; |
|
586 return l0; |
|
587 } |
|
588 |
|
// Appends 'opcodeID' to the instruction stream and records it as the most
// recently emitted opcode so later emit calls can peephole-optimize against it.
void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
{
    instructions().append(globalData()->interpreter->getOpcode(opcodeID));
    m_lastOpcodeID = opcodeID;
}
|
594 |
|
595 void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index) |
|
596 { |
|
597 ASSERT(instructions().size() >= 4); |
|
598 size_t size = instructions().size(); |
|
599 dstIndex = instructions().at(size - 3).u.operand; |
|
600 src1Index = instructions().at(size - 2).u.operand; |
|
601 src2Index = instructions().at(size - 1).u.operand; |
|
602 } |
|
603 |
|
604 void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex) |
|
605 { |
|
606 ASSERT(instructions().size() >= 3); |
|
607 size_t size = instructions().size(); |
|
608 dstIndex = instructions().at(size - 2).u.operand; |
|
609 srcIndex = instructions().at(size - 1).u.operand; |
|
610 } |
|
611 |
|
612 void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp() |
|
613 { |
|
614 ASSERT(instructions().size() >= 4); |
|
615 instructions().shrink(instructions().size() - 4); |
|
616 } |
|
617 |
|
618 void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp() |
|
619 { |
|
620 ASSERT(instructions().size() >= 3); |
|
621 instructions().shrink(instructions().size() - 3); |
|
622 } |
|
623 |
|
624 PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target) |
|
625 { |
|
626 size_t begin = instructions().size(); |
|
627 emitOpcode(target->isForward() ? op_jmp : op_loop); |
|
628 instructions().append(target->bind(begin, instructions().size())); |
|
629 return target; |
|
630 } |
|
631 |
|
// Emits a jump to 'target' taken when 'cond' evaluates to true.
//
// Peephole optimization: when 'cond' is the result of the instruction emitted
// immediately before (a comparison or null test) and that result is a dead
// temporary — checked via "cond->index() == dstIndex && cond->isTemporary()
// && !cond->refCount()" — the instruction is rewound and replaced with a
// single fused compare-and-jump opcode. Backward jumps use the op_loop_*
// variants so the interpreter can do back-edge checks; the null-test fusions
// apply to forward jumps only.
PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            // Fuse "a < b; jump-if-true" into jless / loop_if_less.
            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jless : op_loop_if_less);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            // Fuse "a <= b; jump-if-true" into jlesseq / loop_if_lesseq.
            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jlesseq : op_loop_if_lesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            // Fuse "x == null; jump-if-true" into jeq_null.
            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            // Fuse "x != null; jump-if-true" into jneq_null.
            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    // General case: test the condition register and jump.
    size_t begin = instructions().size();

    emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
|
707 |
|
// Emits a jump to 'target' taken when 'cond' evaluates to false.
//
// Peephole optimization: as in emitJumpIfTrue(), when 'cond' is the
// dead-temporary result of the instruction just emitted, that instruction is
// rewound and replaced by a single inverted compare-and-jump opcode
// (jump-if-false of "a < b" becomes jnless, of "x == null" becomes jneq_null,
// and so on). Jump-if-false of a negation (op_not) becomes a jump-if-true on
// the un-negated operand. All fusions except op_not apply to forward jumps
// only.
PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            // Fuse "a < b; jump-if-false" into jnless.
            size_t begin = instructions().size();
            emitOpcode(op_jnless);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            // Fuse "a <= b; jump-if-false" into jnlesseq.
            size_t begin = instructions().size();
            emitOpcode(op_jnlesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_not) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            // "jump-if-false of !x" is "jump-if-true of x".
            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            // Fuse "x == null; jump-if-false" into jneq_null.
            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            // Fuse "x != null; jump-if-false" into jeq_null.
            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    // General case: test the condition register and jump.
    size_t begin = instructions().size();
    emitOpcode(target->isForward() ? op_jfalse : op_loop_if_false);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
|
797 |
|
798 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target) |
|
799 { |
|
800 size_t begin = instructions().size(); |
|
801 |
|
802 emitOpcode(op_jneq_ptr); |
|
803 instructions().append(cond->index()); |
|
804 instructions().append(m_scopeChain->globalObject()->d()->callFunction); |
|
805 instructions().append(target->bind(begin, instructions().size())); |
|
806 return target; |
|
807 } |
|
808 |
|
// Emits op_jneq_ptr: jumps to |target| unless |cond| holds the exact
// Function.prototype.apply object cached on the global object. Used to guard
// the optimized f.apply(...) fast path.
PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    // Compare against the raw pointer of the cached apply function.
    instructions().append(m_scopeChain->globalObject()->d()->applyFunction);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
|
819 |
|
820 unsigned BytecodeGenerator::addConstant(const Identifier& ident) |
|
821 { |
|
822 UString::Rep* rep = ident.ustring().rep(); |
|
823 pair<IdentifierMap::iterator, bool> result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers()); |
|
824 if (result.second) // new entry |
|
825 m_codeBlock->addIdentifier(Identifier(m_globalData, rep)); |
|
826 |
|
827 return result.first->second; |
|
828 } |
|
829 |
|
830 RegisterID* BytecodeGenerator::addConstantValue(JSValue v) |
|
831 { |
|
832 int index = m_nextConstantOffset; |
|
833 |
|
834 pair<JSValueMap::iterator, bool> result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset); |
|
835 if (result.second) { |
|
836 m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset); |
|
837 ++m_nextConstantOffset; |
|
838 m_codeBlock->addConstantRegister(JSValue(v)); |
|
839 } else |
|
840 index = result.first->second; |
|
841 |
|
842 return &m_constantPoolRegisters[index]; |
|
843 } |
|
844 |
|
// Registers |r| with the code block and returns its regexp-table index.
unsigned BytecodeGenerator::addRegExp(RegExp* r)
{
    return m_codeBlock->addRegExp(r);
}
|
849 |
|
850 RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src) |
|
851 { |
|
852 emitOpcode(op_mov); |
|
853 instructions().append(dst->index()); |
|
854 instructions().append(src->index()); |
|
855 return dst; |
|
856 } |
|
857 |
|
858 RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src) |
|
859 { |
|
860 emitOpcode(opcodeID); |
|
861 instructions().append(dst->index()); |
|
862 instructions().append(src->index()); |
|
863 return dst; |
|
864 } |
|
865 |
|
866 RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst) |
|
867 { |
|
868 emitOpcode(op_pre_inc); |
|
869 instructions().append(srcDst->index()); |
|
870 return srcDst; |
|
871 } |
|
872 |
|
873 RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst) |
|
874 { |
|
875 emitOpcode(op_pre_dec); |
|
876 instructions().append(srcDst->index()); |
|
877 return srcDst; |
|
878 } |
|
879 |
|
880 RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst) |
|
881 { |
|
882 emitOpcode(op_post_inc); |
|
883 instructions().append(dst->index()); |
|
884 instructions().append(srcDst->index()); |
|
885 return dst; |
|
886 } |
|
887 |
|
888 RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst) |
|
889 { |
|
890 emitOpcode(op_post_dec); |
|
891 instructions().append(dst->index()); |
|
892 instructions().append(srcDst->index()); |
|
893 return dst; |
|
894 } |
|
895 |
|
896 RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types) |
|
897 { |
|
898 emitOpcode(opcodeID); |
|
899 instructions().append(dst->index()); |
|
900 instructions().append(src1->index()); |
|
901 instructions().append(src2->index()); |
|
902 |
|
903 if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor || |
|
904 opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div) |
|
905 instructions().append(types.toInt()); |
|
906 |
|
907 return dst; |
|
908 } |
|
909 |
|
// Emits an equality-family opcode. First tries to peephole the pattern
// "typeof x == '<string literal>'" into a dedicated op_is_* opcode, which
// lets the interpreter answer the question without materializing the typeof
// result string.
RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
{
    if (m_lastOpcodeID == op_typeof) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        // Only fuse when src1 is the temporary produced by the preceding
        // op_typeof and src2 is a constant string we can inspect at
        // compile time.
        if (src1->index() == dstIndex
            && src1->isTemporary()
            && m_codeBlock->isConstantRegisterIndex(src2->index())
            && m_codeBlock->constantRegister(src2->index()).jsValue().isString()) {
            const UString& value = asString(m_codeBlock->constantRegister(src2->index()).jsValue())->tryGetValue();
            if (value == "undefined") {
                rewindUnaryOp(); // Back out the op_typeof; op_is_undefined reads its source directly.
                emitOpcode(op_is_undefined);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "boolean") {
                rewindUnaryOp();
                emitOpcode(op_is_boolean);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "number") {
                rewindUnaryOp();
                emitOpcode(op_is_number);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "string") {
                rewindUnaryOp();
                emitOpcode(op_is_string);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "object") {
                rewindUnaryOp();
                emitOpcode(op_is_object);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "function") {
                rewindUnaryOp();
                emitOpcode(op_is_function);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
        }
    }

    // No peephole applied: emit the plain equality opcode (dst, src1, src2).
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());
    return dst;
}
|
974 |
|
// Loads the boolean constant |b| into |dst| (or a fresh constant register if
// dst is null).
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
{
    return emitLoad(dst, jsBoolean(b));
}
|
979 |
|
// Loads the numeric constant |number| into |dst|, caching the boxed value in
// m_numberMap so repeated loads of the same number share one JS cell.
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
{
    // FIXME: Our hash tables won't hold infinity, so we make a new JSNumberCell each time.
    // Later we can do the extra work to handle that like the other cases.
    if (number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
        return emitLoad(dst, jsNumber(globalData(), number));
    // The add() gives us a reference into the map; fill it lazily on first use.
    JSValue& valueInMap = m_numberMap.add(number, JSValue()).first->second;
    if (!valueInMap)
        valueInMap = jsNumber(globalData(), number);
    return emitLoad(dst, valueInMap);
}
|
991 |
|
// Loads the string constant named by |identifier| into |dst|, caching the
// JSString in m_stringMap so repeated loads share one cell.
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
{
    // add() returns a reference into the map; create the string on first use.
    JSString*& stringInMap = m_stringMap.add(identifier.ustring().rep(), 0).first->second;
    if (!stringInMap)
        stringInMap = jsOwnedString(globalData(), identifier.ustring());
    return emitLoad(dst, JSValue(stringInMap));
}
|
999 |
|
1000 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v) |
|
1001 { |
|
1002 RegisterID* constantID = addConstantValue(v); |
|
1003 if (dst) |
|
1004 return emitMove(dst, constantID); |
|
1005 return constantID; |
|
1006 } |
|
1007 |
|
// Walks the static scope chain looking for |property|. On success, returns
// true and fills |index| (the symbol-table slot) and |stackDepth| (how many
// scopes to skip). Returns false when the lookup cannot be statically
// resolved; |index| is then set to missingSymbolMarker(). |globalObject| is
// set whenever the walk determines the final scope is the global object, so
// callers can emit global-specific opcodes. |requiresDynamicChecks| is set if
// any intervening scope may be mutated at runtime (e.g. by eval).
bool BytecodeGenerator::findScopedProperty(const Identifier& property, int& index, size_t& stackDepth, bool forWriting, bool& requiresDynamicChecks, JSObject*& globalObject)
{
    // Cases where we cannot statically optimize the lookup.
    if (property == propertyNames().arguments || !canOptimizeNonLocals()) {
        stackDepth = 0;
        index = missingSymbolMarker();

        // At global code the (single) scope is the global object; report it
        // so the caller can still use a global resolve.
        if (shouldOptimizeLocals() && m_codeType == GlobalCode) {
            ScopeChainIterator iter = m_scopeChain->begin();
            globalObject = *iter;
            ASSERT((++iter) == m_scopeChain->end());
        }
        return false;
    }

    size_t depth = 0;
    requiresDynamicChecks = false;
    ScopeChainIterator iter = m_scopeChain->begin();
    ScopeChainIterator end = m_scopeChain->end();
    for (; iter != end; ++iter, ++depth) {
        JSObject* currentScope = *iter;
        // Non-variable objects (e.g. 'with' scopes) end static analysis.
        if (!currentScope->isVariableObject())
            break;
        JSVariableObject* currentVariableObject = static_cast<JSVariableObject*>(currentScope);
        SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.ustring().rep());

        // Found the property
        if (!entry.isNull()) {
            // Writes to read-only slots fall back to the dynamic path so the
            // runtime can apply the proper semantics.
            if (entry.isReadOnly() && forWriting) {
                stackDepth = 0;
                index = missingSymbolMarker();
                if (++iter == end)
                    globalObject = currentVariableObject;
                return false;
            }
            stackDepth = depth + m_codeBlock->needsFullScopeChain();
            index = entry.getIndex();
            if (++iter == end)
                globalObject = currentVariableObject;
            return true;
        }
        bool scopeRequiresDynamicChecks = false;
        if (currentVariableObject->isDynamicScope(scopeRequiresDynamicChecks))
            break;
        requiresDynamicChecks |= scopeRequiresDynamicChecks;
    }
    // Can't locate the property but we're able to avoid a few lookups.
    stackDepth = depth + m_codeBlock->needsFullScopeChain();
    index = missingSymbolMarker();
    JSObject* scope = *iter;
    if (++iter == end)
        globalObject = scope;
    return true;
}
|
1062 |
|
1063 RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* base, RegisterID* basePrototype) |
|
1064 { |
|
1065 emitOpcode(op_instanceof); |
|
1066 instructions().append(dst->index()); |
|
1067 instructions().append(value->index()); |
|
1068 instructions().append(base->index()); |
|
1069 instructions().append(basePrototype->index()); |
|
1070 return dst; |
|
1071 } |
|
1072 |
|
// Resolves |property| against the scope chain and loads it into |dst|,
// picking the cheapest opcode the static analysis allows: direct scoped-var
// access, a (possibly dynamic) global resolve, a skip-resolve, or a fully
// dynamic op_resolve as last resort.
RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) && !globalObject) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        return dst;
    }

    if (globalObject) {
        bool forceGlobalResolve = false;
        // When regenerating for exception info, the instruction layout must
        // match the original code block, so force a global resolve wherever
        // the original emitted one.
        if (m_regeneratingForExceptionInfo) {
#if ENABLE(JIT)
            forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInfoAtBytecodeOffset(instructions().size());
#else
            forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInstructionAtBytecodeOffset(instructions().size());
#endif
        }

        if (index != missingSymbolMarker() && !forceGlobalResolve && !requiresDynamicChecks) {
            // Directly index the property lookup across multiple scopes.
            return emitGetScopedVar(dst, depth, index, globalObject);
        }

#if ENABLE(JIT)
        m_codeBlock->addGlobalResolveInfo(instructions().size());
#else
        m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
        // Layout: dst, globalObject, property, then two zeroed cache slots;
        // the dynamic variant appends the skip depth as well.
        emitOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
        instructions().append(dst->index());
        instructions().append(globalObject);
        instructions().append(addConstant(property));
        instructions().append(0);
        instructions().append(0);
        if (requiresDynamicChecks)
            instructions().append(depth);
        return dst;
    }

    if (requiresDynamicChecks) {
        // If we get here we have eval nested inside a |with| just give up
        emitOpcode(op_resolve);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        return dst;
    }

    if (index != missingSymbolMarker()) {
        // Directly index the property lookup across multiple scopes.
        return emitGetScopedVar(dst, depth, index, globalObject);
    }

    // In this case we are at least able to drop a few scope chains from the
    // lookup chain, although we still need to hash from then on.
    emitOpcode(op_resolve_skip);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(depth);
    return dst;
}
|
1139 |
|
1140 RegisterID* BytecodeGenerator::emitGetScopedVar(RegisterID* dst, size_t depth, int index, JSValue globalObject) |
|
1141 { |
|
1142 if (globalObject) { |
|
1143 emitOpcode(op_get_global_var); |
|
1144 instructions().append(dst->index()); |
|
1145 instructions().append(asCell(globalObject)); |
|
1146 instructions().append(index); |
|
1147 return dst; |
|
1148 } |
|
1149 |
|
1150 emitOpcode(op_get_scoped_var); |
|
1151 instructions().append(dst->index()); |
|
1152 instructions().append(index); |
|
1153 instructions().append(depth); |
|
1154 return dst; |
|
1155 } |
|
1156 |
|
1157 RegisterID* BytecodeGenerator::emitPutScopedVar(size_t depth, int index, RegisterID* value, JSValue globalObject) |
|
1158 { |
|
1159 if (globalObject) { |
|
1160 emitOpcode(op_put_global_var); |
|
1161 instructions().append(asCell(globalObject)); |
|
1162 instructions().append(index); |
|
1163 instructions().append(value->index()); |
|
1164 return value; |
|
1165 } |
|
1166 emitOpcode(op_put_scoped_var); |
|
1167 instructions().append(index); |
|
1168 instructions().append(depth); |
|
1169 instructions().append(value->index()); |
|
1170 return value; |
|
1171 } |
|
1172 |
|
1173 RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const Identifier& property) |
|
1174 { |
|
1175 size_t depth = 0; |
|
1176 int index = 0; |
|
1177 JSObject* globalObject = 0; |
|
1178 bool requiresDynamicChecks = false; |
|
1179 findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject); |
|
1180 if (!globalObject || requiresDynamicChecks) { |
|
1181 // We can't optimise at all :-( |
|
1182 emitOpcode(op_resolve_base); |
|
1183 instructions().append(dst->index()); |
|
1184 instructions().append(addConstant(property)); |
|
1185 return dst; |
|
1186 } |
|
1187 |
|
1188 // Global object is the base |
|
1189 return emitLoad(dst, JSValue(globalObject)); |
|
1190 } |
|
1191 |
|
// Resolves |property| producing both its base object (into |baseDst|) and its
// value (into |propDst|). Falls back to op_resolve_with_base when the lookup
// cannot be statically proven to hit the global object.
RegisterID* BytecodeGenerator::emitResolveWithBase(RegisterID* baseDst, RegisterID* propDst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) || !globalObject || requiresDynamicChecks) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve_with_base);
        instructions().append(baseDst->index());
        instructions().append(propDst->index());
        instructions().append(addConstant(property));
        return baseDst;
    }

    bool forceGlobalResolve = false;
    // When regenerating for exception info, mirror the original code block's
    // choice of global-resolve sites so bytecode offsets line up.
    if (m_regeneratingForExceptionInfo) {
#if ENABLE(JIT)
        forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInfoAtBytecodeOffset(instructions().size());
#else
        forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInstructionAtBytecodeOffset(instructions().size());
#endif
    }

    // Global object is the base
    emitLoad(baseDst, JSValue(globalObject));

    if (index != missingSymbolMarker() && !forceGlobalResolve) {
        // Directly index the property lookup across multiple scopes.
        emitGetScopedVar(propDst, depth, index, globalObject);
        return baseDst;
    }

#if ENABLE(JIT)
    m_codeBlock->addGlobalResolveInfo(instructions().size());
#else
    m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
    // Layout: dst, globalObject, property, two zeroed cache slots, and the
    // skip depth for the dynamic variant.
    emitOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
    instructions().append(propDst->index());
    instructions().append(globalObject);
    instructions().append(addConstant(property));
    instructions().append(0);
    instructions().append(0);
    if (requiresDynamicChecks)
        instructions().append(depth);
    return baseDst;
}
|
1240 |
|
// Emits op_method_check, a marker opcode that precedes a method get+call
// sequence so the runtime can optimize it.
void BytecodeGenerator::emitMethodCheck()
{
    emitOpcode(op_method_check);
}
|
1245 |
|
1246 RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property) |
|
1247 { |
|
1248 #if ENABLE(JIT) |
|
1249 m_codeBlock->addStructureStubInfo(StructureStubInfo(access_get_by_id)); |
|
1250 #else |
|
1251 m_codeBlock->addPropertyAccessInstruction(instructions().size()); |
|
1252 #endif |
|
1253 |
|
1254 emitOpcode(op_get_by_id); |
|
1255 instructions().append(dst->index()); |
|
1256 instructions().append(base->index()); |
|
1257 instructions().append(addConstant(property)); |
|
1258 instructions().append(0); |
|
1259 instructions().append(0); |
|
1260 instructions().append(0); |
|
1261 instructions().append(0); |
|
1262 return dst; |
|
1263 } |
|
1264 |
|
1265 RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value) |
|
1266 { |
|
1267 #if ENABLE(JIT) |
|
1268 m_codeBlock->addStructureStubInfo(StructureStubInfo(access_put_by_id)); |
|
1269 #else |
|
1270 m_codeBlock->addPropertyAccessInstruction(instructions().size()); |
|
1271 #endif |
|
1272 |
|
1273 emitOpcode(op_put_by_id); |
|
1274 instructions().append(base->index()); |
|
1275 instructions().append(addConstant(property)); |
|
1276 instructions().append(value->index()); |
|
1277 instructions().append(0); |
|
1278 instructions().append(0); |
|
1279 instructions().append(0); |
|
1280 instructions().append(0); |
|
1281 instructions().append(0); |
|
1282 return value; |
|
1283 } |
|
1284 |
|
1285 RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value) |
|
1286 { |
|
1287 #if ENABLE(JIT) |
|
1288 m_codeBlock->addStructureStubInfo(StructureStubInfo(access_put_by_id)); |
|
1289 #else |
|
1290 m_codeBlock->addPropertyAccessInstruction(instructions().size()); |
|
1291 #endif |
|
1292 |
|
1293 emitOpcode(op_put_by_id); |
|
1294 instructions().append(base->index()); |
|
1295 instructions().append(addConstant(property)); |
|
1296 instructions().append(value->index()); |
|
1297 instructions().append(0); |
|
1298 instructions().append(0); |
|
1299 instructions().append(0); |
|
1300 instructions().append(0); |
|
1301 instructions().append(property != m_globalData->propertyNames->underscoreProto); |
|
1302 return value; |
|
1303 } |
|
1304 |
|
1305 RegisterID* BytecodeGenerator::emitPutGetter(RegisterID* base, const Identifier& property, RegisterID* value) |
|
1306 { |
|
1307 emitOpcode(op_put_getter); |
|
1308 instructions().append(base->index()); |
|
1309 instructions().append(addConstant(property)); |
|
1310 instructions().append(value->index()); |
|
1311 return value; |
|
1312 } |
|
1313 |
|
1314 RegisterID* BytecodeGenerator::emitPutSetter(RegisterID* base, const Identifier& property, RegisterID* value) |
|
1315 { |
|
1316 emitOpcode(op_put_setter); |
|
1317 instructions().append(base->index()); |
|
1318 instructions().append(addConstant(property)); |
|
1319 instructions().append(value->index()); |
|
1320 return value; |
|
1321 } |
|
1322 |
|
1323 RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property) |
|
1324 { |
|
1325 emitOpcode(op_del_by_id); |
|
1326 instructions().append(dst->index()); |
|
1327 instructions().append(base->index()); |
|
1328 instructions().append(addConstant(property)); |
|
1329 return dst; |
|
1330 } |
|
1331 |
|
// Emits a subscripted property read. When |property| is the current name
// register of an enclosing for-in loop, emits the specialized op_get_by_pname
// (which can reuse the enumeration's cached state); otherwise emits a plain
// op_get_by_val.
RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    // Search innermost for-in context first.
    for (size_t i = m_forInContextStack.size(); i > 0; i--) {
        ForInContext& context = m_forInContextStack[i - 1];
        if (context.propertyRegister == property) {
            emitOpcode(op_get_by_pname);
            instructions().append(dst->index());
            instructions().append(base->index());
            instructions().append(property->index());
            instructions().append(context.expectedSubscriptRegister->index());
            instructions().append(context.iterRegister->index());
            instructions().append(context.indexRegister->index());
            return dst;
        }
    }
    emitOpcode(op_get_by_val);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(property->index());
    return dst;
}
|
1353 |
|
1354 RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value) |
|
1355 { |
|
1356 emitOpcode(op_put_by_val); |
|
1357 instructions().append(base->index()); |
|
1358 instructions().append(property->index()); |
|
1359 instructions().append(value->index()); |
|
1360 return value; |
|
1361 } |
|
1362 |
|
1363 RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property) |
|
1364 { |
|
1365 emitOpcode(op_del_by_val); |
|
1366 instructions().append(dst->index()); |
|
1367 instructions().append(base->index()); |
|
1368 instructions().append(property->index()); |
|
1369 return dst; |
|
1370 } |
|
1371 |
|
1372 RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value) |
|
1373 { |
|
1374 emitOpcode(op_put_by_index); |
|
1375 instructions().append(base->index()); |
|
1376 instructions().append(index); |
|
1377 instructions().append(value->index()); |
|
1378 return value; |
|
1379 } |
|
1380 |
|
// Emits op_new_object: constructs an empty object literal into |dst|.
RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
{
    emitOpcode(op_new_object);
    instructions().append(dst->index());
    return dst;
}
|
1387 |
|
// Emits op_new_array for an array literal. The leading non-elided element
// values are evaluated into a contiguous run of temporaries so the opcode can
// reference them as (first register, count); elisions terminate the fast run.
RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements)
{
    Vector<RefPtr<RegisterID>, 16> argv;
    for (ElementNode* n = elements; n; n = n->next()) {
        if (n->elision())
            break;
        argv.append(newTemporary());
        // op_new_array requires the initial values to be a sequential range of registers
        ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
        emitNode(argv.last().get(), n->value());
    }
    emitOpcode(op_new_array);
    instructions().append(dst->index());
    instructions().append(argv.size() ? argv[0]->index() : 0); // argv
    instructions().append(argv.size()); // argc
    return dst;
}
|
1405 |
|
1406 RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function) |
|
1407 { |
|
1408 unsigned index = m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function)); |
|
1409 |
|
1410 emitOpcode(op_new_func); |
|
1411 instructions().append(dst->index()); |
|
1412 instructions().append(index); |
|
1413 return dst; |
|
1414 } |
|
1415 |
|
1416 RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp) |
|
1417 { |
|
1418 emitOpcode(op_new_regexp); |
|
1419 instructions().append(dst->index()); |
|
1420 instructions().append(addRegExp(regExp)); |
|
1421 return dst; |
|
1422 } |
|
1423 |
|
1424 RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n) |
|
1425 { |
|
1426 FunctionBodyNode* function = n->body(); |
|
1427 unsigned index = m_codeBlock->addFunctionExpr(makeFunction(m_globalData, function)); |
|
1428 |
|
1429 emitOpcode(op_new_func_exp); |
|
1430 instructions().append(r0->index()); |
|
1431 instructions().append(index); |
|
1432 return r0; |
|
1433 } |
|
1434 |
|
// Convenience wrapper: emits a plain op_call.
RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    return emitCall(op_call, dst, func, callArguments, divot, startOffset, endOffset);
}
|
1439 |
|
// Emits op_create_arguments so the 'arguments' object exists before code that
// needs it runs. No-op outside function code.
void BytecodeGenerator::createArgumentsIfNecessary()
{
    if (m_codeType != FunctionCode)
        return;
    ASSERT(m_codeBlock->usesArguments());

    emitOpcode(op_create_arguments);
    instructions().append(m_codeBlock->argumentsRegister());
}
|
1449 |
|
// Convenience wrapper: emits op_call_eval, which handles a possible direct
// call to eval.
RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    return emitCall(op_call_eval, dst, func, callArguments, divot, startOffset, endOffset);
}
|
1454 |
|
1455 RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset) |
|
1456 { |
|
1457 ASSERT(opcodeID == op_call || opcodeID == op_call_eval); |
|
1458 ASSERT(func->refCount()); |
|
1459 |
|
1460 if (m_shouldEmitProfileHooks) |
|
1461 emitMove(callArguments.profileHookRegister(), func); |
|
1462 |
|
1463 // Generate code for arguments. |
|
1464 unsigned argumentIndex = 0; |
|
1465 for (ArgumentListNode* n = callArguments.argumentsNode()->m_listNode; n; n = n->m_next) |
|
1466 emitNode(callArguments.argumentRegister(argumentIndex++), n); |
|
1467 |
|
1468 // Reserve space for call frame. |
|
1469 Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame; |
|
1470 for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i) |
|
1471 callFrame.append(newTemporary()); |
|
1472 |
|
1473 if (m_shouldEmitProfileHooks) { |
|
1474 emitOpcode(op_profile_will_call); |
|
1475 instructions().append(callArguments.profileHookRegister()->index()); |
|
1476 |
|
1477 #if ENABLE(JIT) |
|
1478 m_codeBlock->addFunctionRegisterInfo(instructions().size(), callArguments.profileHookRegister()->index()); |
|
1479 #endif |
|
1480 } |
|
1481 |
|
1482 emitExpressionInfo(divot, startOffset, endOffset); |
|
1483 |
|
1484 #if ENABLE(JIT) |
|
1485 m_codeBlock->addCallLinkInfo(); |
|
1486 #endif |
|
1487 |
|
1488 // Emit call. |
|
1489 emitOpcode(opcodeID); |
|
1490 instructions().append(func->index()); // func |
|
1491 instructions().append(callArguments.count()); // argCount |
|
1492 instructions().append(callArguments.callFrame()); // registerOffset |
|
1493 if (dst != ignoredResult()) { |
|
1494 emitOpcode(op_call_put_result); |
|
1495 instructions().append(dst->index()); // dst |
|
1496 } |
|
1497 |
|
1498 if (m_shouldEmitProfileHooks) { |
|
1499 emitOpcode(op_profile_did_call); |
|
1500 instructions().append(callArguments.profileHookRegister()->index()); |
|
1501 } |
|
1502 |
|
1503 return dst; |
|
1504 } |
|
1505 |
|
1506 RegisterID* BytecodeGenerator::emitLoadVarargs(RegisterID* argCountDst, RegisterID* arguments) |
|
1507 { |
|
1508 ASSERT(argCountDst->index() < arguments->index()); |
|
1509 emitOpcode(op_load_varargs); |
|
1510 instructions().append(argCountDst->index()); |
|
1511 instructions().append(arguments->index()); |
|
1512 return argCountDst; |
|
1513 } |
|
1514 |
|
// Emits op_call_varargs for f.apply(...)-style calls where the argument count
// is only known at runtime (in |argCountRegister|). The register offset is
// derived from |thisRegister| plus the call-frame header size.
RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* argCountRegister, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(func->refCount());
    ASSERT(thisRegister->refCount());
    ASSERT(dst != func);
    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(func->index());

#if ENABLE(JIT)
        m_codeBlock->addFunctionRegisterInfo(instructions().size(), func->index());
#endif
    }

    // Attribute the call to its source range for exception/debugger info.
    emitExpressionInfo(divot, startOffset, endOffset);

    // Emit call.
    emitOpcode(op_call_varargs);
    instructions().append(func->index()); // func
    instructions().append(argCountRegister->index()); // arg count
    instructions().append(thisRegister->index() + RegisterFile::CallFrameHeaderSize); // initial registerOffset
    if (dst != ignoredResult()) {
        emitOpcode(op_call_put_result);
        instructions().append(dst->index()); // dst
    }
    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(func->index());
    }
    return dst;
}
|
1546 |
|
// Emits a function return: first tears off the activation and/or arguments
// object so they survive the frame, then emits op_ret (or
// op_ret_object_or_this in constructors, which substitutes 'this' when the
// returned value is not an object).
RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
{
    if (m_codeBlock->needsFullScopeChain()) {
        emitOpcode(op_tear_off_activation);
        instructions().append(m_activationRegister->index());
        instructions().append(m_codeBlock->argumentsRegister());
    } else if (m_codeBlock->usesArguments() && m_codeBlock->m_numParameters > 1) { // If there are no named parameters, there's nothing to tear off, since extra / unnamed parameters get copied to the arguments object at construct time.
        emitOpcode(op_tear_off_arguments);
        instructions().append(m_codeBlock->argumentsRegister());
    }

    // Constructors use op_ret_object_or_this to check the result is an
    // object, unless we can trivially determine the check is not
    // necessary (currently, if the return value is 'this').
    if (isConstructor() && (src->index() != m_thisRegister.index())) {
        emitOpcode(op_ret_object_or_this);
        instructions().append(src->index());
        instructions().append(m_thisRegister.index());
        return src;
    }
    return emitUnaryNoDstOp(op_ret, src);
}
|
1569 |
|
// Emits a one-operand opcode that has no destination register (e.g. op_ret,
// op_throw, op_push_scope).
RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(src->index());
    return src;
}
|
1576 |
|
// Emits op_construct for a 'new' expression: evaluates arguments, reserves
// call-frame space, wraps the construct in profiler hooks when enabled, and
// stores the result via op_call_put_result unless ignored.
RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(func->refCount());

    if (m_shouldEmitProfileHooks)
        emitMove(callArguments.profileHookRegister(), func);

    // Generate code for arguments. 'new Foo' may have no argument list at all,
    // hence the null check.
    unsigned argumentIndex = 0;
    if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
        for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
            emitNode(callArguments.argumentRegister(argumentIndex++), n);
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
    for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    // Attribute the construct to its source range for exception/debugger info.
    emitExpressionInfo(divot, startOffset, endOffset);

#if ENABLE(JIT)
    m_codeBlock->addCallLinkInfo();
#endif

    emitOpcode(op_construct);
    instructions().append(func->index()); // func
    instructions().append(callArguments.count()); // argCount
    instructions().append(callArguments.callFrame()); // registerOffset
    if (dst != ignoredResult()) {
        emitOpcode(op_call_put_result);
        instructions().append(dst->index()); // dst
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    return dst;
}
|
1623 |
|
// Emits op_strcat, which concatenates 'count' consecutive registers starting
// at 'src' into a single string stored in 'dst'. Returns dst.
RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
{
    emitOpcode(op_strcat);
    instructions().append(dst->index());
    instructions().append(src->index());
    instructions().append(count);

    return dst;
}
|
1633 |
|
// Emits op_to_primitive, converting the value in 'src' to a primitive value
// and storing the result in 'dst'.
void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
{
    emitOpcode(op_to_primitive);
    instructions().append(dst->index());
    instructions().append(src->index());
}
|
1640 |
|
1641 RegisterID* BytecodeGenerator::emitPushScope(RegisterID* scope) |
|
1642 { |
|
1643 ASSERT(scope->isTemporary()); |
|
1644 ControlFlowContext context; |
|
1645 context.isFinallyBlock = false; |
|
1646 m_scopeContextStack.append(context); |
|
1647 m_dynamicScopeDepth++; |
|
1648 |
|
1649 return emitUnaryNoDstOp(op_push_scope, scope); |
|
1650 } |
|
1651 |
|
// Emits op_pop_scope and unwinds the bookkeeping done by emitPushScope().
// The asserts enforce that the innermost tracked scope is a dynamic scope,
// not a finally block.
void BytecodeGenerator::emitPopScope()
{
    ASSERT(m_scopeContextStack.size());
    ASSERT(!m_scopeContextStack.last().isFinallyBlock);

    emitOpcode(op_pop_scope);

    m_scopeContextStack.removeLast();
    m_dynamicScopeDepth--;
}
|
1662 |
|
// Emits an op_debug instruction notifying the debugger of the given event,
// attributed to source lines [firstLine, lastLine]. Emits nothing when
// debug hooks are disabled.
void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    // In this configuration only breakpoint hooks are emitted, regardless of
    // m_shouldEmitDebugHooks.
    if (debugHookID != DidReachBreakpoint)
        return;
#else
    if (!m_shouldEmitDebugHooks)
        return;
#endif
    emitOpcode(op_debug);
    instructions().append(debugHookID);
    instructions().append(firstLine);
    instructions().append(lastLine);
}
|
1677 |
|
1678 void BytecodeGenerator::pushFinallyContext(Label* target, RegisterID* retAddrDst) |
|
1679 { |
|
1680 ControlFlowContext scope; |
|
1681 scope.isFinallyBlock = true; |
|
1682 FinallyContext context = { target, retAddrDst }; |
|
1683 scope.finallyContext = context; |
|
1684 m_scopeContextStack.append(scope); |
|
1685 m_finallyDepth++; |
|
1686 } |
|
1687 |
|
1688 void BytecodeGenerator::popFinallyContext() |
|
1689 { |
|
1690 ASSERT(m_scopeContextStack.size()); |
|
1691 ASSERT(m_scopeContextStack.last().isFinallyBlock); |
|
1692 ASSERT(m_finallyDepth > 0); |
|
1693 m_scopeContextStack.removeLast(); |
|
1694 m_finallyDepth--; |
|
1695 } |
|
1696 |
|
1697 LabelScope* BytecodeGenerator::breakTarget(const Identifier& name) |
|
1698 { |
|
1699 // Reclaim free label scopes. |
|
1700 // |
|
1701 // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()', |
|
1702 // however sometimes this appears to lead to GCC going a little haywire and entering the loop with |
|
1703 // size 0, leading to segfaulty badness. We are yet to identify a valid cause within our code to |
|
1704 // cause the GCC codegen to misbehave in this fashion, and as such the following refactoring of the |
|
1705 // loop condition is a workaround. |
|
1706 while (m_labelScopes.size()) { |
|
1707 if (m_labelScopes.last().refCount()) |
|
1708 break; |
|
1709 m_labelScopes.removeLast(); |
|
1710 } |
|
1711 |
|
1712 if (!m_labelScopes.size()) |
|
1713 return 0; |
|
1714 |
|
1715 // We special-case the following, which is a syntax error in Firefox: |
|
1716 // label: |
|
1717 // break; |
|
1718 if (name.isEmpty()) { |
|
1719 for (int i = m_labelScopes.size() - 1; i >= 0; --i) { |
|
1720 LabelScope* scope = &m_labelScopes[i]; |
|
1721 if (scope->type() != LabelScope::NamedLabel) { |
|
1722 ASSERT(scope->breakTarget()); |
|
1723 return scope; |
|
1724 } |
|
1725 } |
|
1726 return 0; |
|
1727 } |
|
1728 |
|
1729 for (int i = m_labelScopes.size() - 1; i >= 0; --i) { |
|
1730 LabelScope* scope = &m_labelScopes[i]; |
|
1731 if (scope->name() && *scope->name() == name) { |
|
1732 ASSERT(scope->breakTarget()); |
|
1733 return scope; |
|
1734 } |
|
1735 } |
|
1736 return 0; |
|
1737 } |
|
1738 |
|
1739 LabelScope* BytecodeGenerator::continueTarget(const Identifier& name) |
|
1740 { |
|
1741 // Reclaim free label scopes. |
|
1742 while (m_labelScopes.size() && !m_labelScopes.last().refCount()) |
|
1743 m_labelScopes.removeLast(); |
|
1744 |
|
1745 if (!m_labelScopes.size()) |
|
1746 return 0; |
|
1747 |
|
1748 if (name.isEmpty()) { |
|
1749 for (int i = m_labelScopes.size() - 1; i >= 0; --i) { |
|
1750 LabelScope* scope = &m_labelScopes[i]; |
|
1751 if (scope->type() == LabelScope::Loop) { |
|
1752 ASSERT(scope->continueTarget()); |
|
1753 return scope; |
|
1754 } |
|
1755 } |
|
1756 return 0; |
|
1757 } |
|
1758 |
|
1759 // Continue to the loop nested nearest to the label scope that matches |
|
1760 // 'name'. |
|
1761 LabelScope* result = 0; |
|
1762 for (int i = m_labelScopes.size() - 1; i >= 0; --i) { |
|
1763 LabelScope* scope = &m_labelScopes[i]; |
|
1764 if (scope->type() == LabelScope::Loop) { |
|
1765 ASSERT(scope->continueTarget()); |
|
1766 result = scope; |
|
1767 } |
|
1768 if (scope->name() && *scope->name() == name) |
|
1769 return result; // may be 0 |
|
1770 } |
|
1771 return 0; |
|
1772 } |
|
1773 |
|
// Emits the bytecode for a jump that exits one or more scopes when at least
// one finally block must run on the way out. Walks from topScope down to
// bottomScope, alternating between popping runs of dynamic scopes
// (op_jmp_scopes) and invoking intervening finally blocks as subroutines.
PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope)
{
    while (topScope > bottomScope) {
        // First we count the number of dynamic scopes we need to remove to get
        // to a finally block.
        int nNormalScopes = 0;
        while (topScope > bottomScope) {
            if (topScope->isFinallyBlock)
                break;
            ++nNormalScopes;
            --topScope;
        }

        if (nNormalScopes) {
            size_t begin = instructions().size();

            // We need to remove a number of dynamic scopes to get to the next
            // finally block
            emitOpcode(op_jmp_scopes);
            instructions().append(nNormalScopes);

            // If topScope == bottomScope then there isn't actually a finally block
            // left to emit, so make the jmp_scopes jump directly to the target label
            if (topScope == bottomScope) {
                instructions().append(target->bind(begin, instructions().size()));
                return target;
            }

            // Otherwise we just use jmp_scopes to pop a group of scopes and go
            // to the next instruction
            RefPtr<Label> nextInsn = newLabel();
            instructions().append(nextInsn->bind(begin, instructions().size()));
            emitLabel(nextInsn.get());
        }

        // Call each consecutive finally block as a subroutine; its op_sret
        // returns control to the instruction following the op_jsr.
        while (topScope > bottomScope && topScope->isFinallyBlock) {
            emitJumpSubroutine(topScope->finallyContext.retAddrDst, topScope->finallyContext.finallyAddr);
            --topScope;
        }
    }
    return emitJump(target);
}
|
1816 |
|
// Emits a forward jump to 'target' that also pops any dynamic scopes between
// the current depth and 'targetScopeDepth'. Delegates to
// emitComplexJumpScopes() when finally blocks must run along the way.
PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth)
{
    ASSERT(scopeDepth() - targetScopeDepth >= 0);
    ASSERT(target->isForward());

    size_t scopeDelta = scopeDepth() - targetScopeDepth;
    ASSERT(scopeDelta <= m_scopeContextStack.size());
    if (!scopeDelta)
        return emitJump(target); // No scopes to pop; a plain jump suffices.

    if (m_finallyDepth)
        return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);

    size_t begin = instructions().size();

    emitOpcode(op_jmp_scopes);
    instructions().append(scopeDelta);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
|
1837 |
|
// Emits op_get_pnames, the setup instruction for for-in enumeration over
// 'base', writing into the dst/i/size registers and jumping to 'breakTarget'
// as the no-iteration exit.
// NOTE(review): the precise operand semantics (what i/size hold) are defined
// by the interpreter's op_get_pnames implementation — confirm there.
RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
{
    size_t begin = instructions().size();

    emitOpcode(op_get_pnames);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(breakTarget->bind(begin, instructions().size()));
    return dst;
}
|
1850 |
|
// Emits op_next_pname, the per-iteration step of for-in enumeration paired
// with emitGetPropertyNames(): advances the iterator state in i/size/iter
// and jumps to 'target' while properties remain, writing the next property
// name into 'dst'.
RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_next_pname);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(iter->index());
    instructions().append(target->bind(begin, instructions().size()));
    return dst;
}
|
1864 |
|
// Registers an exception handler covering the bytecode range [start, end)
// and emits op_catch, which stores the caught exception into
// 'targetRegister' when the handler is entered.
RegisterID* BytecodeGenerator::emitCatch(RegisterID* targetRegister, Label* start, Label* end)
{
    m_usesExceptions = true;
#if ENABLE(JIT)
    // JIT builds carry an extra (initially empty) native-code location for
    // the handler.
    HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel() };
#else
    HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth };
#endif

    m_codeBlock->addExceptionHandler(info);
    emitOpcode(op_catch);
    instructions().append(targetRegister->index());
    return targetRegister;
}
|
1879 |
|
// Emits op_new_error, constructing an error object (a ReferenceError when
// isReferenceError is true) with the given constant message into 'dst'.
RegisterID* BytecodeGenerator::emitNewError(RegisterID* dst, bool isReferenceError, JSValue message)
{
    emitOpcode(op_new_error);
    instructions().append(dst->index());
    instructions().append(isReferenceError);
    instructions().append(addConstantValue(message)->index());
    return dst;
}
|
1888 |
|
// Emits op_jsr: jumps to the 'finally' label, storing the return address in
// 'retAddrDst' so that the finally block's op_sret can resume execution at
// the instruction following this one.
PassRefPtr<Label> BytecodeGenerator::emitJumpSubroutine(RegisterID* retAddrDst, Label* finally)
{
    size_t begin = instructions().size();

    emitOpcode(op_jsr);
    instructions().append(retAddrDst->index());
    instructions().append(finally->bind(begin, instructions().size()));
    emitLabel(newLabel().get()); // Record the fact that the next instruction is implicitly labeled, because op_sret will return to it.
    return finally;
}
|
1899 |
|
// Emits op_sret, returning from a finally-block subroutine to the address
// stored by the matching op_jsr (see emitJumpSubroutine()).
void BytecodeGenerator::emitSubroutineReturn(RegisterID* retAddrSrc)
{
    emitOpcode(op_sret);
    instructions().append(retAddrSrc->index());
}
|
1905 |
|
1906 void BytecodeGenerator::emitPushNewScope(RegisterID* dst, const Identifier& property, RegisterID* value) |
|
1907 { |
|
1908 ControlFlowContext context; |
|
1909 context.isFinallyBlock = false; |
|
1910 m_scopeContextStack.append(context); |
|
1911 m_dynamicScopeDepth++; |
|
1912 |
|
1913 emitOpcode(op_push_new_scope); |
|
1914 instructions().append(dst->index()); |
|
1915 instructions().append(addConstant(property)); |
|
1916 instructions().append(value->index()); |
|
1917 } |
|
1918 |
|
1919 void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type) |
|
1920 { |
|
1921 SwitchInfo info = { instructions().size(), type }; |
|
1922 switch (type) { |
|
1923 case SwitchInfo::SwitchImmediate: |
|
1924 emitOpcode(op_switch_imm); |
|
1925 break; |
|
1926 case SwitchInfo::SwitchCharacter: |
|
1927 emitOpcode(op_switch_char); |
|
1928 break; |
|
1929 case SwitchInfo::SwitchString: |
|
1930 emitOpcode(op_switch_string); |
|
1931 break; |
|
1932 default: |
|
1933 ASSERT_NOT_REACHED(); |
|
1934 } |
|
1935 |
|
1936 instructions().append(0); // place holder for table index |
|
1937 instructions().append(0); // place holder for default target |
|
1938 instructions().append(scrutineeRegister->index()); |
|
1939 m_switchContextStack.append(info); |
|
1940 } |
|
1941 |
|
1942 static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max) |
|
1943 { |
|
1944 UNUSED_PARAM(max); |
|
1945 ASSERT(node->isNumber()); |
|
1946 double value = static_cast<NumberNode*>(node)->value(); |
|
1947 int32_t key = static_cast<int32_t>(value); |
|
1948 ASSERT(key == value); |
|
1949 ASSERT(key >= min); |
|
1950 ASSERT(key <= max); |
|
1951 return key - min; |
|
1952 } |
|
1953 |
|
1954 static void prepareJumpTableForImmediateSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max) |
|
1955 { |
|
1956 jumpTable.min = min; |
|
1957 jumpTable.branchOffsets.resize(max - min + 1); |
|
1958 jumpTable.branchOffsets.fill(0); |
|
1959 for (uint32_t i = 0; i < clauseCount; ++i) { |
|
1960 // We're emitting this after the clause labels should have been fixed, so |
|
1961 // the labels should not be "forward" references |
|
1962 ASSERT(!labels[i]->isForward()); |
|
1963 jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); |
|
1964 } |
|
1965 } |
|
1966 |
|
1967 static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max) |
|
1968 { |
|
1969 UNUSED_PARAM(max); |
|
1970 ASSERT(node->isString()); |
|
1971 UString::Rep* clause = static_cast<StringNode*>(node)->value().ustring().rep(); |
|
1972 ASSERT(clause->length() == 1); |
|
1973 |
|
1974 int32_t key = clause->characters()[0]; |
|
1975 ASSERT(key >= min); |
|
1976 ASSERT(key <= max); |
|
1977 return key - min; |
|
1978 } |
|
1979 |
|
1980 static void prepareJumpTableForCharacterSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max) |
|
1981 { |
|
1982 jumpTable.min = min; |
|
1983 jumpTable.branchOffsets.resize(max - min + 1); |
|
1984 jumpTable.branchOffsets.fill(0); |
|
1985 for (uint32_t i = 0; i < clauseCount; ++i) { |
|
1986 // We're emitting this after the clause labels should have been fixed, so |
|
1987 // the labels should not be "forward" references |
|
1988 ASSERT(!labels[i]->isForward()); |
|
1989 jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); |
|
1990 } |
|
1991 } |
|
1992 |
|
1993 static void prepareJumpTableForStringSwitch(StringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes) |
|
1994 { |
|
1995 for (uint32_t i = 0; i < clauseCount; ++i) { |
|
1996 // We're emitting this after the clause labels should have been fixed, so |
|
1997 // the labels should not be "forward" references |
|
1998 ASSERT(!labels[i]->isForward()); |
|
1999 |
|
2000 ASSERT(nodes[i]->isString()); |
|
2001 UString::Rep* clause = static_cast<StringNode*>(nodes[i])->value().ustring().rep(); |
|
2002 OffsetLocation location; |
|
2003 location.branchOffset = labels[i]->bind(switchAddress, switchAddress + 3); |
|
2004 jumpTable.offsetTable.add(clause, location); |
|
2005 } |
|
2006 } |
|
2007 |
|
2008 void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max) |
|
2009 { |
|
2010 SwitchInfo switchInfo = m_switchContextStack.last(); |
|
2011 m_switchContextStack.removeLast(); |
|
2012 if (switchInfo.switchType == SwitchInfo::SwitchImmediate) { |
|
2013 instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables(); |
|
2014 instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3); |
|
2015 |
|
2016 SimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable(); |
|
2017 prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max); |
|
2018 } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) { |
|
2019 instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables(); |
|
2020 instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3); |
|
2021 |
|
2022 SimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable(); |
|
2023 prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max); |
|
2024 } else { |
|
2025 ASSERT(switchInfo.switchType == SwitchInfo::SwitchString); |
|
2026 instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables(); |
|
2027 instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3); |
|
2028 |
|
2029 StringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable(); |
|
2030 prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes); |
|
2031 } |
|
2032 } |
|
2033 |
|
// Emits bytecode that constructs and throws an "Expression too deep" error,
// used when expression nesting exceeds the generator's limits. Returns the
// register holding the error object.
RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
{
    // It would be nice to do an even better job of identifying exactly where the expression is.
    // And we could make the caller pass the node pointer in, if there was some way of getting
    // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
    // is still good enough to get us an accurate line number.
    emitExpressionInfo(0, 0, 0);
    RegisterID* exception = emitNewError(newTemporary(), false, jsString(globalData(), "Expression too deep"));
    emitThrow(exception);
    return exception;
}
|
2045 |
|
// Forwards the numeric-compare-function flag to the code block.
// NOTE(review): what the code block does with this flag is not visible here;
// confirm in CodeBlock.
void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
{
    m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
}
|
2050 |
|
// Maps 'ident' to a small positive argument number when it resolves to one
// of this function's parameter registers, or returns 0 otherwise.
// NOTE(review): the arithmetic assumes parameter registers use negative
// indices laid out directly before the call frame header — confirm against
// RegisterFile's register layout.
int BytecodeGenerator::argumentNumberFor(const Identifier& ident)
{
    int parameterCount = m_parameters.size(); // includes 'this'
    RegisterID* registerID = registerFor(ident);
    if (!registerID)
        return 0;
    // Translate the register index into an argument number; values outside
    // the open interval (0, parameterCount) do not correspond to an argument.
    int index = registerID->index() + RegisterFile::CallFrameHeaderSize + parameterCount;
    return (index > 0 && index < parameterCount) ? index : 0;
}
|
2060 |
|
2061 } // namespace JSC |