|
1 /* |
|
2 * Copyright (C) 2008, 2009, 2010 Apple Inc. All rights reserved. |
|
3 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca> |
|
4 * |
|
5 * Redistribution and use in source and binary forms, with or without |
|
6 * modification, are permitted provided that the following conditions |
|
7 * are met: |
|
8 * |
|
9 * 1. Redistributions of source code must retain the above copyright |
|
10 * notice, this list of conditions and the following disclaimer. |
|
11 * 2. Redistributions in binary form must reproduce the above copyright |
|
12 * notice, this list of conditions and the following disclaimer in the |
|
13 * documentation and/or other materials provided with the distribution. |
|
14 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of |
|
15 * its contributors may be used to endorse or promote products derived |
|
16 * from this software without specific prior written permission. |
|
17 * |
|
18 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY |
|
19 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED |
|
20 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE |
|
21 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY |
|
22 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES |
|
23 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; |
|
24 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND |
|
25 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
|
26 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
|
27 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
|
28 */ |
|
29 |
|
30 #ifndef CodeBlock_h |
|
31 #define CodeBlock_h |
|
32 |
|
33 #include "EvalCodeCache.h" |
|
34 #include "Instruction.h" |
|
35 #include "JITCode.h" |
|
36 #include "JSGlobalObject.h" |
|
37 #include "JumpTable.h" |
|
38 #include "Nodes.h" |
|
39 #include "RegExp.h" |
|
40 #include "UString.h" |
|
41 #include <wtf/FastAllocBase.h> |
|
42 #include <wtf/PassOwnPtr.h> |
|
43 #include <wtf/RefPtr.h> |
|
44 #include <wtf/Vector.h> |
|
45 |
|
46 #if ENABLE(JIT) |
|
47 #include "StructureStubInfo.h" |
|
48 #endif |
|
49 |
|
// Register numbers used in bytecode operations have different meaning according to their ranges:
//      0x80000000-0xFFFFFFFF  Negative indices from the CallFrame pointer are entries in the call frame, see RegisterFile.h.
//      0x00000000-0x3FFFFFFF  Forwards indices from the CallFrame pointer are local vars and temporaries within the function's callframe.
//      0x40000000-0x7FFFFFFF  Positive indices from 0x40000000 specify entries in the constant pool on the CodeBlock.
|
// First register index that addresses the CodeBlock's constant pool rather
// than the register file; see the range table above.
static const int FirstConstantRegisterIndex = 0x40000000;
|
55 |
|
56 namespace JSC { |
|
57 |
|
    // Tag enum; presumably used to select code paths that have already
    // observed a repatchable cache miss (see CallLinkInfo::seenOnce/setSeen)
    // — confirm at call sites.
    enum HasSeenShouldRepatch {
        hasSeenShouldRepatch
    };
|
61 |
|
62 class ExecState; |
|
63 |
|
    // The kind of source a CodeBlock was compiled from: a whole program, an
    // eval, or a function body.
    enum CodeType { GlobalCode, EvalCode, FunctionCode };
|
65 |
|
66 inline int unmodifiedArgumentsRegister(int argumentsRegister) { return argumentsRegister - 1; } |
|
67 |
|
    // Sentinel value (INT_MAX) used to mark the absence of a 'this' object in
    // a bytecode operand.
    static ALWAYS_INLINE int missingThisObjectMarker() { return std::numeric_limits<int>::max(); }
|
69 |
|
    // One entry in a CodeBlock's exception-handler table: the bytecode range
    // the handler covers, where to jump when an exception occurs in it, and
    // the scope depth to unwind to first.
    struct HandlerInfo {
        uint32_t start; // Start of the covered bytecode range.
        uint32_t end; // End of the covered bytecode range.
        uint32_t target; // Bytecode offset of the handler itself.
        uint32_t scopeDepth; // Scope-chain depth at the handler.
#if ENABLE(JIT)
        CodeLocationLabel nativeCode; // JIT-compiled entry point for the handler.
#endif
    };
|
79 |
|
    // Maps an instruction to the source-expression range that produced it.
    // The bitfield widths match the Max* limits: 25 bits for the instruction
    // offset and divot, 7 bits for each side's offset — 64 bits in total.
    struct ExpressionRangeInfo {
        enum {
            MaxOffset = (1 << 7) - 1,
            MaxDivot = (1 << 25) - 1
        };
        uint32_t instructionOffset : 25; // Offset of the instruction in the bytecode stream.
        uint32_t divotPoint : 25; // Source "divot" (focal point) of the expression.
        uint32_t startOffset : 7; // Characters from the divot back to the expression start.
        uint32_t endOffset : 7; // Characters from the divot forward to the expression end.
    };
|
90 |
|
    // Associates a bytecode offset with the source line it was generated from.
    struct LineInfo {
        uint32_t instructionOffset;
        int32_t lineNumber;
    };
|
95 |
|
    // Both op_construct and op_instanceof require a use of op_get_by_id to get
    // the prototype property from an object. The exception messages for exceptions
    // thrown by these instances of op_get_by_id need to reflect this.
    // NOTE(review): the flag below is named isOpCreateThis, which suggests the
    // originating opcode is op_create_this rather than op_construct — confirm
    // against the code that plants these records.
    struct GetByIdExceptionInfo {
        unsigned bytecodeOffset : 31; // Offset of the op_get_by_id in the bytecode stream.
        bool isOpCreateThis : 1; // Which originating opcode this get_by_id was emitted for.
    };
|
103 |
|
104 #if ENABLE(JIT) |
|
105 struct CallLinkInfo { |
|
106 CallLinkInfo() |
|
107 : callee(0) |
|
108 , position(0) |
|
109 , hasSeenShouldRepatch(0) |
|
110 { |
|
111 } |
|
112 |
|
113 unsigned bytecodeOffset; |
|
114 CodeLocationNearCall callReturnLocation; |
|
115 CodeLocationDataLabelPtr hotPathBegin; |
|
116 CodeLocationNearCall hotPathOther; |
|
117 CodeBlock* ownerCodeBlock; |
|
118 CodeBlock* callee; |
|
119 unsigned position : 31; |
|
120 unsigned hasSeenShouldRepatch : 1; |
|
121 |
|
122 void setUnlinked() { callee = 0; } |
|
123 bool isLinked() { return callee; } |
|
124 |
|
125 bool seenOnce() |
|
126 { |
|
127 return hasSeenShouldRepatch; |
|
128 } |
|
129 |
|
130 void setSeen() |
|
131 { |
|
132 hasSeenShouldRepatch = true; |
|
133 } |
|
134 }; |
|
135 |
|
    // Per-site inline-cache state for a cached method load; the pair of
    // structure pointers doubles as a small state machine (see setSeen).
    struct MethodCallLinkInfo {
        MethodCallLinkInfo()
            : cachedStructure(0)
            , cachedPrototypeStructure(0)
        {
        }

        // True once setSeen() has been called; may only be queried while the
        // site is still unlinked (cachedStructure must be null).
        bool seenOnce()
        {
            ASSERT(!cachedStructure);
            return cachedPrototypeStructure;
        }

        void setSeen()
        {
            ASSERT(!cachedStructure && !cachedPrototypeStructure);
            // We use the values of cachedStructure & cachedPrototypeStructure to indicate the
            // current state.
            // - In the initial state, both are null.
            // - Once this transition has been taken once, cachedStructure is
            // null and cachedPrototypeStructure is set to a non-null value.
            // - Once the call is linked both structures are set to non-null values.
            cachedPrototypeStructure = (Structure*)1;
        }

        CodeLocationCall callReturnLocation;
        CodeLocationDataLabelPtr structureLabel;
        Structure* cachedStructure;
        Structure* cachedPrototypeStructure; // (Structure*)1 is a "seen once" sentinel, never dereferenced.
    };
|
166 |
|
167 struct FunctionRegisterInfo { |
|
168 FunctionRegisterInfo(unsigned bytecodeOffset, int functionRegisterIndex) |
|
169 : bytecodeOffset(bytecodeOffset) |
|
170 , functionRegisterIndex(functionRegisterIndex) |
|
171 { |
|
172 } |
|
173 |
|
174 unsigned bytecodeOffset; |
|
175 int functionRegisterIndex; |
|
176 }; |
|
177 |
|
178 struct GlobalResolveInfo { |
|
179 GlobalResolveInfo(unsigned bytecodeOffset) |
|
180 : structure(0) |
|
181 , offset(0) |
|
182 , bytecodeOffset(bytecodeOffset) |
|
183 { |
|
184 } |
|
185 |
|
186 Structure* structure; |
|
187 unsigned offset; |
|
188 unsigned bytecodeOffset; |
|
189 }; |
|
190 |
|
191 // This structure is used to map from a call return location |
|
192 // (given as an offset in bytes into the JIT code) back to |
|
193 // the bytecode index of the corresponding bytecode operation. |
|
194 // This is then used to look up the corresponding handler. |
|
195 struct CallReturnOffsetToBytecodeOffset { |
|
196 CallReturnOffsetToBytecodeOffset(unsigned callReturnOffset, unsigned bytecodeOffset) |
|
197 : callReturnOffset(callReturnOffset) |
|
198 , bytecodeOffset(bytecodeOffset) |
|
199 { |
|
200 } |
|
201 |
|
202 unsigned callReturnOffset; |
|
203 unsigned bytecodeOffset; |
|
204 }; |
|
205 |
|
    // valueAtPosition helpers for the binaryChop algorithm below.

    // Key extractor: the machine-code return address a StructureStubInfo is
    // sorted and searched by.
    inline void* getStructureStubInfoReturnLocation(StructureStubInfo* structureStubInfo)
    {
        return structureStubInfo->callReturnLocation.executableAddress();
    }
|
212 |
|
    // Key extractor: the machine-code return address a CallLinkInfo is sorted
    // and searched by.
    inline void* getCallLinkInfoReturnLocation(CallLinkInfo* callLinkInfo)
    {
        return callLinkInfo->callReturnLocation.executableAddress();
    }
|
217 |
|
    // Key extractor: the machine-code return address a MethodCallLinkInfo is
    // sorted and searched by.
    inline void* getMethodCallLinkInfoReturnLocation(MethodCallLinkInfo* methodCallLinkInfo)
    {
        return methodCallLinkInfo->callReturnLocation.executableAddress();
    }
|
222 |
|
223 inline unsigned getCallReturnOffset(CallReturnOffsetToBytecodeOffset* pc) |
|
224 { |
|
225 return pc->callReturnOffset; |
|
226 } |
|
227 |
|
    // Binary chop algorithm, calls valueAtPosition on pre-sorted elements in array,
    // compares result with key (KeyTypes should be comparable with '==', '<', '>').
    // Optimized for cases where the array contains the key, checked by assertions.
    template<typename ArrayType, typename KeyType, KeyType(*valueAtPosition)(ArrayType*)>
    inline ArrayType* binaryChop(ArrayType* array, size_t size, KeyType key)
    {
        // The array must contain at least one element (pre-condition, array does contain key).
        // If the array only contains one element, no need to do the comparison.
        while (size > 1) {
            // Pick an element to check, half way through the array, and read the value.
            int pos = (size - 1) >> 1;
            KeyType val = valueAtPosition(&array[pos]);

            // If the key matches, success!
            if (val == key)
                return &array[pos];
            // The item we are looking for is smaller than the item being checked; reduce the value of 'size',
            // chopping off the right hand half of the array.
            else if (key < val)
                size = pos;
            // Discard all values in the left hand half of the array, up to and including the item at pos.
            else {
                size -= (pos + 1);
                array += (pos + 1);
            }

            // 'size' should never reach zero.
            ASSERT(size);
        }

        // If we reach this point we've chopped down to one element, no need to check it matches
        ASSERT(size == 1);
        ASSERT(key == valueAtPosition(&array[0]));
        return &array[0];
    }
|
263 #endif |
|
264 |
|
    // Exception/debug metadata for a CodeBlock: source expression ranges, line
    // numbers, and get_by_id provenance used when building exception messages.
    // Held separately so a CodeBlock can discard it and regenerate it on
    // demand (see CodeBlock::extractExceptionInfo / clearExceptionInfo).
    struct ExceptionInfo : FastAllocBase {
        Vector<ExpressionRangeInfo> m_expressionInfo;
        Vector<LineInfo> m_lineInfo;
        Vector<GetByIdExceptionInfo> m_getByIdExceptionInfo;

#if ENABLE(JIT)
        // Mapping from JIT call-return offsets back to bytecode offsets,
        // pre-sorted for binaryChop (see CodeBlock::bytecodeOffset).
        Vector<CallReturnOffsetToBytecodeOffset> m_callReturnIndexVector;
#endif
    };
|
274 |
|
275 class CodeBlock : public FastAllocBase { |
|
276 friend class JIT; |
|
277 protected: |
|
278 CodeBlock(ScriptExecutable* ownerExecutable, CodeType, PassRefPtr<SourceProvider>, unsigned sourceOffset, SymbolTable* symbolTable, bool isConstructor); |
|
279 public: |
|
280 virtual ~CodeBlock(); |
|
281 |
|
282 void markAggregate(MarkStack&); |
|
283 void refStructures(Instruction* vPC) const; |
|
284 void derefStructures(Instruction* vPC) const; |
|
285 #if ENABLE(JIT_OPTIMIZE_CALL) |
|
286 void unlinkCallers(); |
|
287 #endif |
|
288 |
|
289 static void dumpStatistics(); |
|
290 |
|
291 #if !defined(NDEBUG) || ENABLE_OPCODE_SAMPLING |
|
292 void dump(ExecState*) const; |
|
293 void printStructures(const Instruction*) const; |
|
294 void printStructure(const char* name, const Instruction*, int operand) const; |
|
295 #endif |
|
296 |
|
297 inline bool isKnownNotImmediate(int index) |
|
298 { |
|
299 if (index == m_thisRegister) |
|
300 return true; |
|
301 |
|
302 if (isConstantRegisterIndex(index)) |
|
303 return getConstant(index).isCell(); |
|
304 |
|
305 return false; |
|
306 } |
|
307 |
|
308 ALWAYS_INLINE bool isTemporaryRegisterIndex(int index) |
|
309 { |
|
310 return index >= m_numVars; |
|
311 } |
|
312 |
|
313 HandlerInfo* handlerForBytecodeOffset(unsigned bytecodeOffset); |
|
314 int lineNumberForBytecodeOffset(CallFrame*, unsigned bytecodeOffset); |
|
315 int expressionRangeForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, int& divot, int& startOffset, int& endOffset); |
|
316 bool getByIdExceptionInfoForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, OpcodeID&); |
|
317 |
|
318 #if ENABLE(JIT) |
|
319 void addCaller(CallLinkInfo* caller) |
|
320 { |
|
321 caller->callee = this; |
|
322 caller->position = m_linkedCallerList.size(); |
|
323 m_linkedCallerList.append(caller); |
|
324 } |
|
325 |
|
326 void removeCaller(CallLinkInfo* caller) |
|
327 { |
|
328 unsigned pos = caller->position; |
|
329 unsigned lastPos = m_linkedCallerList.size() - 1; |
|
330 |
|
331 if (pos != lastPos) { |
|
332 m_linkedCallerList[pos] = m_linkedCallerList[lastPos]; |
|
333 m_linkedCallerList[pos]->position = pos; |
|
334 } |
|
335 m_linkedCallerList.shrink(lastPos); |
|
336 } |
|
337 |
|
338 StructureStubInfo& getStubInfo(ReturnAddressPtr returnAddress) |
|
339 { |
|
340 return *(binaryChop<StructureStubInfo, void*, getStructureStubInfoReturnLocation>(m_structureStubInfos.begin(), m_structureStubInfos.size(), returnAddress.value())); |
|
341 } |
|
342 |
|
343 CallLinkInfo& getCallLinkInfo(ReturnAddressPtr returnAddress) |
|
344 { |
|
345 return *(binaryChop<CallLinkInfo, void*, getCallLinkInfoReturnLocation>(m_callLinkInfos.begin(), m_callLinkInfos.size(), returnAddress.value())); |
|
346 } |
|
347 |
|
348 MethodCallLinkInfo& getMethodCallLinkInfo(ReturnAddressPtr returnAddress) |
|
349 { |
|
350 return *(binaryChop<MethodCallLinkInfo, void*, getMethodCallLinkInfoReturnLocation>(m_methodCallLinkInfos.begin(), m_methodCallLinkInfos.size(), returnAddress.value())); |
|
351 } |
|
352 |
|
353 unsigned bytecodeOffset(CallFrame* callFrame, ReturnAddressPtr returnAddress) |
|
354 { |
|
355 if (!reparseForExceptionInfoIfNecessary(callFrame)) |
|
356 return 1; |
|
357 return binaryChop<CallReturnOffsetToBytecodeOffset, unsigned, getCallReturnOffset>(callReturnIndexVector().begin(), callReturnIndexVector().size(), getJITCode().offsetOf(returnAddress.value()))->bytecodeOffset; |
|
358 } |
|
359 |
|
360 bool functionRegisterForBytecodeOffset(unsigned bytecodeOffset, int& functionRegisterIndex); |
|
361 #endif |
|
362 #if ENABLE(INTERPRETER) |
|
363 unsigned bytecodeOffset(CallFrame*, Instruction* returnAddress) |
|
364 { |
|
365 return static_cast<Instruction*>(returnAddress) - instructions().begin(); |
|
366 } |
|
367 #endif |
|
368 |
|
369 void setIsNumericCompareFunction(bool isNumericCompareFunction) { m_isNumericCompareFunction = isNumericCompareFunction; } |
|
370 bool isNumericCompareFunction() { return m_isNumericCompareFunction; } |
|
371 |
|
372 Vector<Instruction>& instructions() { return m_instructions; } |
|
373 void discardBytecode() { m_instructions.clear(); } |
|
374 |
|
375 #ifndef NDEBUG |
|
376 unsigned instructionCount() { return m_instructionCount; } |
|
377 void setInstructionCount(unsigned instructionCount) { m_instructionCount = instructionCount; } |
|
378 #endif |
|
379 |
|
380 #if ENABLE(JIT) |
|
381 JITCode& getJITCode() { return m_isConstructor ? ownerExecutable()->generatedJITCodeForConstruct() : ownerExecutable()->generatedJITCodeForCall(); } |
|
382 ExecutablePool* executablePool() { return getJITCode().getExecutablePool(); } |
|
383 #endif |
|
384 |
|
385 ScriptExecutable* ownerExecutable() const { return m_ownerExecutable; } |
|
386 |
|
387 void setGlobalData(JSGlobalData* globalData) { m_globalData = globalData; } |
|
388 |
|
389 void setThisRegister(int thisRegister) { m_thisRegister = thisRegister; } |
|
390 int thisRegister() const { return m_thisRegister; } |
|
391 |
|
392 void setNeedsFullScopeChain(bool needsFullScopeChain) { m_needsFullScopeChain = needsFullScopeChain; } |
|
393 bool needsFullScopeChain() const { return m_needsFullScopeChain; } |
|
394 void setUsesEval(bool usesEval) { m_usesEval = usesEval; } |
|
395 bool usesEval() const { return m_usesEval; } |
|
396 |
|
397 void setArgumentsRegister(int argumentsRegister) |
|
398 { |
|
399 ASSERT(argumentsRegister != -1); |
|
400 m_argumentsRegister = argumentsRegister; |
|
401 ASSERT(usesArguments()); |
|
402 } |
|
403 int argumentsRegister() |
|
404 { |
|
405 ASSERT(usesArguments()); |
|
406 return m_argumentsRegister; |
|
407 } |
|
408 bool usesArguments() const { return m_argumentsRegister != -1; } |
|
409 |
|
410 CodeType codeType() const { return m_codeType; } |
|
411 |
|
412 SourceProvider* source() const { return m_source.get(); } |
|
413 unsigned sourceOffset() const { return m_sourceOffset; } |
|
414 |
|
415 size_t numberOfJumpTargets() const { return m_jumpTargets.size(); } |
|
416 void addJumpTarget(unsigned jumpTarget) { m_jumpTargets.append(jumpTarget); } |
|
417 unsigned jumpTarget(int index) const { return m_jumpTargets[index]; } |
|
418 unsigned lastJumpTarget() const { return m_jumpTargets.last(); } |
|
419 |
|
420 #if ENABLE(INTERPRETER) |
|
421 void addPropertyAccessInstruction(unsigned propertyAccessInstruction) { m_propertyAccessInstructions.append(propertyAccessInstruction); } |
|
422 void addGlobalResolveInstruction(unsigned globalResolveInstruction) { m_globalResolveInstructions.append(globalResolveInstruction); } |
|
423 bool hasGlobalResolveInstructionAtBytecodeOffset(unsigned bytecodeOffset); |
|
424 #endif |
|
425 #if ENABLE(JIT) |
|
426 size_t numberOfStructureStubInfos() const { return m_structureStubInfos.size(); } |
|
427 void addStructureStubInfo(const StructureStubInfo& stubInfo) { m_structureStubInfos.append(stubInfo); } |
|
428 StructureStubInfo& structureStubInfo(int index) { return m_structureStubInfos[index]; } |
|
429 |
|
430 void addGlobalResolveInfo(unsigned globalResolveInstruction) { m_globalResolveInfos.append(GlobalResolveInfo(globalResolveInstruction)); } |
|
431 GlobalResolveInfo& globalResolveInfo(int index) { return m_globalResolveInfos[index]; } |
|
432 bool hasGlobalResolveInfoAtBytecodeOffset(unsigned bytecodeOffset); |
|
433 |
|
434 size_t numberOfCallLinkInfos() const { return m_callLinkInfos.size(); } |
|
435 void addCallLinkInfo() { m_callLinkInfos.append(CallLinkInfo()); } |
|
436 CallLinkInfo& callLinkInfo(int index) { return m_callLinkInfos[index]; } |
|
437 |
|
438 void addMethodCallLinkInfos(unsigned n) { m_methodCallLinkInfos.grow(n); } |
|
439 MethodCallLinkInfo& methodCallLinkInfo(int index) { return m_methodCallLinkInfos[index]; } |
|
440 |
|
441 void addFunctionRegisterInfo(unsigned bytecodeOffset, int functionIndex) { createRareDataIfNecessary(); m_rareData->m_functionRegisterInfos.append(FunctionRegisterInfo(bytecodeOffset, functionIndex)); } |
|
442 #endif |
|
443 |
|
444 // Exception handling support |
|
445 |
|
446 size_t numberOfExceptionHandlers() const { return m_rareData ? m_rareData->m_exceptionHandlers.size() : 0; } |
|
447 void addExceptionHandler(const HandlerInfo& hanler) { createRareDataIfNecessary(); return m_rareData->m_exceptionHandlers.append(hanler); } |
|
448 HandlerInfo& exceptionHandler(int index) { ASSERT(m_rareData); return m_rareData->m_exceptionHandlers[index]; } |
|
449 |
|
450 bool hasExceptionInfo() const { return m_exceptionInfo; } |
|
451 void clearExceptionInfo() { m_exceptionInfo.clear(); } |
|
452 PassOwnPtr<ExceptionInfo> extractExceptionInfo(); |
|
453 |
|
454 void addExpressionInfo(const ExpressionRangeInfo& expressionInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_expressionInfo.append(expressionInfo); } |
|
455 void addGetByIdExceptionInfo(const GetByIdExceptionInfo& info) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_getByIdExceptionInfo.append(info); } |
|
456 |
|
457 size_t numberOfLineInfos() const { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.size(); } |
|
458 void addLineInfo(const LineInfo& lineInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_lineInfo.append(lineInfo); } |
|
459 LineInfo& lastLineInfo() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.last(); } |
|
460 |
|
461 #if ENABLE(JIT) |
|
462 Vector<CallReturnOffsetToBytecodeOffset>& callReturnIndexVector() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_callReturnIndexVector; } |
|
463 #endif |
|
464 |
|
465 // Constant Pool |
|
466 |
|
467 size_t numberOfIdentifiers() const { return m_identifiers.size(); } |
|
468 void addIdentifier(const Identifier& i) { return m_identifiers.append(i); } |
|
469 Identifier& identifier(int index) { return m_identifiers[index]; } |
|
470 |
|
471 size_t numberOfConstantRegisters() const { return m_constantRegisters.size(); } |
|
472 void addConstantRegister(const Register& r) { return m_constantRegisters.append(r); } |
|
473 Register& constantRegister(int index) { return m_constantRegisters[index - FirstConstantRegisterIndex]; } |
|
474 ALWAYS_INLINE bool isConstantRegisterIndex(int index) const { return index >= FirstConstantRegisterIndex; } |
|
475 ALWAYS_INLINE JSValue getConstant(int index) const { return m_constantRegisters[index - FirstConstantRegisterIndex].jsValue(); } |
|
476 |
|
477 unsigned addFunctionDecl(NonNullPassRefPtr<FunctionExecutable> n) { unsigned size = m_functionDecls.size(); m_functionDecls.append(n); return size; } |
|
478 FunctionExecutable* functionDecl(int index) { return m_functionDecls[index].get(); } |
|
479 int numberOfFunctionDecls() { return m_functionDecls.size(); } |
|
480 unsigned addFunctionExpr(NonNullPassRefPtr<FunctionExecutable> n) { unsigned size = m_functionExprs.size(); m_functionExprs.append(n); return size; } |
|
481 FunctionExecutable* functionExpr(int index) { return m_functionExprs[index].get(); } |
|
482 |
|
483 unsigned addRegExp(RegExp* r) { createRareDataIfNecessary(); unsigned size = m_rareData->m_regexps.size(); m_rareData->m_regexps.append(r); return size; } |
|
484 RegExp* regexp(int index) const { ASSERT(m_rareData); return m_rareData->m_regexps[index].get(); } |
|
485 |
|
486 |
|
487 // Jump Tables |
|
488 |
|
489 size_t numberOfImmediateSwitchJumpTables() const { return m_rareData ? m_rareData->m_immediateSwitchJumpTables.size() : 0; } |
|
490 SimpleJumpTable& addImmediateSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_immediateSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_immediateSwitchJumpTables.last(); } |
|
491 SimpleJumpTable& immediateSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_immediateSwitchJumpTables[tableIndex]; } |
|
492 |
|
493 size_t numberOfCharacterSwitchJumpTables() const { return m_rareData ? m_rareData->m_characterSwitchJumpTables.size() : 0; } |
|
494 SimpleJumpTable& addCharacterSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_characterSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_characterSwitchJumpTables.last(); } |
|
495 SimpleJumpTable& characterSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_characterSwitchJumpTables[tableIndex]; } |
|
496 |
|
497 size_t numberOfStringSwitchJumpTables() const { return m_rareData ? m_rareData->m_stringSwitchJumpTables.size() : 0; } |
|
498 StringJumpTable& addStringSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_stringSwitchJumpTables.append(StringJumpTable()); return m_rareData->m_stringSwitchJumpTables.last(); } |
|
499 StringJumpTable& stringSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_stringSwitchJumpTables[tableIndex]; } |
|
500 |
|
501 |
|
502 SymbolTable* symbolTable() { return m_symbolTable; } |
|
503 SharedSymbolTable* sharedSymbolTable() { ASSERT(m_codeType == FunctionCode); return static_cast<SharedSymbolTable*>(m_symbolTable); } |
|
504 |
|
505 EvalCodeCache& evalCodeCache() { createRareDataIfNecessary(); return m_rareData->m_evalCodeCache; } |
|
506 |
|
507 void shrinkToFit(); |
|
508 |
|
509 // FIXME: Make these remaining members private. |
|
510 |
|
511 int m_numCalleeRegisters; |
|
512 int m_numVars; |
|
513 int m_numParameters; |
|
514 bool m_isConstructor; |
|
515 |
|
516 private: |
|
517 #if !defined(NDEBUG) || ENABLE(OPCODE_SAMPLING) |
|
518 void dump(ExecState*, const Vector<Instruction>::const_iterator& begin, Vector<Instruction>::const_iterator&) const; |
|
519 |
|
520 CString registerName(ExecState*, int r) const; |
|
521 void printUnaryOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const; |
|
522 void printBinaryOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const; |
|
523 void printConditionalJump(ExecState*, const Vector<Instruction>::const_iterator&, Vector<Instruction>::const_iterator&, int location, const char* op) const; |
|
524 void printGetByIdOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const; |
|
525 void printPutByIdOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const; |
|
526 #endif |
|
527 |
|
528 bool reparseForExceptionInfoIfNecessary(CallFrame*) WARN_UNUSED_RETURN; |
|
529 |
|
530 void createRareDataIfNecessary() |
|
531 { |
|
532 if (!m_rareData) |
|
533 m_rareData = adoptPtr(new RareData); |
|
534 } |
|
535 |
|
536 ScriptExecutable* m_ownerExecutable; |
|
537 JSGlobalData* m_globalData; |
|
538 |
|
539 Vector<Instruction> m_instructions; |
|
540 #ifndef NDEBUG |
|
541 unsigned m_instructionCount; |
|
542 #endif |
|
543 |
|
544 int m_thisRegister; |
|
545 int m_argumentsRegister; |
|
546 |
|
547 bool m_needsFullScopeChain; |
|
548 bool m_usesEval; |
|
549 bool m_isNumericCompareFunction; |
|
550 |
|
551 CodeType m_codeType; |
|
552 |
|
553 RefPtr<SourceProvider> m_source; |
|
554 unsigned m_sourceOffset; |
|
555 |
|
556 #if ENABLE(INTERPRETER) |
|
557 Vector<unsigned> m_propertyAccessInstructions; |
|
558 Vector<unsigned> m_globalResolveInstructions; |
|
559 #endif |
|
560 #if ENABLE(JIT) |
|
561 Vector<StructureStubInfo> m_structureStubInfos; |
|
562 Vector<GlobalResolveInfo> m_globalResolveInfos; |
|
563 Vector<CallLinkInfo> m_callLinkInfos; |
|
564 Vector<MethodCallLinkInfo> m_methodCallLinkInfos; |
|
565 Vector<CallLinkInfo*> m_linkedCallerList; |
|
566 #endif |
|
567 |
|
568 Vector<unsigned> m_jumpTargets; |
|
569 |
|
570 // Constant Pool |
|
571 Vector<Identifier> m_identifiers; |
|
572 Vector<Register> m_constantRegisters; |
|
573 Vector<RefPtr<FunctionExecutable> > m_functionDecls; |
|
574 Vector<RefPtr<FunctionExecutable> > m_functionExprs; |
|
575 |
|
576 SymbolTable* m_symbolTable; |
|
577 |
|
578 OwnPtr<ExceptionInfo> m_exceptionInfo; |
|
579 |
|
580 struct RareData : FastAllocBase { |
|
581 Vector<HandlerInfo> m_exceptionHandlers; |
|
582 |
|
583 // Rare Constants |
|
584 Vector<RefPtr<RegExp> > m_regexps; |
|
585 |
|
586 // Jump Tables |
|
587 Vector<SimpleJumpTable> m_immediateSwitchJumpTables; |
|
588 Vector<SimpleJumpTable> m_characterSwitchJumpTables; |
|
589 Vector<StringJumpTable> m_stringSwitchJumpTables; |
|
590 |
|
591 EvalCodeCache m_evalCodeCache; |
|
592 |
|
593 #if ENABLE(JIT) |
|
594 Vector<FunctionRegisterInfo> m_functionRegisterInfos; |
|
595 #endif |
|
596 }; |
|
597 OwnPtr<RareData> m_rareData; |
|
598 }; |
|
599 |
|
    // Program code is not marked by any function, so we make the global object
    // responsible for marking it.

    class GlobalCodeBlock : public CodeBlock {
    public:
        // Registers this block in the global object's code-block set so the
        // global object can mark it. Note: &m_unsharedSymbolTable is passed to
        // the base before the member is constructed; this assumes the
        // CodeBlock constructor only stores the pointer — TODO confirm.
        GlobalCodeBlock(ScriptExecutable* ownerExecutable, CodeType codeType, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset, JSGlobalObject* globalObject)
            : CodeBlock(ownerExecutable, codeType, sourceProvider, sourceOffset, &m_unsharedSymbolTable, false)
            , m_globalObject(globalObject)
        {
            m_globalObject->codeBlocks().add(this);
        }

        ~GlobalCodeBlock()
        {
            // The global object may already have severed the link via
            // clearGlobalObject(), presumably when it was destroyed first.
            if (m_globalObject)
                m_globalObject->codeBlocks().remove(this);
        }

        // Severs the back-link so the destructor will not touch the (possibly
        // dead) global object.
        void clearGlobalObject() { m_globalObject = 0; }

    private:
        JSGlobalObject* m_globalObject; // For program and eval nodes, the global object that marks the constant pool.
        SymbolTable m_unsharedSymbolTable; // Per-block symbol table, unlike the shared one used by FunctionCodeBlock.
    };
|
624 |
|
    // CodeBlock for whole-program code; starts at the beginning of its source,
    // hence the fixed sourceOffset of 0.
    class ProgramCodeBlock : public GlobalCodeBlock {
    public:
        ProgramCodeBlock(ProgramExecutable* ownerExecutable, CodeType codeType, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider)
            : GlobalCodeBlock(ownerExecutable, codeType, sourceProvider, 0, globalObject)
        {
        }
    };
|
632 |
|
    // CodeBlock for code compiled by eval(). Tracks the scope depth at the
    // call site and the variables the eval code declares.
    class EvalCodeBlock : public GlobalCodeBlock {
    public:
        EvalCodeBlock(EvalExecutable* ownerExecutable, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider, int baseScopeDepth)
            : GlobalCodeBlock(ownerExecutable, EvalCode, sourceProvider, 0, globalObject)
            , m_baseScopeDepth(baseScopeDepth)
        {
        }

        int baseScopeDepth() const { return m_baseScopeDepth; }

        const Identifier& variable(unsigned index) { return m_variables[index]; }
        unsigned numVariables() { return m_variables.size(); }
        // Takes ownership of 'variables' by swapping it into m_variables;
        // asserts it is only called while the list is still empty.
        void adoptVariables(Vector<Identifier>& variables)
        {
            ASSERT(m_variables.isEmpty());
            m_variables.swap(variables);
        }

    private:
        int m_baseScopeDepth; // Scope-chain depth at the point of the eval call.
        Vector<Identifier> m_variables; // Presumably the names declared by the eval code — confirm at the caller of adoptVariables.
    };
|
655 |
|
    // CodeBlock for a function body; its symbol table is a SharedSymbolTable
    // with manually-managed ref counting (see comment below).
    class FunctionCodeBlock : public CodeBlock {
    public:
        // Rather than using the usual RefCounted::create idiom for SharedSymbolTable we just use new
        // as we need to initialise the CodeBlock before we could initialise any RefPtr to hold the shared
        // symbol table, so we just pass as a raw pointer with a ref count of 1. We then manually deref
        // in the destructor.
        FunctionCodeBlock(FunctionExecutable* ownerExecutable, CodeType codeType, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset, bool isConstructor)
            : CodeBlock(ownerExecutable, codeType, sourceProvider, sourceOffset, SharedSymbolTable::create().releaseRef(), isConstructor)
        {
        }
        // Balances the ref leaked by releaseRef() in the constructor.
        ~FunctionCodeBlock()
        {
            sharedSymbolTable()->deref();
        }
    };
|
671 |
|
    // Transfers ownership of the exception info to the caller, leaving
    // m_exceptionInfo null; the info must be present when this is called.
    inline PassOwnPtr<ExceptionInfo> CodeBlock::extractExceptionInfo()
    {
        ASSERT(m_exceptionInfo);
        return m_exceptionInfo.release();
    }
|
677 |
|
    // Resolves a register index for this frame: constant indices are served
    // from the CodeBlock's constant pool, everything else indexes directly
    // off the call frame.
    inline Register& ExecState::r(int index)
    {
        CodeBlock* codeBlock = this->codeBlock();
        if (codeBlock->isConstantRegisterIndex(index))
            return codeBlock->constantRegister(index);
        return this[index];
    }
|
685 |
|
686 } // namespace JSC |
|
687 |
|
688 #endif // CodeBlock_h |