JavaScriptCore/jit/JITInlineMethods.h
/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JITInlineMethods_h
#define JITInlineMethods_h

#if ENABLE(JIT)

namespace JSC {

/* Deprecated: Please use JITStubCall instead. */

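// Each JSValue stub argument occupies sizeof(JSValue) / sizeof(void*)
// machine-word stack slots (two words on a 32-bit JSVALUE32_64 build, one on
// a 64-bit build), and the argument area begins JITSTACKFRAME_ARGS_INDEX
// words into the stack frame. With one-word JSValues, for example, argument 2
// is peeked from slot JITSTACKFRAME_ARGS_INDEX + 2.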
       
ALWAYS_INLINE void JIT::emitGetJITStubArg(unsigned argumentNumber, RegisterID dst)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    peek(dst, argumentStackOffset);
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
}

ALWAYS_INLINE JSValue JIT::getConstantOperand(unsigned src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}

ALWAYS_INLINE void JIT::emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(from, Address(callFrameRegister, entry * sizeof(Register)));
}

ALWAYS_INLINE void JIT::emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(ImmPtr(value), Address(callFrameRegister, entry * sizeof(Register)));
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    loadPtr(Address(from, entry * sizeof(Register)), to);
#if !USE(JSVALUE32_64)
    killLastResultRegister();
#endif
}

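// Fast path for reading the first character of a JSString. The caller's slow
// path (the 'failures' list) is taken if the cell's vptr is not JSString's,
// if the string is a rope (m_fiberCount is non-zero), or if its length is not
// exactly 1; otherwise dst ends up holding the single 16-bit code unit.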
       
ALWAYS_INLINE void JIT::emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures)
{
    failures.append(branchPtr(NotEqual, Address(src), ImmPtr(m_globalData->jsStringVPtr)));
    failures.append(branchTest32(NonZero, Address(src, OBJECT_OFFSETOF(JSString, m_fiberCount))));
    failures.append(branch32(NotEqual, MacroAssembler::Address(src, ThunkHelpers::jsStringLengthOffset()), Imm32(1)));
    loadPtr(MacroAssembler::Address(src, ThunkHelpers::jsStringValueOffset()), dst);
    loadPtr(MacroAssembler::Address(dst, ThunkHelpers::stringImplDataOffset()), dst);
    load16(MacroAssembler::Address(dst, 0), dst);
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load32(Address(from, entry * sizeof(Register)), to);
#if !USE(JSVALUE32_64)
    killLastResultRegister();
#endif
}

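// emitNakedCall emits a near call whose target is not yet known. The
// CallRecord appended to m_calls carries the call site, the bytecode offset,
// and the eventual target, so the link phase can patch the call once the
// generated code has been copied to its final location.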
       
ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, function.executableAddress()));
    return nakedCall;
}

#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL

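// An "uninterrupted sequence" is a run of instructions that must be emitted
// contiguously: on assemblers with a constant pool (e.g. traditional ARM),
// ensureSpace() guarantees the pool will not be flushed into the middle of
// the sequence. The begin/end bookkeeping below is debug-only and asserts
// that the sequence used exactly the space the caller reserved.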
       
ALWAYS_INLINE void JIT::beginUninterruptedSequence(int insnSpace, int constSpace)
{
#if CPU(ARM_TRADITIONAL)
#ifndef NDEBUG
    // Ensure the label emitted after the sequence can also fit.
    insnSpace += sizeof(ARMWord);
    constSpace += sizeof(uint64_t);
#endif

    ensureSpace(insnSpace, constSpace);
#endif

#ifndef NDEBUG
    m_uninterruptedInstructionSequenceBegin = label();
    m_uninterruptedConstantSequenceBegin = sizeOfConstantPool();
#endif
}

ALWAYS_INLINE void JIT::endUninterruptedSequence(int insnSpace, int constSpace)
{
    ASSERT(differenceBetween(m_uninterruptedInstructionSequenceBegin, label()) == insnSpace);
    ASSERT(sizeOfConstantPool() - m_uninterruptedConstantSequenceBegin == constSpace);
}

#endif // ASSEMBLER_HAS_CONSTANT_POOL

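// Return-address handling is per-ABI: ARM and MIPS keep the return address in
// a register (lr / $ra), so preserving it is a register move, while on x86
// and x86-64 it lives on the stack and is popped/pushed instead.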
       
#if CPU(ARM)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    move(linkRegister, reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    move(reg, linkRegister);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtr(address, linkRegister);
}

#elif CPU(MIPS)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    move(returnAddressRegister, reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    move(reg, returnAddressRegister);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtr(address, returnAddressRegister);
}

#else // CPU(X86) || CPU(X86_64)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    pop(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    push(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    push(address);
}

#endif

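// Before calling into a JIT stub, the C calling convention's first argument
// register is pointed at the JITStackFrame (the current stack pointer), and
// the CallFrame pointer is spilled into its slot so the stub can find it.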
       
ALWAYS_INLINE void JIT::restoreArgumentReference()
{
    move(stackPointerRegister, firstArgumentRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
}

ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline()
{
#if CPU(X86)
    // Within a trampoline the return address will be on the stack at this point.
    addPtr(Imm32(sizeof(void*)), stackPointerRegister, firstArgumentRegister);
#elif CPU(ARM)
    move(stackPointerRegister, firstArgumentRegister);
#endif
    // In the trampoline on x86-64, the first argument register is not overwritten.
}

ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchPtr(NotEqual, Address(reg, OBJECT_OFFSETOF(JSCell, m_structure)), ImmPtr(structure));
}

ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}

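// Slow-case bookkeeping: guard jumps emitted on the hot path are recorded
// here, keyed by the current bytecode offset, and the matching emitSlow_*
// function later links them in the same order. A typical (illustrative)
// pairing looks like:
//
//     // hot path:
//     emitJumpSlowCaseIfNotImmediateInteger(regT0);
//     ...
//     // slow path, in the emitSlow_* counterpart:
//     linkSlowCase(iter);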
       
ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    const JumpList::JumpVector& jumpVector = jumpList.jumps();
    size_t size = jumpVector.size();
    for (size_t i = 0; i < size; ++i)
        m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeOffset + relativeOffset));
}

ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    jump.linkTo(m_labels[m_bytecodeOffset + relativeOffset], this);
}

#if ENABLE(SAMPLING_FLAGS)
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(Imm32(1u << (flag - 1)), AbsoluteAddress(&SamplingFlags::s_flags));
}

ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(Imm32(~(1u << (flag - 1))), AbsoluteAddress(&SamplingFlags::s_flags));
}
#endif

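// Sampling counters are 64 bits wide. A 64-bit target can bump one with a
// single addPtr; little-endian 32-bit targets emulate the increment with an
// add32 of the low word followed by an add-with-carry of zero into the high
// word (the hiWord address computed below).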
       
#if ENABLE(SAMPLING_COUNTERS)
ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, uint32_t count)
{
#if CPU(X86_64) // Or any other 64-bit platform.
    addPtr(Imm32(count), AbsoluteAddress(&counter.m_counter));
#elif CPU(X86) // Or any other little-endian 32-bit platform.
    intptr_t hiWord = reinterpret_cast<intptr_t>(&counter.m_counter) + sizeof(int32_t);
    add32(Imm32(count), AbsoluteAddress(&counter.m_counter));
    addWithCarry32(Imm32(0), AbsoluteAddress(reinterpret_cast<void*>(hiWord)));
#else
#error "SAMPLING_COUNTERS not implemented on this platform."
#endif
}
#endif

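// On X86_64 a pointer-sized immediate cannot be stored to an absolute address
// in a single instruction, so the sample slot's address is first materialized
// in ecx; the 32-bit variants store straight to the absolute address.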
       
#if ENABLE(OPCODE_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(ImmPtr(m_interpreter->sampler()->sampleSlot()), X86Registers::ecx);
    storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
#endif
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(ImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86Registers::ecx);
    storePtr(ImmPtr(codeBlock), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(ImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
#endif
#endif

ALWAYS_INLINE bool JIT::isOperandConstantImmediateChar(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isString() && asString(getConstantOperand(src).asCell())->length() == 1;
}

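// With USE(JSVALUE32_64), a JSValue is represented as a 32-bit tag plus a
// 32-bit payload occupying adjacent words in the register file. The helpers
// below load and store the two halves separately, and a one-entry cache
// (map/unmap/isMapped, further down) tracks which machine registers currently
// hold a virtual register's tag and payload so redundant loads can be elided.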
       
#if USE(JSVALUE32_64)

inline void JIT::emitLoadTag(unsigned index, RegisterID tag)
{
    RegisterID mappedTag;
    if (getMappedTag(index, mappedTag)) {
        move(mappedTag, tag);
        unmap(tag);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).tag()), tag);
        unmap(tag);
        return;
    }

    load32(tagFor(index), tag);
    unmap(tag);
}

inline void JIT::emitLoadPayload(unsigned index, RegisterID payload)
{
    RegisterID mappedPayload;
    if (getMappedPayload(index, mappedPayload)) {
        move(mappedPayload, payload);
        unmap(payload);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).payload()), payload);
        unmap(payload);
        return;
    }

    load32(payloadFor(index), payload);
    unmap(payload);
}

inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}

inline void JIT::emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    ASSERT(tag != payload);

    if (base == callFrameRegister) {
        ASSERT(payload != base);
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}

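// When loading two operands, load the mapped one first: loading the other
// operand may write to (and thereby unmap) the cached registers, which would
// forfeit the cheap register-to-register moves for the mapped operand.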
       
inline void JIT::emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2)
{
    if (isMapped(index1)) {
        emitLoad(index1, tag1, payload1);
        emitLoad(index2, tag2, payload2);
        return;
    }
    emitLoad(index2, tag2, payload2);
    emitLoad(index1, tag1, payload1);
}

inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}

inline void JIT::emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}

inline void JIT::emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(Imm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreInt32(unsigned index, Imm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(Imm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(Imm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreBool(unsigned index, RegisterID tag, bool indexIsBool)
{
    if (!indexIsBool)
        store32(Imm32(0), payloadFor(index, callFrameRegister));
    store32(tag, tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreDouble(unsigned index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}

inline void JIT::emitStore(unsigned index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    emitStore(dst, jsUndefined());
}

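// The register cache is only sound within straight-line code: if the current
// bytecode offset is a jump target, control may arrive with the registers in
// a different state, so map() refuses to cache across a label (isLabeled
// scans the code block's sorted jump-target list to detect this).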
       
inline bool JIT::isLabeled(unsigned bytecodeOffset)
{
    for (size_t numberOfJumpTargets = m_codeBlock->numberOfJumpTargets(); m_jumpTargetIndex != numberOfJumpTargets; ++m_jumpTargetIndex) {
        unsigned jumpTarget = m_codeBlock->jumpTarget(m_jumpTargetIndex);
        if (jumpTarget == bytecodeOffset)
            return true;
        if (jumpTarget > bytecodeOffset)
            return false;
    }
    return false;
}

inline void JIT::map(unsigned bytecodeOffset, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload)
{
    if (isLabeled(bytecodeOffset))
        return;

    m_mappedBytecodeOffset = bytecodeOffset;
    m_mappedVirtualRegisterIndex = virtualRegisterIndex;
    m_mappedTag = tag;
    m_mappedPayload = payload;
}

inline void JIT::unmap(RegisterID registerID)
{
    if (m_mappedTag == registerID)
        m_mappedTag = (RegisterID)-1;
    else if (m_mappedPayload == registerID)
        m_mappedPayload = (RegisterID)-1;
}

inline void JIT::unmap()
{
    m_mappedBytecodeOffset = (unsigned)-1;
    m_mappedVirtualRegisterIndex = (unsigned)-1;
    m_mappedTag = (RegisterID)-1;
    m_mappedPayload = (RegisterID)-1;
}

inline bool JIT::isMapped(unsigned virtualRegisterIndex)
{
    if (m_mappedBytecodeOffset != m_bytecodeOffset)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    return true;
}

inline bool JIT::getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload)
{
    if (m_mappedBytecodeOffset != m_bytecodeOffset)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedPayload == (RegisterID)-1)
        return false;
    payload = m_mappedPayload;
    return true;
}

inline bool JIT::getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag)
{
    if (m_mappedBytecodeOffset != m_bytecodeOffset)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedTag == (RegisterID)-1)
        return false;
    tag = m_mappedTag;
    return true;
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(emitJumpIfNotJSCell(virtualRegisterIndex));
    }
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(branch32(NotEqual, tag, Imm32(JSValue::CellTag)));
    }
}

inline void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        linkSlowCase(iter);
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant)
{
    if (isOperandConstantImmediateInt(op1)) {
        constant = getConstantOperand(op1).asInt32();
        op = op2;
        return true;
    }

    if (isOperandConstantImmediateInt(op2)) {
        constant = getConstantOperand(op2).asInt32();
        op = op1;
        return true;
    }

    return false;
}

#else // USE(JSVALUE32_64)

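// In the single-word encodings, the JIT keeps a one-entry result cache:
// m_lastResultBytecodeRegister remembers which virtual register was last
// written from cachedResultRegister (eax on x86), letting a following read of
// the same temporary skip the load. killLastResultRegister() invalidates the
// cache; any operation that clobbers the register must call it.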
       
ALWAYS_INLINE void JIT::killLastResultRegister()
{
    m_lastResultBytecodeRegister = std::numeric_limits<int>::max();
}

// Load a virtual register from the register file into a hardware register.
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        move(ImmPtr(JSValue::encode(value)), dst);
        killLastResultRegister();
        return;
    }

    if (src == m_lastResultBytecodeRegister && m_codeBlock->isTemporaryRegisterIndex(src)) {
        bool atJumpTarget = false;
        while (m_jumpTargetsPosition < m_codeBlock->numberOfJumpTargets() && m_codeBlock->jumpTarget(m_jumpTargetsPosition) <= m_bytecodeOffset) {
            if (m_codeBlock->jumpTarget(m_jumpTargetsPosition) == m_bytecodeOffset)
                atJumpTarget = true;
            ++m_jumpTargetsPosition;
        }

        if (!atJumpTarget) {
            // The value we want is already in cachedResultRegister.
            if (dst != cachedResultRegister)
                move(cachedResultRegister, dst);
            killLastResultRegister();
            return;
        }
    }

    loadPtr(Address(callFrameRegister, src * sizeof(Register)), dst);
    killLastResultRegister();
}

ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    // Load the operand cached in the result register first, so that loading
    // the other operand cannot invalidate the cache before it is used.
    if (src2 == m_lastResultBytecodeRegister) {
        emitGetVirtualRegister(src2, dst2);
        emitGetVirtualRegister(src1, dst1);
    } else {
        emitGetVirtualRegister(src1, dst1);
        emitGetVirtualRegister(src2, dst2);
    }
}

ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(unsigned src)
{
    return getConstantOperand(src).asInt32();
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(unsigned dst, RegisterID from)
{
    storePtr(from, Address(callFrameRegister, dst * sizeof(Register)));
    m_lastResultBytecodeRegister = (from == cachedResultRegister) ? static_cast<int>(dst) : std::numeric_limits<int>::max();
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    storePtr(ImmPtr(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}

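// Immediate tagging, JSVALUE64 vs. legacy 32-bit: under JSVALUE64 the tag
// lives in the upper bits, so a cell is recognized by having no bits of
// tagMaskRegister set, and an immediate integer by comparing unsigned
// above-or-equal against tagTypeNumberRegister. The 32-bit encoding instead
// tests the low tag bits (JSImmediate::TagMask / TagTypeNumber).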
       
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(Zero, reg, tagMaskRegister);
#else
    return branchTest32(Zero, reg, Imm32(JSImmediate::TagMask));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    orPtr(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(NonZero, reg, tagMaskRegister);
#else
    return branchTest32(NonZero, reg, Imm32(JSImmediate::TagMask));
#endif
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}

#if USE(JSVALUE64)

inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        convertInt32ToDouble(AbsoluteAddress(&inConstantPool), value);
    } else
        convertInt32ToDouble(addressFor(index), value);
}
#endif

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(AboveOrEqual, reg, tagTypeNumberRegister);
#else
    return branchTest32(NonZero, reg, Imm32(JSImmediate::TagTypeNumber));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(Below, reg, tagTypeNumberRegister);
#else
    return branchTest32(Zero, reg, Imm32(JSImmediate::TagTypeNumber));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    andPtr(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateNumber(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateNumber(reg));
}

#if !USE(JSVALUE64)
ALWAYS_INLINE void JIT::emitFastArithDeTagImmediate(RegisterID reg)
{
    subPtr(Imm32(JSImmediate::TagTypeNumber), reg);
}

ALWAYS_INLINE JIT::Jump JIT::emitFastArithDeTagImmediateJumpIfZero(RegisterID reg)
{
    return branchSubPtr(Zero, Imm32(JSImmediate::TagTypeNumber), reg);
}
#endif

ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    emitFastArithIntToImmNoCheck(src, dest);
#else
    if (src != dest)
        move(src, dest);
    addPtr(Imm32(JSImmediate::TagTypeNumber), dest);
#endif
}

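// In the legacy 32-bit immediate encoding an integer n is stored as
// (n << 1) | JSImmediate::TagTypeNumber, which is why the non-JSVALUE64 path
// below doubles the sign-extended value (addPtr(dest, dest) is a shift left
// by one) before re-tagging it. On JSVALUE64, tagging is a single orPtr with
// tagTypeNumberRegister.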
       
// The operand is an int32_t; it must have been zero-extended if the register is 64-bit.
ALWAYS_INLINE void JIT::emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    if (src != dest)
        move(src, dest);
    orPtr(tagTypeNumberRegister, dest);
#else
    signExtend32ToPtr(src, dest);
    addPtr(dest, dest);
    emitFastArithReTagImmediate(dest, dest);
#endif
}

ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
{
    lshift32(Imm32(JSImmediate::ExtendedPayloadShift), reg);
    or32(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), reg);
}

#endif // USE(JSVALUE32_64)

} // namespace JSC

#endif // ENABLE(JIT)

#endif // JITInlineMethods_h