CallLinkInfo inside StructureStubInfo should not use polymorphic stubs

diff --git a/Source/JavaScriptCore/jit/JITCall.cpp b/Source/JavaScriptCore/jit/JITCall.cpp
index 140e16c..1af51ad 100644
--- a/Source/JavaScriptCore/jit/JITCall.cpp
+++ b/Source/JavaScriptCore/jit/JITCall.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2008, 2013, 2014 Apple Inc. All rights reserved.
+ * Copyright (C) 2008, 2013-2015 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -29,7 +29,6 @@
 #if USE(JSVALUE64)
 #include "JIT.h"
 
-#include "Arguments.h"
 #include "CodeBlock.h"
 #include "JITInlines.h"
 #include "JSArray.h"
@@ -37,9 +36,9 @@
 #include "Interpreter.h"
 #include "JSCInlines.h"
 #include "LinkBuffer.h"
-#include "RepatchBuffer.h"
 #include "ResultType.h"
 #include "SamplingTool.h"
+#include "SetupVarargsFrame.h"
 #include "StackAlignment.h"
 #include "ThunkGenerators.h"
 #include <wtf/StringPrintStream.h>
@@ -54,100 +53,51 @@ void JIT::emitPutCallResult(Instruction* instruction)
     emitPutVirtualRegister(dst);
 }
 
-void JIT::compileLoadVarargs(Instruction* instruction)
+void JIT::compileSetupVarargsFrame(Instruction* instruction, CallLinkInfo* info)
 {
     int thisValue = instruction[3].u.operand;
     int arguments = instruction[4].u.operand;
     int firstFreeRegister = instruction[5].u.operand;
     int firstVarArgOffset = instruction[6].u.operand;
 
-    JumpList slowCase;
-    JumpList end;
-    bool canOptimize = m_codeBlock->usesArguments()
-        && arguments == m_codeBlock->argumentsRegister().offset()
-        && !m_codeBlock->symbolTable()->slowArguments();
-
-    if (canOptimize) {
-        emitGetVirtualRegister(arguments, regT0);
-        slowCase.append(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(JSValue()))));
-
-        emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT0);
-        if (firstVarArgOffset) {
-            Jump sufficientArguments = branch32(GreaterThan, regT0, TrustedImm32(firstVarArgOffset + 1));
-            move(TrustedImm32(1), regT0);
-            Jump endVarArgs = jump();
-            sufficientArguments.link(this);
-            sub32(TrustedImm32(firstVarArgOffset), regT0);
-            endVarArgs.link(this);
-        }
-        slowCase.append(branch32(Above, regT0, TrustedImm32(Arguments::MaxArguments + 1)));
-        // regT0: argumentCountIncludingThis
-        move(regT0, regT1);
-        add64(TrustedImm32(-firstFreeRegister + JSStack::CallFrameHeaderSize), regT1);
-        // regT1 now has the required frame size in Register units
-        // Round regT1 to next multiple of stackAlignmentRegisters()
-        add64(TrustedImm32(stackAlignmentRegisters() - 1), regT1);
-        and64(TrustedImm32(~(stackAlignmentRegisters() - 1)), regT1);
-
-        neg64(regT1);
-        lshift64(TrustedImm32(3), regT1);
-        addPtr(callFrameRegister, regT1);
-        // regT1: newCallFrame
-
-        slowCase.append(branchPtr(Above, AbsoluteAddress(m_vm->addressOfStackLimit()), regT1));
-
-        // Initialize ArgumentCount.
-        store32(regT0, Address(regT1, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
-
-        // Initialize 'this'.
-        emitGetVirtualRegister(thisValue, regT2);
-        store64(regT2, Address(regT1, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))));
-
-        // Copy arguments.
-        signExtend32ToPtr(regT0, regT0);
-        end.append(branchSub64(Zero, TrustedImm32(1), regT0));
-        // regT0: argumentCount
-
-        Label copyLoop = label();
-        load64(BaseIndex(callFrameRegister, regT0, TimesEight, (CallFrame::thisArgumentOffset() + firstVarArgOffset) * static_cast<int>(sizeof(Register))), regT2);
-        store64(regT2, BaseIndex(regT1, regT0, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))));
-        branchSub64(NonZero, TrustedImm32(1), regT0).linkTo(copyLoop, this);
-
-        end.append(jump());
-    }
-
-    if (canOptimize)
-        slowCase.link(this);
-
     emitGetVirtualRegister(arguments, regT1);
-    callOperation(operationSizeFrameForVarargs, regT1, firstFreeRegister, firstVarArgOffset);
-    move(returnValueGPR, stackPointerRegister);
-    emitGetVirtualRegister(thisValue, regT1);
+    callOperation(operationSizeFrameForVarargs, regT1, -firstFreeRegister, firstVarArgOffset);
+    move(TrustedImm32(-firstFreeRegister), regT1);
+    emitSetVarargsFrame(*this, returnValueGPR, false, regT1, regT1);
+    addPtr(TrustedImm32(-(sizeof(CallerFrameAndPC) + WTF::roundUpToMultipleOf(stackAlignmentBytes(), 5 * sizeof(void*)))), regT1, stackPointerRegister);
     emitGetVirtualRegister(arguments, regT2);
-    callOperation(operationLoadVarargs, returnValueGPR, regT1, regT2, firstVarArgOffset);
+    callOperation(operationSetupVarargsFrame, regT1, regT2, firstVarArgOffset, regT0);
     move(returnValueGPR, regT1);
 
-    if (canOptimize)
-        end.link(this);
+    // Profile the argument count.
+    load32(Address(regT1, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset), regT2);
+    load8(info->addressOfMaxNumArguments(), regT0);
+    Jump notBiggest = branch32(Above, regT0, regT2);
+    Jump notSaturated = branch32(BelowOrEqual, regT2, TrustedImm32(255));
+    move(TrustedImm32(255), regT2);
+    notSaturated.link(this);
+    store8(regT2, info->addressOfMaxNumArguments());
+    notBiggest.link(this);
     
+    // Initialize 'this'.
+    emitGetVirtualRegister(thisValue, regT0);
+    store64(regT0, Address(regT1, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))));
+
     addPtr(TrustedImm32(sizeof(CallerFrameAndPC)), regT1, stackPointerRegister);
 }
 
 void JIT::compileCallEval(Instruction* instruction)
 {
     addPtr(TrustedImm32(-static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC))), stackPointerRegister, regT1);
-    callOperationNoExceptionCheck(operationCallEval, regT1);
-
-    Jump noException = emitExceptionCheck(InvertedExceptionCheck);
-    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);    
-    exceptionCheck(jump());
-
-    noException.link(this);
-    addSlowCase(branch64(Equal, regT0, TrustedImm64(JSValue::encode(JSValue()))));
+    storePtr(callFrameRegister, Address(regT1, CallFrame::callerFrameOffset()));
 
     addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
     checkStackPointerAlignment();
 
+    callOperation(operationCallEval, regT1);
+
+    addSlowCase(branch64(Equal, regT0, TrustedImm64(JSValue::encode(JSValue()))));
+
     sampleCodeBlock(m_codeBlock);
     
     emitPutCallResult(instruction);
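
A note on the argument-count profiling emitted in compileSetupVarargsFrame above: each varargs call site records a saturating 8-bit maximum of the argument counts it has seen, through info->addressOfMaxNumArguments(). A self-contained sketch of what those branches compute (the helper function and its name are illustrative, not engine code):

    #include <algorithm>
    #include <cstdint>

    // Sketch of the profiling branches above: 'argumentCountIncludingThis' is the
    // value loaded into regT2 from the frame's ArgumentCount slot, and
    // 'maxNumArguments' stands in for the 8-bit slot behind info->addressOfMaxNumArguments().
    static void profileArgumentCount(uint8_t& maxNumArguments, uint32_t argumentCountIncludingThis)
    {
        if (maxNumArguments > argumentCountIncludingThis)
            return; // "notBiggest": the recorded maximum already covers this call.
        // "notSaturated" path: clamp to 255 so the maximum still fits in one byte.
        maxNumArguments = static_cast<uint8_t>(std::min<uint32_t>(argumentCountIncludingThis, 255));
    }
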
@@ -155,11 +105,19 @@ void JIT::compileCallEval(Instruction* instruction)
 
 void JIT::compileCallEvalSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
 {
+    CallLinkInfo* info = m_codeBlock->addCallLinkInfo();
+    info->setUpCall(CallLinkInfo::Call, CodeOrigin(m_bytecodeOffset), regT0);
+
     linkSlowCase(iter);
+    int registerOffset = -instruction[4].u.operand;
+
+    addPtr(TrustedImm32(registerOffset * sizeof(Register) + sizeof(CallerFrameAndPC)), callFrameRegister, stackPointerRegister);
 
     load64(Address(stackPointerRegister, sizeof(Register) * JSStack::Callee - sizeof(CallerFrameAndPC)), regT0);
-    move(TrustedImmPtr(&CallLinkInfo::dummy()), regT2);
-    emitNakedCall(m_vm->getCTIStub(virtualCallThunkGenerator).code());
+    move(TrustedImmPtr(info), regT2);
+    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(m_vm, *info);
+    info->setSlowStub(createJITStubRoutine(virtualThunk, *m_vm, nullptr, true));
+    emitNakedCall(virtualThunk.code());
     addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
     checkStackPointerAlignment();
 
@@ -177,19 +135,21 @@ void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned ca
         - Initializes ArgumentCount; CallerFrame; Callee.
 
        For a JS call:
-        - Caller initializes ScopeChain.
         - Callee initializes ReturnPC; CodeBlock.
         - Callee restores callFrameRegister before return.
 
        For a non-JS call:
-        - Caller initializes ScopeChain; ReturnPC; CodeBlock.
+        - Caller initializes ReturnPC; CodeBlock.
         - Caller restores callFrameRegister after return.
     */
     COMPILE_ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct), call_and_construct_opcodes_must_be_same_length);
     COMPILE_ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_varargs), call_and_call_varargs_opcodes_must_be_same_length);
     COMPILE_ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct_varargs), call_and_construct_varargs_opcodes_must_be_same_length);
+    CallLinkInfo* info;
+    if (opcodeID != op_call_eval)
+        info = m_codeBlock->addCallLinkInfo();
     if (opcodeID == op_call_varargs || opcodeID == op_construct_varargs)
-        compileLoadVarargs(instruction);
+        compileSetupVarargsFrame(instruction, info);
     else {
         int argCount = instruction[3].u.operand;
         int registerOffset = -instruction[4].u.operand;
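
The comment block above spells out which side of a call fills in each frame-header slot. Restated as a small stand-in table (illustrative types only, not an engine data structure); callFrameRegister restoration follows the same split, by the callee before return for JS calls and by the caller after return otherwise:

    // Who initializes each call frame header slot, per the comment above.
    enum class SketchInitializer { Caller, Callee };

    struct SketchHeaderSlotDuty {
        const char* slot;
        SketchInitializer jsCall;     // who fills the slot when calling a JS callee
        SketchInitializer nativeCall; // who fills it for a non-JS callee
    };

    static const SketchHeaderSlotDuty kCallFrameDuties[] = {
        { "ArgumentCount", SketchInitializer::Caller, SketchInitializer::Caller },
        { "CallerFrame",   SketchInitializer::Caller, SketchInitializer::Caller },
        { "Callee",        SketchInitializer::Caller, SketchInitializer::Caller },
        { "ReturnPC",      SketchInitializer::Callee, SketchInitializer::Caller },
        { "CodeBlock",     SketchInitializer::Callee, SketchInitializer::Caller },
    };
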
@@ -207,12 +167,12 @@ void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned ca
     } // SP holds newCallFrame + sizeof(CallerFrameAndPC), with ArgumentCount initialized.
     
     uint32_t bytecodeOffset = instruction - m_codeBlock->instructions().begin();
-    uint32_t locationBits = CallFrame::Location::encodeAsBytecodeOffset(bytecodeOffset);
+    uint32_t locationBits = CallSiteIndex(bytecodeOffset).bits();
     store32(TrustedImm32(locationBits), Address(callFrameRegister, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + TagOffset));
     emitGetVirtualRegister(callee, regT0); // regT0 holds callee.
 
     store64(regT0, Address(stackPointerRegister, JSStack::Callee * static_cast<int>(sizeof(Register)) - sizeof(CallerFrameAndPC)));
-
+    
     if (opcodeID == op_call_eval) {
         compileCallEval(instruction);
         return;
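
Two addressing forms appear above: the locationBits store32 indexes off callFrameRegister with a plain JSStack::ArgumentCount offset, while the callee store64 indexes off stackPointerRegister and subtracts sizeof(CallerFrameAndPC), since at that point SP equals newCallFrame + sizeof(CallerFrameAndPC), as the "SP holds newCallFrame + sizeof(CallerFrameAndPC)" comment notes. A self-contained sketch of the two offset computations, using stand-ins for Register and CallerFrameAndPC:

    #include <cstddef>
    #include <cstdint>

    // Stand-ins sized like their 64-bit JSC counterparts: one Register is a
    // 64-bit slot; CallerFrameAndPC is a caller-frame pointer plus a return PC.
    using SketchRegister = uint64_t;
    struct SketchCallerFrameAndPC { void* callerFrame; void* returnPC; };

    // Offset of a header slot (e.g. JSStack::ArgumentCount) from the frame base.
    static ptrdiff_t offsetFromCallFrame(int slot)
    {
        return slot * static_cast<ptrdiff_t>(sizeof(SketchRegister));
    }

    // Offset of the same slot from the pre-call stack pointer, which equals the
    // new frame's base plus sizeof(CallerFrameAndPC).
    static ptrdiff_t offsetFromStackPointer(int slot)
    {
        return offsetFromCallFrame(slot) - static_cast<ptrdiff_t>(sizeof(SketchCallerFrameAndPC));
    }
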
@@ -223,17 +183,11 @@ void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned ca
     addSlowCase(slowCase);
 
     ASSERT(m_callCompilationInfo.size() == callLinkInfoIndex);
-    CallLinkInfo* info = m_codeBlock->addCallLinkInfo();
-    info->callType = CallLinkInfo::callTypeFor(opcodeID);
-    info->codeOrigin = CodeOrigin(m_bytecodeOffset);
-    info->calleeGPR = regT0;
+    info->setUpCall(CallLinkInfo::callTypeFor(opcodeID), CodeOrigin(m_bytecodeOffset), regT0);
     m_callCompilationInfo.append(CallCompilationInfo());
     m_callCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
     m_callCompilationInfo[callLinkInfoIndex].callLinkInfo = info;
 
-    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scope)), regT2);
-    store64(regT2, Address(MacroAssembler::stackPointerRegister, JSStack::ScopeChain * sizeof(Register) - sizeof(CallerFrameAndPC)));
-
     m_callCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();
 
     addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
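
The removed lines above wrote the CallLinkInfo's call type, code origin and callee GPR as three separate field stores; setUpCall bundles them into one call. A minimal self-contained sketch of that consolidation, with stand-in types in place of JSC's CallLinkInfo, CallType and CodeOrigin:

    // Stand-ins for the JSC types; illustrative only.
    enum class SketchCallType { None, Call, Construct };
    struct SketchCodeOrigin { unsigned bytecodeOffset; };

    struct SketchCallLinkInfo {
        SketchCallType callType { SketchCallType::None };
        SketchCodeOrigin codeOrigin { 0 };
        unsigned calleeGPR { 0 };

        // Roughly what the setUpCall(...) call above replaces: the three field
        // writes the removed lines performed, done in one place.
        void setUpCall(SketchCallType type, SketchCodeOrigin origin, unsigned gpr)
        {
            callType = type;
            codeOrigin = origin;
            calleeGPR = gpr;
        }
    };
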
@@ -253,12 +207,8 @@ void JIT::compileOpCallSlowCase(OpcodeID opcodeID, Instruction* instruction, Vec
 
     linkSlowCase(iter);
 
-    ThunkGenerator generator = linkThunkGeneratorFor(
-        (opcodeID == op_construct || opcodeID == op_construct_varargs) ? CodeForConstruct : CodeForCall,
-        RegisterPreservationNotRequired);
-    
     move(TrustedImmPtr(m_callCompilationInfo[callLinkInfoIndex].callLinkInfo), regT2);
-    m_callCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_vm->getCTIStub(generator).code());
+    m_callCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_vm->getCTIStub(linkCallThunkGenerator).code());
 
     addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
     checkStackPointerAlignment();
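
The removed lines above chose a thunk generator per call kind (CodeForConstruct for the construct opcodes, CodeForCall otherwise); the replacement always uses linkCallThunkGenerator because the CallLinkInfo loaded into regT2 already carries that distinction, via the setUpCall(CallLinkInfo::callTypeFor(opcodeID), ...) in compileOpCall, with op_call_eval sites set up as plain calls in compileCallEvalSlowCase. A self-contained sketch of the call/construct split the removed selection expressed, with stand-in enums (illustrative only):

    // Stand-ins for OpcodeID and the call-kind distinction; the exact CallType
    // enumerators in CallLinkInfo at this revision are not shown here.
    enum class SketchOpcode { Call, CallEval, CallVarargs, Construct, ConstructVarargs };
    enum class SketchCallKind { Call, Construct };

    // Mirrors the CodeForConstruct / CodeForCall choice the removed lines made:
    // construct-family opcodes get a construct-type site, everything else
    // (including eval) a call-type site.
    static SketchCallKind callKindFor(SketchOpcode opcode)
    {
        switch (opcode) {
        case SketchOpcode::Construct:
        case SketchOpcode::ConstructVarargs:
            return SketchCallKind::Construct;
        default:
            return SketchCallKind::Call;
        }
    }
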