LinkBuffer should not keep a reference to the MacroAssembler
Source/JavaScriptCore/jit/JITCall32_64.cpp
index 7f97f5b..3a9feb2 100644
@@ -36,6 +36,7 @@
 #include "JSArray.h"
 #include "JSFunction.h"
 #include "JSCInlines.h"
+#include "LinkBuffer.h"
 #include "RepatchBuffer.h"
 #include "ResultType.h"
 #include "SamplingTool.h"
@@ -70,8 +71,7 @@ void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
 
     emitLoad(result, regT1, regT0);
     Jump notJSCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
-    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
-    Jump notObject = emitJumpIfNotObject(regT2);
+    Jump notObject = emitJumpIfCellNotObject(regT0);
 
     checkStackPointerAlignment();
     emitFunctionEpilogue();
@@ -100,7 +100,12 @@ void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowC
 {
     compileOpCallSlowCase(op_call_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
 }
-
+    
+void JIT::emitSlow_op_construct_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+    compileOpCallSlowCase(op_construct_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
+}
+    
 void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
 {
     compileOpCallSlowCase(op_construct, currentInstruction, iter, m_callLinkInfoIndex++);
@@ -120,7 +125,12 @@ void JIT::emit_op_call_varargs(Instruction* currentInstruction)
 {
     compileOpCall(op_call_varargs, currentInstruction, m_callLinkInfoIndex++);
 }
-
+    
+void JIT::emit_op_construct_varargs(Instruction* currentInstruction)
+{
+    compileOpCall(op_construct_varargs, currentInstruction, m_callLinkInfoIndex++);
+}
+    
 void JIT::emit_op_construct(Instruction* currentInstruction)
 {
     compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
@@ -144,8 +154,14 @@ void JIT::compileLoadVarargs(Instruction* instruction)
         slowCase.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag)));
 
         load32(payloadFor(JSStack::ArgumentCount), regT2);
-        if (firstVarArgOffset)
+        if (firstVarArgOffset) {
+            Jump sufficientArguments = branch32(GreaterThan, regT2, TrustedImm32(firstVarArgOffset + 1));
+            move(TrustedImm32(1), regT2);
+            Jump endVarArgs = jump();
+            sufficientArguments.link(this);
             sub32(TrustedImm32(firstVarArgOffset), regT2);
+            endVarArgs.link(this);
+        }
         slowCase.append(branch32(Above, regT2, TrustedImm32(Arguments::MaxArguments + 1)));
         // regT2: argumentCountIncludingThis
 
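The two branches added above clamp the forwarded argument count before the MaxArguments check. A rough C++ analogue of the value that ends up in regT2, using a hypothetical helper name (the register holds argumentCountIncludingThis):

    #include <cstdint>

    // If the caller supplied no more than firstVarArgOffset arguments, only
    // |this| survives; otherwise the first firstVarArgOffset arguments are
    // skipped and the remainder are forwarded.
    static uint32_t varargsCountIncludingThis(uint32_t argumentCountIncludingThis, uint32_t firstVarArgOffset)
    {
        if (!firstVarArgOffset)
            return argumentCountIncludingThis;
        if (argumentCountIncludingThis <= firstVarArgOffset + 1)
            return 1; // insufficient arguments: forward just |this|
        return argumentCountIncludingThis - firstVarArgOffset;
    }
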
@@ -228,6 +244,7 @@ void JIT::compileCallEvalSlowCase(Instruction* instruction, Vector<SlowCaseEntry
 
     loadPtr(Address(stackPointerRegister, sizeof(Register) * JSStack::Callee - sizeof(CallerFrameAndPC)), regT0);
     loadPtr(Address(stackPointerRegister, sizeof(Register) * JSStack::Callee - sizeof(CallerFrameAndPC)), regT1);
+    move(TrustedImmPtr(&CallLinkInfo::dummy()), regT2);
 
     emitLoad(JSStack::Callee, regT1, regT0);
     emitNakedCall(m_vm->getCTIStub(virtualCallThunkGenerator).code());
@@ -257,7 +274,7 @@ void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned ca
         - Caller restores callFrameRegister after return.
     */
     
-    if (opcodeID == op_call_varargs)
+    if (opcodeID == op_call_varargs || opcodeID == op_construct_varargs)
         compileLoadVarargs(instruction);
     else {
         int argCount = instruction[3].u.operand;
@@ -266,8 +283,8 @@ void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned ca
         if (opcodeID == op_call && shouldEmitProfiling()) {
             emitLoad(registerOffset + CallFrame::argumentOffsetIncludingThis(0), regT0, regT1);
             Jump done = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));
-            loadPtr(Address(regT1, JSCell::structureOffset()), regT1);
-            storePtr(regT1, instruction[OPCODE_LENGTH(op_call) - 2].u.arrayProfile->addressOfLastSeenStructure());
+            loadPtr(Address(regT1, JSCell::structureIDOffset()), regT1);
+            storePtr(regT1, instruction[OPCODE_LENGTH(op_call) - 2].u.arrayProfile->addressOfLastSeenStructureID());
             done.link(this);
         }
     
@@ -288,24 +305,28 @@ void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned ca
         return;
     }
 
+    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
+
     DataLabelPtr addressOfLinkedFunctionCheck;
     Jump slowCase = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(0));
 
     addSlowCase(slowCase);
-    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
 
-    ASSERT(m_callStructureStubCompilationInfo.size() == callLinkInfoIndex);
-    m_callStructureStubCompilationInfo.append(StructureStubCompilationInfo());
-    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
-    m_callStructureStubCompilationInfo[callLinkInfoIndex].callType = CallLinkInfo::callTypeFor(opcodeID);
-    m_callStructureStubCompilationInfo[callLinkInfoIndex].bytecodeIndex = m_bytecodeOffset;
+    ASSERT(m_callCompilationInfo.size() == callLinkInfoIndex);
+    CallLinkInfo* info = m_codeBlock->addCallLinkInfo();
+    info->callType = CallLinkInfo::callTypeFor(opcodeID);
+    info->codeOrigin = CodeOrigin(m_bytecodeOffset);
+    info->calleeGPR = regT0;
+    m_callCompilationInfo.append(CallCompilationInfo());
+    m_callCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
+    m_callCompilationInfo[callLinkInfoIndex].callLinkInfo = info;
 
     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scope)), regT2);
     store32(regT2, Address(MacroAssembler::stackPointerRegister, JSStack::ScopeChain * sizeof(Register) + PayloadOffset - sizeof(CallerFrameAndPC)));
     store32(TrustedImm32(JSValue::CellTag), Address(stackPointerRegister, JSStack::ScopeChain * sizeof(Register) + TagOffset - sizeof(CallerFrameAndPC)));
 
     checkStackPointerAlignment();
-    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();
+    m_callCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();
 
     addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
     checkStackPointerAlignment();
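For orientation, the CallCompilationInfo entries populated above roughly have this shape. This is a sketch reconstructed from the fields the patch assigns (hotPathBegin, hotPathOther, callReturnLocation, callLinkInfo); the authoritative definition lives in JIT.h and may differ in detail:

    struct CallCompilationInfo {
        MacroAssembler::DataLabelPtr hotPathBegin; // patchable pointer checked against the cached callee
        MacroAssembler::Call hotPathOther;         // near call emitted on the hot path
        MacroAssembler::Call callReturnLocation;   // call into the link/virtual thunk, set on the slow path
        CallLinkInfo* callLinkInfo;                // owned by the CodeBlock via addCallLinkInfo()
    };

The CallLinkInfo itself is now allocated eagerly on the CodeBlock, replacing the per-JIT StructureStubCompilationInfo bookkeeping that the removed lines used.
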
@@ -325,10 +346,11 @@ void JIT::compileOpCallSlowCase(OpcodeID opcodeID, Instruction* instruction, Vec
     linkSlowCase(iter);
 
     ThunkGenerator generator = linkThunkGeneratorFor(
-        opcodeID == op_construct ? CodeForConstruct : CodeForCall,
+        (opcodeID == op_construct || opcodeID == op_construct_varargs) ? CodeForConstruct : CodeForCall,
         RegisterPreservationNotRequired);
     
-    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_vm->getCTIStub(generator).code());
+    move(TrustedImmPtr(m_callCompilationInfo[callLinkInfoIndex].callLinkInfo), regT2);
+    m_callCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_vm->getCTIStub(generator).code());
 
     addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
     checkStackPointerAlignment();
@@ -342,7 +364,7 @@ void JIT::privateCompileClosureCall(CallLinkInfo* callLinkInfo, CodeBlock* calle
     JumpList slowCases;
 
     slowCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
-    slowCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(expectedStructure)));
+    slowCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(expectedStructure)));
     slowCases.append(branchPtr(NotEqual, Address(regT0, JSFunction::offsetOfExecutable()), TrustedImmPtr(expectedExecutable)));
     
     loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT1);
@@ -356,7 +378,7 @@ void JIT::privateCompileClosureCall(CallLinkInfo* callLinkInfo, CodeBlock* calle
     restoreReturnAddressBeforeReturn(regT2);
     Jump slow = jump();
     
-    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
+    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);
     
     patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
     patchBuffer.link(done, callLinkInfo->hotPathOther.labelAtOffset(0));
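The last hunk is the mechanical part of the commit title: LinkBuffer now receives the MacroAssembler by reference at construction time instead of by pointer and, per the title, no longer keeps a reference to it afterwards. A minimal before/after sketch of the call site, taken from the hunk above:

    // Before this patch (assembler passed as a pointer):
    //     LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
    // After this patch (assembler passed by reference):
    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);
    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
    patchBuffer.link(done, callLinkInfo->hotPathOther.labelAtOffset(0));

Presumably the reference-taking constructor does all the copying it needs up front, so the LinkBuffer never has to hold on to the assembler after construction.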