FTL arity fixup should work on ARM64
author    fpizlo@apple.com <fpizlo@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Thu, 6 Mar 2014 20:16:38 +0000 (20:16 +0000)
committer fpizlo@apple.com <fpizlo@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Thu, 6 Mar 2014 20:16:38 +0000 (20:16 +0000)
https://bugs.webkit.org/show_bug.cgi?id=129810

Reviewed by Michael Saboff.

- Using regT5 to pass the thunk return address to arityFixup is shady since that's a
  callee-save register.

- The FTL path was assuming X86 conventions for where SP points at the top of the
  prologue (see the sketch below).

This makes some more tests pass.
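
For context, here is a minimal standalone sketch (not the JSC sources; it uses the compiler's
predefined macros in place of WTF's CPU()/USE() macros) of the prologue convention the FTL
path was hard-coding: on x86 the prologue has only pushed the frame pointer by the time the
arity check samples SP, while ARM64 (like ARM, MIPS, and SH4) has also saved the link/return
register, so the delta is two words. FTLLink.cpp now asks
AssemblyHelpers::prologueStackPointerDelta() for this value instead of assuming sizeof(void*).

    // Standalone illustration only; mirrors AssemblyHelpers::prologueStackPointerDelta().
    #include <cstddef>
    #include <cstdio>

    // How far SP sits below the caller's SP at the very top of the prologue.
    static size_t prologueStackPointerDelta()
    {
    #if defined(__x86_64__) || defined(__i386__)
        return sizeof(void*);     // x86 prologues push only the frame pointer
    #else
        return 2 * sizeof(void*); // ARM64 and friends also save the link/return register
    #endif
    }

    int main()
    {
        std::printf("prologue SP delta: %zu bytes\n", prologueStackPointerDelta());
        return 0;
    }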

* dfg/DFGJITCompiler.cpp:
(JSC::DFG::JITCompiler::compileFunction):
* ftl/FTLLink.cpp:
(JSC::FTL::link):
* jit/AssemblyHelpers.h:
(JSC::AssemblyHelpers::prologueStackPointerDelta):
* jit/JIT.cpp:
(JSC::JIT::privateCompile):
* jit/ThunkGenerators.cpp:
(JSC::arityFixup):
* llint/LowLevelInterpreter64.asm:
* offlineasm/arm64.rb:
* offlineasm/x86.rb: In addition to the t7 change, make t6 agree with GPRInfo.h.

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@165205 268f45cc-cd09-0410-ab3c-d52691b4dbfc

Source/JavaScriptCore/ChangeLog
Source/JavaScriptCore/dfg/DFGJITCompiler.cpp
Source/JavaScriptCore/ftl/FTLLink.cpp
Source/JavaScriptCore/jit/AssemblyHelpers.h
Source/JavaScriptCore/jit/JIT.cpp
Source/JavaScriptCore/jit/ThunkGenerators.cpp
Source/JavaScriptCore/llint/LowLevelInterpreter64.asm
Source/JavaScriptCore/offlineasm/arm64.rb
Source/JavaScriptCore/offlineasm/x86.rb

Source/JavaScriptCore/ChangeLog
index 4a83823..09d6a2b 100644
@@ -1,3 +1,31 @@
+2014-03-06  Filip Pizlo  <fpizlo@apple.com>
+
+        FTL arity fixup should work on ARM64
+        https://bugs.webkit.org/show_bug.cgi?id=129810
+
+        Reviewed by Michael Saboff.
+        
+        - Using regT5 to pass the thunk return address to arityFixup is shady since that's a
+          callee-save register.
+        
+        - The FTL path was assuming X86 conventions for where SP points at the top of the prologue.
+        
+        This makes some more tests pass.
+
+        * dfg/DFGJITCompiler.cpp:
+        (JSC::DFG::JITCompiler::compileFunction):
+        * ftl/FTLLink.cpp:
+        (JSC::FTL::link):
+        * jit/AssemblyHelpers.h:
+        (JSC::AssemblyHelpers::prologueStackPointerDelta):
+        * jit/JIT.cpp:
+        (JSC::JIT::privateCompile):
+        * jit/ThunkGenerators.cpp:
+        (JSC::arityFixup):
+        * llint/LowLevelInterpreter64.asm:
+        * offlineasm/arm64.rb:
+        * offlineasm/x86.rb: In addition to the t7 change, make t6 agree with GPRInfo.h.
+
 2014-03-06  Mark Hahnenberg  <mhahnenberg@apple.com>
 
         Fix write barriers in Repatch.cpp for !ENABLE(DFG_JIT) platforms after r165128

Source/JavaScriptCore/dfg/DFGJITCompiler.cpp
index b988ebf..94a7d15 100644
@@ -381,8 +381,14 @@ void JITCompiler::compileFunction()
         addPtr(TrustedImm32(maxFrameExtentForSlowPathCall), stackPointerRegister);
     branchTest32(Zero, GPRInfo::regT0).linkTo(fromArityCheck, this);
     emitStoreCodeOrigin(CodeOrigin(0));
-    move(TrustedImmPtr(m_vm->arityCheckFailReturnThunks->returnPCsFor(*m_vm, m_codeBlock->numParameters())), GPRInfo::regT5);
-    loadPtr(BaseIndex(GPRInfo::regT5, GPRInfo::regT0, timesPtr()), GPRInfo::regT5);
+    GPRReg thunkReg;
+#if USE(JSVALUE64)
+    thunkReg = GPRInfo::regT7;
+#else
+    thunkReg = GPRInfo::regT5;
+#endif
+    move(TrustedImmPtr(m_vm->arityCheckFailReturnThunks->returnPCsFor(*m_vm, m_codeBlock->numParameters())), thunkReg);
+    loadPtr(BaseIndex(thunkReg, GPRInfo::regT0, timesPtr()), thunkReg);
     m_callArityFixup = call();
     jump(fromArityCheck);
     

Source/JavaScriptCore/ftl/FTLLink.cpp
index c73c118..d4f0ca5 100644
@@ -71,7 +71,7 @@ void link(State& state)
     CCallHelpers::Label arityCheck;
 
     CCallHelpers::Address frame = CCallHelpers::Address(
-        CCallHelpers::stackPointerRegister, -static_cast<int32_t>(sizeof(void*)));
+        CCallHelpers::stackPointerRegister, -static_cast<int32_t>(AssemblyHelpers::prologueStackPointerDelta()));
     
     if (Profiler::Compilation* compilation = graph.compilation()) {
         compilation->addDescription(
@@ -170,8 +170,8 @@ void link(State& state)
         jit.emitFunctionEpilogue();
         mainPathJumps.append(jit.branchTest32(CCallHelpers::Zero, GPRInfo::regT0));
         jit.emitFunctionPrologue();
-        jit.move(CCallHelpers::TrustedImmPtr(vm.arityCheckFailReturnThunks->returnPCsFor(vm, codeBlock->numParameters())), GPRInfo::regT5);
-        jit.loadPtr(CCallHelpers::BaseIndex(GPRInfo::regT5, GPRInfo::regT0, CCallHelpers::timesPtr()), GPRInfo::regT5);
+        jit.move(CCallHelpers::TrustedImmPtr(vm.arityCheckFailReturnThunks->returnPCsFor(vm, codeBlock->numParameters())), GPRInfo::regT7);
+        jit.loadPtr(CCallHelpers::BaseIndex(GPRInfo::regT7, GPRInfo::regT0, CCallHelpers::timesPtr()), GPRInfo::regT7);
         CCallHelpers::Call callArityFixup = jit.call();
         jit.emitFunctionEpilogue();
         mainPathJumps.append(jit.jump());

Source/JavaScriptCore/jit/AssemblyHelpers.h
index a008842..ed2bad2 100644
@@ -68,7 +68,7 @@ public:
     }
 
 #if CPU(X86_64) || CPU(X86)
-    size_t prologueStackPointerDelta()
+    static size_t prologueStackPointerDelta()
     {
         // Prologue only saves the framePointerRegister
         return sizeof(void*);
@@ -103,7 +103,7 @@ public:
 #endif // CPU(X86_64) || CPU(X86)
 
 #if CPU(ARM) || CPU(ARM64)
-    size_t prologueStackPointerDelta()
+    static size_t prologueStackPointerDelta()
     {
         // Prologue saves the framePointerRegister and linkRegister
         return 2 * sizeof(void*);
@@ -138,7 +138,7 @@ public:
 #endif
 
 #if CPU(MIPS)
-    size_t prologueStackPointerDelta()
+    static size_t prologueStackPointerDelta()
     {
         // Prologue saves the framePointerRegister and returnAddressRegister
         return 2 * sizeof(void*);
@@ -161,7 +161,7 @@ public:
 #endif
 
 #if CPU(SH4)
-    size_t prologueStackPointerDelta()
+    static size_t prologueStackPointerDelta()
     {
         // Prologue saves the framePointerRegister and link register
         return 2 * sizeof(void*);

Source/JavaScriptCore/jit/JIT.cpp
index f9cde55..90c6b6a 100644
@@ -558,8 +558,14 @@ CompilationResult JIT::privateCompile(JITCompilationEffort effort)
         if (returnValueGPR != regT0)
             move(returnValueGPR, regT0);
         branchTest32(Zero, regT0).linkTo(beginLabel, this);
-        move(TrustedImmPtr(m_vm->arityCheckFailReturnThunks->returnPCsFor(*m_vm, m_codeBlock->numParameters())), regT5);
-        loadPtr(BaseIndex(regT5, regT0, timesPtr()), regT5);
+        GPRReg thunkReg;
+#if USE(JSVALUE64)
+        thunkReg = GPRInfo::regT7;
+#else
+        thunkReg = GPRInfo::regT5;
+#endif
+        move(TrustedImmPtr(m_vm->arityCheckFailReturnThunks->returnPCsFor(*m_vm, m_codeBlock->numParameters())), thunkReg);
+        loadPtr(BaseIndex(thunkReg, regT0, timesPtr()), thunkReg);
         emitNakedCall(m_vm->getCTIStub(arityFixup).code());
 
 #if !ASSERT_DISABLED

Source/JavaScriptCore/jit/ThunkGenerators.cpp
index dd4e414..edc6b34 100644
@@ -429,7 +429,7 @@ MacroAssemblerCodeRef arityFixup(VM* vm)
     JSInterfaceJIT jit(vm);
 
     // We enter with fixup count, in aligned stack units, in regT0 and the return thunk in
-    // regT5.
+    // regT5 on 32-bit and regT7 on 64-bit.
 #if USE(JSVALUE64)
 #  if CPU(X86_64)
     jit.pop(JSInterfaceJIT::regT4);
@@ -467,7 +467,7 @@ MacroAssemblerCodeRef arityFixup(VM* vm)
     jit.storePtr(GPRInfo::regT1, MacroAssembler::BaseIndex(JSInterfaceJIT::regT6, JSInterfaceJIT::regT0, JSInterfaceJIT::TimesEight));
     
     // Install the new return PC.
-    jit.storePtr(GPRInfo::regT5, JSInterfaceJIT::Address(JSInterfaceJIT::callFrameRegister, CallFrame::returnPCOffset()));
+    jit.storePtr(GPRInfo::regT7, JSInterfaceJIT::Address(JSInterfaceJIT::callFrameRegister, CallFrame::returnPCOffset()));
 
 #  if CPU(X86_64)
     jit.push(JSInterfaceJIT::regT4);

Source/JavaScriptCore/llint/LowLevelInterpreter64.asm
index 5c3b6f4..5e928b6 100644
@@ -483,7 +483,7 @@ macro functionArityCheck(doneLabel, slowPath)
     loadp CommonSlowPaths::ArityCheckData::thunkToCall[t1], t2
     btpz t2, .proceedInline
     
-    loadp CommonSlowPaths::ArityCheckData::returnPC[t1], t5
+    loadp CommonSlowPaths::ArityCheckData::returnPC[t1], t7
     loadp CommonSlowPaths::ArityCheckData::paddedStackSpace[t1], t0
     call t2
     if ASSERT_ENABLED

Source/JavaScriptCore/offlineasm/arm64.rb
index 92b3e14..1005713 100644
@@ -1,4 +1,4 @@
-# Copyright (C) 2011, 2012 Apple Inc. All rights reserved.
+# Copyright (C) 2011, 2012, 2014 Apple Inc. All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions
@@ -118,6 +118,8 @@ class RegisterID
             arm64GPRName('x24', kind)
         when 't6'
             arm64GPRName('x6', kind)
+        when 't7'
+            arm64GPRName('x7', kind)
         when 'cfr'
             arm64GPRName('x29', kind)
         when 'csr1'

Source/JavaScriptCore/offlineasm/x86.rb
index 07724b4..5a75cd9 100644
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Apple Inc. All rights reserved.
+# Copyright (C) 2012, 2014 Apple Inc. All rights reserved.
 # Copyright (C) 2013 Digia Plc. and/or its subsidiary(-ies)
 #
 # Redistribution and use in source and binary forms, with or without
@@ -284,13 +284,25 @@ class RegisterID
             raise "Cannot use #{name} in 32-bit X86 at #{codeOriginString}" unless isX64
             case kind
             when :half
-                "%r10w"
+                "%r8w"
             when :int
-                "%r10d"
+                "%r8d"
             when :ptr
-                "%r10"
+                "%r8"
             when :quad
-                "%r10"
+                "%r8"
+            end
+        when "t7"
+            raise "Cannot use #{name} in 32-bit X86 at #{codeOriginString}" unless isX64
+            case kind
+            when :half
+                "%r9w"
+            when :int
+                "%r9d"
+            when :ptr
+                "%r9"
+            when :quad
+                "%r9"
             end
         when "csr1"
             raise "Cannot use #{name} in 32-bit X86 at #{codeOriginString}" unless isX64