Source/JavaScriptCore:
author msaboff@apple.com <msaboff@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Wed, 30 Sep 2015 22:28:08 +0000 (22:28 +0000)
committer msaboff@apple.com <msaboff@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Wed, 30 Sep 2015 22:28:08 +0000 (22:28 +0000)
Relanding r190289 with the following two fixes:

 1. REGRESSION(r190289): It made Speedometer/Full.html performance test fail
    https://bugs.webkit.org/show_bug.cgi?id=149621

    Reviewed by Saam Barati.

    We need to restore callee saves for both the fast and slow paths before making a
    tail call in the FTL.

    * ftl/FTLJSCallBase.cpp:
    (JSC::FTL::JSCallBase::emit):
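
    Concretely, the restore is hoisted above the fast path / slow path
    branch so that it covers both. A trimmed, annotated view of the emitted
    sequence (the full hunk appears under ftl/FTLJSCallBase.cpp below; the
    comments here are editorial annotations, not part of the commit):

        void JSCallBase::emit(CCallHelpers& jit)
        {
            m_callLinkInfo = jit.codeBlock()->addCallLinkInfo();

            // Restore callee-save registers *before* branching, so that both
            // the fast path (nearTailCall) and the slow path (the link thunk)
            // run with the caller's callee saves back in place.
            if (CallLinkInfo::callModeFor(m_type) == CallMode::Tail)
                jit.emitRestoreCalleeSaves();

            CCallHelpers::Jump slowPath = jit.branchPtrWithPatch(
                CCallHelpers::NotEqual, GPRInfo::regT0, m_targetToCheck,
                CCallHelpers::TrustedImmPtr(0));

            // ... fast path: prepareForTailCallSlow(), then nearTailCall();
            // slow path: CallLinkInfo into regT2, then nearCall() to the link
            // thunk, which must never return (JITDidReturnFromTailCall).
        }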

 2. [ARM] REGRESSION(r190289): It made 374 tests crash on 32 bit ARM Linux
    https://bugs.webkit.org/show_bug.cgi?id=149619

    Reviewed by Filip Pizlo.

    We need to check for ARMv7_TRADITIONAL and ARMv7, in addition to ARM, in the
    "if" statement so that all platforms with a link register are handled.

    * llint/LowLevelInterpreter.asm:
    (prepareForTailCall):
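
    The LowLevelInterpreter.asm hunk is not reproduced in the diff below, so
    the following is a hedged C++-side analogue only: the committed fix widens
    an offlineasm "if ARM" condition in prepareForTailCall to also cover the
    ARMv7 and ARMv7_TRADITIONAL backends, and the macro defined here is
    illustrative, not WebKit API.

        #include <wtf/Platform.h>

        // CPU(ARM) is true for every 32-bit ARM flavor (traditional as well
        // as Thumb-2/ARMv7), mirroring the widened offlineasm condition: all
        // of these keep the return address in the link register (lr), which
        // must be reloaded before making the tail jump.
        #if CPU(ARM)
        #define NEEDS_LINK_REGISTER_RELOAD 1
        #else
        #define NEEDS_LINK_REGISTER_RELOAD 0
        #endif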

LayoutTests:
Relanding r190289 after fixes tracked in https://bugs.webkit.org/show_bug.cgi?id=149619
and https://bugs.webkit.org/show_bug.cgi?id=149621

Reviewed by Saam Barati.

git-svn-id: http://svn.webkit.org/repository/webkit/trunk@190370 268f45cc-cd09-0410-ab3c-d52691b4dbfc

42 files changed:
LayoutTests/ChangeLog
LayoutTests/js/caller-property-expected.txt
LayoutTests/js/script-tests/caller-property.js
Source/JavaScriptCore/CMakeLists.txt
Source/JavaScriptCore/ChangeLog
Source/JavaScriptCore/JavaScriptCore.vcxproj/JavaScriptCore.vcxproj
Source/JavaScriptCore/JavaScriptCore.vcxproj/JavaScriptCore.vcxproj.filters
Source/JavaScriptCore/JavaScriptCore.xcodeproj/project.pbxproj
Source/JavaScriptCore/dfg/DFGByteCodeParser.cpp
Source/JavaScriptCore/dfg/DFGClobberize.h
Source/JavaScriptCore/dfg/DFGNode.h
Source/JavaScriptCore/dfg/DFGSpeculativeJIT64.cpp
Source/JavaScriptCore/dfg/DFGTierUpCheckInjectionPhase.cpp
Source/JavaScriptCore/ftl/FTLCapabilities.cpp
Source/JavaScriptCore/ftl/FTLCompile.cpp
Source/JavaScriptCore/ftl/FTLInlineCacheSize.cpp
Source/JavaScriptCore/ftl/FTLInlineCacheSize.h
Source/JavaScriptCore/ftl/FTLJSCall.cpp
Source/JavaScriptCore/ftl/FTLJSCallBase.cpp
Source/JavaScriptCore/ftl/FTLJSCallBase.h
Source/JavaScriptCore/ftl/FTLJSCallVarargs.cpp
Source/JavaScriptCore/ftl/FTLJSTailCall.cpp [new file with mode: 0644]
Source/JavaScriptCore/ftl/FTLJSTailCall.h [new file with mode: 0644]
Source/JavaScriptCore/ftl/FTLLocation.h
Source/JavaScriptCore/ftl/FTLLowerDFGToLLVM.cpp
Source/JavaScriptCore/ftl/FTLState.h
Source/JavaScriptCore/jit/AssemblyHelpers.cpp
Source/JavaScriptCore/jit/CallFrameShuffleData.h
Source/JavaScriptCore/jit/CallFrameShuffler.cpp
Source/JavaScriptCore/jit/CallFrameShuffler.h
Source/JavaScriptCore/jit/CallFrameShuffler64.cpp
Source/JavaScriptCore/jit/JITCall.cpp
Source/JavaScriptCore/jit/Reg.h
Source/JavaScriptCore/llint/LowLevelInterpreter.asm
Source/JavaScriptCore/runtime/Options.h
Source/JavaScriptCore/tests/es6.yaml
Source/JavaScriptCore/tests/stress/dfg-tail-calls.js [new file with mode: 0644]
Source/JavaScriptCore/tests/stress/mutual-tail-call-no-stack-overflow.js [new file with mode: 0644]
Source/JavaScriptCore/tests/stress/tail-call-no-stack-overflow.js [new file with mode: 0644]
Source/JavaScriptCore/tests/stress/tail-call-recognize.js [new file with mode: 0644]
Source/JavaScriptCore/tests/stress/tail-call-varargs-no-stack-overflow.js [new file with mode: 0644]
Source/JavaScriptCore/tests/stress/tail-calls-dont-overwrite-live-stack.js [new file with mode: 0644]

diff --git a/LayoutTests/ChangeLog b/LayoutTests/ChangeLog
index 43126ee..1299b06 100644
@@ -1,3 +1,10 @@
+2015-09-30  Michael Saboff  <msaboff@apple.com>
+
+        Relanding r190289 after fixes tracked in https://bugs.webkit.org/show_bug.cgi?id=149619
+        and https://bugs.webkit.org/show_bug.cgi?id=149621
+
+        Reviewed by Saam Barati.
+
 2015-09-29  Simon Fraser  <simon.fraser@apple.com>
 
         [iOS] Allow tests to generate user gestures for UI testing
diff --git a/LayoutTests/js/caller-property-expected.txt b/LayoutTests/js/caller-property-expected.txt
index 980afd8..e554462 100644
@@ -10,10 +10,14 @@ PASS nonStrictCaller(nonStrictCallee) is nonStrictCaller
 PASS nonStrictCaller(strictCallee) threw exception TypeError: Type error.
 PASS strictCaller(nonStrictCallee) threw exception TypeError: Function.caller used to retrieve strict caller.
 PASS strictCaller(strictCallee) threw exception TypeError: Type error.
+PASS strictTailCaller(nonStrictCallee) is null
+PASS strictTailCaller(strictCallee) threw exception TypeError: Type error.
 PASS nonStrictCaller(boundNonStrictCallee) is nonStrictCaller
 PASS nonStrictCaller(boundStrictCallee) threw exception TypeError: Type error.
 PASS strictCaller(boundNonStrictCallee) threw exception TypeError: Function.caller used to retrieve strict caller.
 PASS strictCaller(boundStrictCallee) threw exception TypeError: Type error.
+PASS strictTailCaller(boundNonStrictCallee) is null
+PASS strictTailCaller(boundStrictCallee) threw exception TypeError: Type error.
 PASS nonStrictGetter(nonStrictAccessor) is nonStrictGetter
 PASS nonStrictSetter(nonStrictAccessor) is true
 PASS nonStrictGetter(strictAccessor) threw exception TypeError: Type error.
diff --git a/LayoutTests/js/script-tests/caller-property.js b/LayoutTests/js/script-tests/caller-property.js
index b79f277..7048f0d 100644
@@ -23,11 +23,15 @@ shouldBe('childHasCallerWhenCalledFromWithinParent', 'true')
 function nonStrictCallee() { return nonStrictCallee.caller; }
 function strictCallee() { "use strict"; return strictCallee.caller; }
 function nonStrictCaller(x) { return x(); }
-function strictCaller(x) { "use strict"; return x(); }
+// Tail calls leak and show our caller's caller, which is null here
+function strictCaller(x) { "use strict"; var result = x(); return result; }
+function strictTailCaller(x) { "use strict"; return x(); }
 shouldBe("nonStrictCaller(nonStrictCallee)", "nonStrictCaller");
 shouldThrow("nonStrictCaller(strictCallee)", '"TypeError: Type error"');
 shouldThrow("strictCaller(nonStrictCallee)", '"TypeError: Function.caller used to retrieve strict caller"');
 shouldThrow("strictCaller(strictCallee)", '"TypeError: Type error"');
+shouldBe("strictTailCaller(nonStrictCallee)", "null");
+shouldThrow("strictTailCaller(strictCallee)", '"TypeError: Type error"');
 
 // .caller within a bound function reaches the caller, ignoring the binding.
 var boundNonStrictCallee = nonStrictCallee.bind();
@@ -36,6 +40,8 @@ shouldBe("nonStrictCaller(boundNonStrictCallee)", "nonStrictCaller");
 shouldThrow("nonStrictCaller(boundStrictCallee)", '"TypeError: Type error"');
 shouldThrow("strictCaller(boundNonStrictCallee)", '"TypeError: Function.caller used to retrieve strict caller"');
 shouldThrow("strictCaller(boundStrictCallee)", '"TypeError: Type error"');
+shouldBe("strictTailCaller(boundNonStrictCallee)", "null");
+shouldThrow("strictTailCaller(boundStrictCallee)", '"TypeError: Type error"');
 
 // Check that .caller works (or throws) as expected, over an accessor call.
 function getFooGetter(x) { return Object.getOwnPropertyDescriptor(x, 'foo').get; }
diff --git a/Source/JavaScriptCore/CMakeLists.txt b/Source/JavaScriptCore/CMakeLists.txt
index 9236bc2..9b7b211 100644
@@ -904,6 +904,7 @@ if (ENABLE_FTL_JIT)
         ftl/FTLJSCall.cpp
         ftl/FTLJSCallBase.cpp
         ftl/FTLJSCallVarargs.cpp
+        ftl/FTLJSTailCall.cpp
         ftl/FTLLink.cpp
         ftl/FTLLocation.cpp
         ftl/FTLLowerDFGToLLVM.cpp
diff --git a/Source/JavaScriptCore/ChangeLog b/Source/JavaScriptCore/ChangeLog
index c9b6945..148e539 100644
@@ -1,3 +1,29 @@
+2015-09-30  Michael Saboff  <msaboff@apple.com>
+
+        Relanding r190289 with the following two fixes:
+
+         1. REGRESSION(r190289): It made Speedometer/Full.html performance test fail
+            https://bugs.webkit.org/show_bug.cgi?id=149621
+
+            Reviewed by Saam Barati.
+
+            We need to restore callee saves for both the fast and slow paths before making a
+            tail call in the FTL.
+
+            * ftl/FTLJSCallBase.cpp:
+            (JSC::FTL::JSCallBase::emit):
+
+         2. [ARM] REGRESSION(r190289): It made 374 tests crash on 32 bit ARM Linux
+            https://bugs.webkit.org/show_bug.cgi?id=149619
+
+            Reviewed by Filip Pizlo.
+
+            We need to check for ARMv7_TRADITIONAL and ARMv7, in addition to ARM, in the
+            "if" statement so that all platforms with a link register are handled.
+            
+            * llint/LowLevelInterpreter.asm:
+            (prepareForTailCall):
+
 2015-09-30  Keith Miller  <keith_miller@apple.com>
 
         [ES6] Add TypedArray.prototype functionality.
diff --git a/Source/JavaScriptCore/JavaScriptCore.vcxproj/JavaScriptCore.vcxproj b/Source/JavaScriptCore/JavaScriptCore.vcxproj/JavaScriptCore.vcxproj
index 0142054..cf30f8c 100644
     <ClCompile Include="..\ftl\FTLJSCall.cpp" />
     <ClCompile Include="..\ftl\FTLJSCallBase.cpp" />
     <ClCompile Include="..\ftl\FTLJSCallVarargs.cpp" />
+    <ClCompile Include="..\ftl\FTLJSTailCall.cpp" />
     <ClCompile Include="..\ftl\FTLLink.cpp" />
     <ClCompile Include="..\ftl\FTLLocation.cpp" />
     <ClCompile Include="..\ftl\FTLLowerDFGToLLVM.cpp" />
     <ClInclude Include="..\ftl\FTLJSCall.h" />
     <ClInclude Include="..\ftl\FTLJSCallBase.h" />
     <ClInclude Include="..\ftl\FTLJSCallVarargs.h" />
+    <ClInclude Include="..\ftl\FTLJSTailCall.h" />
     <ClInclude Include="..\ftl\FTLLink.h" />
     <ClInclude Include="..\ftl\FTLLocation.h" />
     <ClInclude Include="..\ftl\FTLLowerDFGToLLVM.h" />
diff --git a/Source/JavaScriptCore/JavaScriptCore.vcxproj/JavaScriptCore.vcxproj.filters b/Source/JavaScriptCore/JavaScriptCore.vcxproj/JavaScriptCore.vcxproj.filters
index fd8edff..9469f89 100644
     <ClCompile Include="..\ftl\FTLJSCall.cpp">
       <Filter>ftl</Filter>
     </ClCompile>
+    <ClCompile Include="..\ftl\FTLJSTailCall.cpp">
+      <Filter>ftl</Filter>
+    </ClCompile>
     <ClCompile Include="..\ftl\FTLLink.cpp">
       <Filter>ftl</Filter>
     </ClCompile>
     <ClInclude Include="..\ftl\FTLJSCall.h">
       <Filter>ftl</Filter>
     </ClInclude>
+    <ClInclude Include="..\ftl\FTLJSTailCall.h">
+      <Filter>ftl</Filter>
+    </ClInclude>
     <ClInclude Include="..\ftl\FTLLink.h">
       <Filter>ftl</Filter>
     </ClInclude>
diff --git a/Source/JavaScriptCore/JavaScriptCore.xcodeproj/project.pbxproj b/Source/JavaScriptCore/JavaScriptCore.xcodeproj/project.pbxproj
index ba8a67e..a7cc8f2 100644
                623A37EC1B87A7C000754209 /* RegisterMap.h in Headers */ = {isa = PBXBuildFile; fileRef = 623A37EB1B87A7BD00754209 /* RegisterMap.h */; settings = {ATTRIBUTES = (Private, ); }; };
                627673231B680C1E00FD9F2E /* CallMode.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 627673211B680C1E00FD9F2E /* CallMode.cpp */; };
                627673241B680C1E00FD9F2E /* CallMode.h in Headers */ = {isa = PBXBuildFile; fileRef = 627673221B680C1E00FD9F2E /* CallMode.h */; settings = {ATTRIBUTES = (Private, ); }; };
+               62774DAA1B8D4B190006F05A /* FTLJSTailCall.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 62774DA81B8D4B190006F05A /* FTLJSTailCall.cpp */; };
+               62774DAB1B8D4B190006F05A /* FTLJSTailCall.h in Headers */ = {isa = PBXBuildFile; fileRef = 62774DA91B8D4B190006F05A /* FTLJSTailCall.h */; };
                62D2D38F1ADF103F000206C1 /* FunctionRareData.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 62D2D38D1ADF103F000206C1 /* FunctionRareData.cpp */; };
                62D2D3901ADF103F000206C1 /* FunctionRareData.h in Headers */ = {isa = PBXBuildFile; fileRef = 62D2D38E1ADF103F000206C1 /* FunctionRareData.h */; settings = {ATTRIBUTES = (Private, ); }; };
                62D755D41B84FB3D001801FA /* CallFrameShuffler64.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 62D755D31B84FB39001801FA /* CallFrameShuffler64.cpp */; };
                623A37EB1B87A7BD00754209 /* RegisterMap.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RegisterMap.h; sourceTree = "<group>"; };
                627673211B680C1E00FD9F2E /* CallMode.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = CallMode.cpp; sourceTree = "<group>"; };
                627673221B680C1E00FD9F2E /* CallMode.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CallMode.h; sourceTree = "<group>"; };
+               62774DA81B8D4B190006F05A /* FTLJSTailCall.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = FTLJSTailCall.cpp; path = ftl/FTLJSTailCall.cpp; sourceTree = "<group>"; };
+               62774DA91B8D4B190006F05A /* FTLJSTailCall.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = FTLJSTailCall.h; path = ftl/FTLJSTailCall.h; sourceTree = "<group>"; };
                62A9A29E1B0BED4800BD54CA /* DFGLazyNode.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = DFGLazyNode.cpp; path = dfg/DFGLazyNode.cpp; sourceTree = "<group>"; };
                62A9A29F1B0BED4800BD54CA /* DFGLazyNode.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DFGLazyNode.h; path = dfg/DFGLazyNode.h; sourceTree = "<group>"; };
                62D2D38D1ADF103F000206C1 /* FunctionRareData.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = FunctionRareData.cpp; sourceTree = "<group>"; };
                                0FD1202E1A8AED12000F5280 /* FTLJSCallBase.h */,
                                0FD120311A8C85BD000F5280 /* FTLJSCallVarargs.cpp */,
                                0FD120321A8C85BD000F5280 /* FTLJSCallVarargs.h */,
+                               62774DA81B8D4B190006F05A /* FTLJSTailCall.cpp */,
+                               62774DA91B8D4B190006F05A /* FTLJSTailCall.h */,
                                0F8F2B93172E049E007DBDA5 /* FTLLink.cpp */,
                                0F8F2B94172E049E007DBDA5 /* FTLLink.h */,
                                0FCEFADD180738C000472CE4 /* FTLLocation.cpp */,
                                0F6B1CB6185FC9E900845D97 /* FTLJSCall.h in Headers */,
                                0FD120301A8AED12000F5280 /* FTLJSCallBase.h in Headers */,
                                0FD120341A8C85BD000F5280 /* FTLJSCallVarargs.h in Headers */,
+                               62774DAB1B8D4B190006F05A /* FTLJSTailCall.h in Headers */,
                                0F8F2B96172E04A3007DBDA5 /* FTLLink.h in Headers */,
                                0FCEFAE0180738C000472CE4 /* FTLLocation.h in Headers */,
                                0FEA0A10170513DB00BB722C /* FTLLowerDFGToLLVM.h in Headers */,
                                0F6B1CB5185FC9E900845D97 /* FTLJSCall.cpp in Sources */,
                                0FD1202F1A8AED12000F5280 /* FTLJSCallBase.cpp in Sources */,
                                0FD120331A8C85BD000F5280 /* FTLJSCallVarargs.cpp in Sources */,
+                               62774DAA1B8D4B190006F05A /* FTLJSTailCall.cpp in Sources */,
                                0F8F2B95172E04A0007DBDA5 /* FTLLink.cpp in Sources */,
                                0FCEFADF180738C000472CE4 /* FTLLocation.cpp in Sources */,
                                0FEA0A0F170513DB00BB722C /* FTLLowerDFGToLLVM.cpp in Sources */,
diff --git a/Source/JavaScriptCore/dfg/DFGByteCodeParser.cpp b/Source/JavaScriptCore/dfg/DFGByteCodeParser.cpp
index 4349196..d4fc1a8 100644
@@ -1882,11 +1882,15 @@ bool ByteCodeParser::handleInlining(
         m_currentIndex = nextOffset;
         m_exitOK = true;
         processSetLocalQueue(); // This only comes into play for intrinsics, since normal inlined code will leave an empty queue.
-        addToGraph(Jump);
+        if (Node* terminal = m_currentBlock->terminal())
+            ASSERT_UNUSED(terminal, terminal->op() == TailCall || terminal->op() == TailCallVarargs);
+        else {
+            addToGraph(Jump);
+            landingBlocks.append(m_currentBlock);
+        }
         if (verbose)
             dataLog("Marking ", RawPointer(m_currentBlock), " as linked (tail of poly inlinee)\n");
         m_currentBlock->didLink();
-        landingBlocks.append(m_currentBlock);
 
         if (verbose)
             dataLog("Finished inlining ", callLinkStatus[i], " at ", currentCodeOrigin(), ".\n");
@@ -1919,8 +1923,12 @@ bool ByteCodeParser::handleInlining(
     m_currentIndex = nextOffset;
     m_exitOK = true; // Origin changed, so it's fine to exit again.
     processSetLocalQueue();
-    addToGraph(Jump);
-    landingBlocks.append(m_currentBlock);
+    if (Node* terminal = m_currentBlock->terminal())
+        ASSERT_UNUSED(terminal, terminal->op() == TailCall || terminal->op() == TailCallVarargs);
+    else {
+        addToGraph(Jump);
+        landingBlocks.append(m_currentBlock);
+    }
     
     RefPtr<BasicBlock> continuationBlock = adoptRef(
         new BasicBlock(UINT_MAX, m_numArguments, m_numLocals, PNaN));
@@ -3664,7 +3672,7 @@ bool ByteCodeParser::parseBlock(unsigned limit)
                 // We could be the dummy jump to a return after a non-inlined, non-emulated tail call in a ternary operator
                 Node* terminal = m_currentBlock->terminal();
                 ASSERT_UNUSED(terminal, terminal->op() == TailCall || terminal->op() == TailCallVarargs);
-                LAST_OPCODE(op_ret);
+                LAST_OPCODE(op_jmp);
             }
             int relativeOffset = currentInstruction[1].u.operand;
             addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
diff --git a/Source/JavaScriptCore/dfg/DFGClobberize.h b/Source/JavaScriptCore/dfg/DFGClobberize.h
index b1e1109..c8040ce 100644
@@ -1035,8 +1035,6 @@ void clobberize(Graph& graph, Node* node, const ReadFunctor& read, const WriteFu
         
     case ThrowReferenceError:
         write(SideState);
-        read(HeapObjectCount);
-        write(HeapObjectCount);
         return;
         
     case CountExecution:
diff --git a/Source/JavaScriptCore/dfg/DFGNode.h b/Source/JavaScriptCore/dfg/DFGNode.h
index 8746846..794ef3d 100644
@@ -1118,6 +1118,14 @@ struct Node {
         }
     }
 
+    bool isFunctionTerminal()
+    {
+        if (isTerminal() && !numSuccessors())
+            return true;
+
+        return false;
+    }
+
     unsigned targetBytecodeOffsetDuringParsing()
     {
         ASSERT(isJump());
diff --git a/Source/JavaScriptCore/dfg/DFGSpeculativeJIT64.cpp b/Source/JavaScriptCore/dfg/DFGSpeculativeJIT64.cpp
index 9a69d12..264c4df 100644
@@ -771,6 +771,7 @@ void SpeculativeJIT::emitCall(Node* node)
             calleeGPR = callee.gpr();
             callee.use();
 
+            shuffleData.tagTypeNumber = GPRInfo::tagTypeNumberRegister;
             shuffleData.numLocals = m_jit.graph().frameRegisterCount();
             shuffleData.callee = ValueRecovery::inGPR(calleeGPR, DataFormatJS);
             shuffleData.args.resize(numPassedArgs);
@@ -868,7 +869,7 @@ void SpeculativeJIT::emitCall(Node* node)
         m_jit.addPtr(TrustedImm32(m_jit.graph().stackPointerOffset() * sizeof(Register)), GPRInfo::callFrameRegister, JITCompiler::stackPointerRegister);
     }
 
-    callLinkInfo->setUpCall(callType, m_currentNode->origin.semantic,  calleeGPR);    
+    callLinkInfo->setUpCall(callType, m_currentNode->origin.semantic, calleeGPR);
     m_jit.addJSCall(fastCall, slowCall, targetToCheck, callLinkInfo);
 }
 
diff --git a/Source/JavaScriptCore/dfg/DFGTierUpCheckInjectionPhase.cpp b/Source/JavaScriptCore/dfg/DFGTierUpCheckInjectionPhase.cpp
index 038df9c..9609534 100644
@@ -95,7 +95,7 @@ public:
             }
             
             NodeAndIndex terminal = block->findTerminal();
-            if (terminal.node->op() == Return) {
+            if (terminal.node->isFunctionTerminal()) {
                 insertionSet.insertNode(
                     terminal.index, SpecNone, CheckTierUpAtReturn, terminal.node->origin);
             }
diff --git a/Source/JavaScriptCore/ftl/FTLCapabilities.cpp b/Source/JavaScriptCore/ftl/FTLCapabilities.cpp
index c8c6fa4..f3617be 100644
@@ -130,10 +130,16 @@ inline CapabilityLevel canCompile(Node* node)
     case NotifyWrite:
     case StoreBarrier:
     case Call:
+    case TailCall:
+    case TailCallInlinedCaller:
     case Construct:
     case CallVarargs:
-    case CallForwardVarargs:
+    case TailCallVarargs:
+    case TailCallVarargsInlinedCaller:
     case ConstructVarargs:
+    case CallForwardVarargs:
+    case TailCallForwardVarargs:
+    case TailCallForwardVarargsInlinedCaller:
     case ConstructForwardVarargs:
     case LoadVarargs:
     case ValueToInt32:
diff --git a/Source/JavaScriptCore/ftl/FTLCompile.cpp b/Source/JavaScriptCore/ftl/FTLCompile.cpp
index fd46218..a72a0c7 100644
@@ -618,6 +618,22 @@ static void fixFunctionBasedOnStackMaps(
             call.link(vm, linkBuffer, state.finalizer->handleExceptionsLinkBuffer->entrypoint());
         });
     }
+
+    adjustCallICsForStackmaps(state.jsTailCalls, recordMap);
+
+    for (unsigned i = state.jsTailCalls.size(); i--;) {
+        JSTailCall& call = state.jsTailCalls[i];
+
+        CCallHelpers fastPathJIT(&vm, codeBlock);
+        call.emit(*state.jitCode.get(), fastPathJIT);
+
+        char* startOfIC = bitwise_cast<char*>(generatedFunction) + call.m_instructionOffset;
+        size_t sizeOfIC = call.estimatedSize();
+
+        generateInlineIfPossibleOutOfLineIfNot(state, vm, codeBlock, fastPathJIT, startOfIC, sizeOfIC, "tail call inline cache", [&] (LinkBuffer& linkBuffer, CCallHelpers&, bool) {
+            call.link(vm, linkBuffer);
+        });
+    }
     
     auto iter = recordMap.find(state.handleStackOverflowExceptionStackmapID);
     // It's sort of remotely possible that we won't have an in-band exception handling
diff --git a/Source/JavaScriptCore/ftl/FTLInlineCacheSize.cpp b/Source/JavaScriptCore/ftl/FTLInlineCacheSize.cpp
index f5bbfcb..94c7ace 100644
@@ -82,6 +82,15 @@ size_t sizeOfCallVarargs()
 #endif
 }
 
+size_t sizeOfTailCallVarargs()
+{
+#if CPU(ARM64)
+    return 188 + sizeOfCallVarargs();
+#else
+    return 151 + sizeOfCallVarargs();
+#endif
+}
+
 size_t sizeOfCallForwardVarargs()
 {
 #if CPU(ARM64)
@@ -91,6 +100,15 @@ size_t sizeOfCallForwardVarargs()
 #endif
 }
 
+size_t sizeOfTailCallForwardVarargs()
+{
+#if CPU(ARM64)
+    return 188 + sizeOfCallForwardVarargs();
+#else
+    return 151 + sizeOfCallForwardVarargs();
+#endif
+}
+
 size_t sizeOfConstructVarargs()
 {
     return sizeOfCallVarargs(); // Should be the same size.
@@ -121,9 +139,15 @@ size_t sizeOfICFor(Node* node)
     case Construct:
         return sizeOfCall();
     case CallVarargs:
+    case TailCallVarargsInlinedCaller:
         return sizeOfCallVarargs();
+    case TailCallVarargs:
+        return sizeOfTailCallVarargs();
     case CallForwardVarargs:
+    case TailCallForwardVarargsInlinedCaller:
         return sizeOfCallForwardVarargs();
+    case TailCallForwardVarargs:
+        return sizeOfTailCallForwardVarargs();
     case ConstructVarargs:
         return sizeOfConstructVarargs();
     case ConstructForwardVarargs:
@@ -131,7 +155,7 @@ size_t sizeOfICFor(Node* node)
     case In:
         return sizeOfIn();
     default:
-        return 0;
+        RELEASE_ASSERT_NOT_REACHED();
     }
 }
 
diff --git a/Source/JavaScriptCore/ftl/FTLInlineCacheSize.h b/Source/JavaScriptCore/ftl/FTLInlineCacheSize.h
index 82f3bbc..fed850b 100644
@@ -40,7 +40,9 @@ size_t sizeOfGetById();
 size_t sizeOfPutById();
 size_t sizeOfCall();
 size_t sizeOfCallVarargs();
+size_t sizeOfTailCallVarargs();
 size_t sizeOfCallForwardVarargs();
+size_t sizeOfTailCallForwardVarargs();
 size_t sizeOfConstructVarargs();
 size_t sizeOfConstructForwardVarargs();
 size_t sizeOfIn();
diff --git a/Source/JavaScriptCore/ftl/FTLJSCall.cpp b/Source/JavaScriptCore/ftl/FTLJSCall.cpp
index 2877b84..1db1719 100644
@@ -48,7 +48,7 @@ JSCall::JSCall(unsigned stackmapID, Node* node)
     , m_stackmapID(stackmapID)
     , m_instructionOffset(0)
 {
-    ASSERT(node->op() == Call || node->op() == Construct);
+    ASSERT(node->op() == Call || node->op() == Construct || node->op() == TailCallInlinedCaller);
 }
 
 void JSCall::emit(CCallHelpers& jit, unsigned stackSizeForLocals)
diff --git a/Source/JavaScriptCore/ftl/FTLJSCallBase.cpp b/Source/JavaScriptCore/ftl/FTLJSCallBase.cpp
index f200a9f..9582090 100644
@@ -52,19 +52,34 @@ void JSCallBase::emit(CCallHelpers& jit)
 {
     m_callLinkInfo = jit.codeBlock()->addCallLinkInfo();
     
+    if (CallLinkInfo::callModeFor(m_type) == CallMode::Tail)
+        jit.emitRestoreCalleeSaves();
+
     CCallHelpers::Jump slowPath = jit.branchPtrWithPatch(
         CCallHelpers::NotEqual, GPRInfo::regT0, m_targetToCheck,
         CCallHelpers::TrustedImmPtr(0));
-    
-    m_fastCall = jit.nearCall();
-    CCallHelpers::Jump done = jit.jump();
-    
+
+    CCallHelpers::Jump done;
+
+    if (CallLinkInfo::callModeFor(m_type) == CallMode::Tail) {
+        jit.prepareForTailCallSlow();
+        m_fastCall = jit.nearTailCall();
+    } else {
+        m_fastCall = jit.nearCall();
+        done = jit.jump();
+    }
+
     slowPath.link(&jit);
-    
+
     jit.move(CCallHelpers::TrustedImmPtr(m_callLinkInfo), GPRInfo::regT2);
     m_slowCall = jit.nearCall();
-    
-    done.link(&jit);
+
+    if (CallLinkInfo::callModeFor(m_type) == CallMode::Tail)
+        jit.abortWithReason(JITDidReturnFromTailCall);
+    else
+        done.link(&jit);
+
+    m_callLinkInfo->setUpCall(m_type, m_origin, GPRInfo::regT0);
 }
 
 void JSCallBase::link(VM& vm, LinkBuffer& linkBuffer)
@@ -72,9 +87,8 @@ void JSCallBase::link(VM& vm, LinkBuffer& linkBuffer)
     linkBuffer.link(
         m_slowCall, FunctionPtr(vm.getCTIStub(linkCallThunkGenerator).code().executableAddress()));
 
-    m_callLinkInfo->setUpCallFromFTL(m_type, m_origin, linkBuffer.locationOfNearCall(m_slowCall),
-        linkBuffer.locationOf(m_targetToCheck), linkBuffer.locationOfNearCall(m_fastCall),
-        GPRInfo::regT0);
+    m_callLinkInfo->setCallLocations(linkBuffer.locationOfNearCall(m_slowCall),
+        linkBuffer.locationOf(m_targetToCheck), linkBuffer.locationOfNearCall(m_fastCall));
 }
 
 } } // namespace JSC::FTL
diff --git a/Source/JavaScriptCore/ftl/FTLJSCallBase.h b/Source/JavaScriptCore/ftl/FTLJSCallBase.h
index 595ac69..66073ef 100644
@@ -50,7 +50,7 @@ public:
     void emit(CCallHelpers&);
     void link(VM&, LinkBuffer&);
     
-private:
+protected:
     CallLinkInfo::CallType m_type;
     CodeOrigin m_origin;
     CCallHelpers::DataLabelPtr m_targetToCheck;
diff --git a/Source/JavaScriptCore/ftl/FTLJSCallVarargs.cpp b/Source/JavaScriptCore/ftl/FTLJSCallVarargs.cpp
index ac87a3c..6dce3a5 100644
@@ -51,12 +51,15 @@ JSCallVarargs::JSCallVarargs(unsigned stackmapID, Node* node)
     , m_node(node)
     , m_callBase(
         (node->op() == ConstructVarargs || node->op() == ConstructForwardVarargs)
-        ? CallLinkInfo::ConstructVarargs : CallLinkInfo::CallVarargs,
+        ? CallLinkInfo::ConstructVarargs : (node->op() == TailCallVarargs || node->op() == TailCallForwardVarargs)
+        ? CallLinkInfo::TailCallVarargs : CallLinkInfo::CallVarargs,
         node->origin.semantic)
     , m_instructionOffset(0)
 {
     ASSERT(
         node->op() == CallVarargs || node->op() == CallForwardVarargs
+        || node->op() == TailCallVarargsInlinedCaller || node->op() == TailCallForwardVarargsInlinedCaller
+        || node->op() == TailCallVarargs || node->op() == TailCallForwardVarargs
         || node->op() == ConstructVarargs || node->op() == ConstructForwardVarargs);
 }
 
@@ -83,11 +86,15 @@ void JSCallVarargs::emit(CCallHelpers& jit, int32_t spillSlotsOffset)
     
     switch (m_node->op()) {
     case CallVarargs:
+    case TailCallVarargs:
+    case TailCallVarargsInlinedCaller:
     case ConstructVarargs:
         argumentsGPR = GPRInfo::argumentGPR1;
         thisGPR = GPRInfo::argumentGPR2;
         break;
     case CallForwardVarargs:
+    case TailCallForwardVarargs:
+    case TailCallForwardVarargsInlinedCaller:
     case ConstructForwardVarargs:
         thisGPR = GPRInfo::argumentGPR1;
         forwarding = true;
@@ -196,7 +203,7 @@ void JSCallVarargs::emit(CCallHelpers& jit, int32_t spillSlotsOffset)
     // Henceforth we make the call. The base FTL call machinery expects the callee in regT0 and for the
     // stack frame to already be set up, which it is.
     jit.store64(GPRInfo::regT0, CCallHelpers::calleeFrameSlot(JSStack::Callee));
-    
+
     m_callBase.emit(jit);
     
     // Undo the damage we've done.
diff --git a/Source/JavaScriptCore/ftl/FTLJSTailCall.cpp b/Source/JavaScriptCore/ftl/FTLJSTailCall.cpp
new file mode 100644
index 0000000..4347be0
--- /dev/null
+++ b/Source/JavaScriptCore/ftl/FTLJSTailCall.cpp
@@ -0,0 +1,326 @@
+/*
+ * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */
+
+#include "config.h"
+#include "FTLJSTailCall.h"
+
+#if ENABLE(FTL_JIT)
+
+#include "CallFrameShuffler.h"
+#include "DFGNode.h"
+#include "FTLJITCode.h"
+#include "FTLLocation.h"
+#include "FTLStackMaps.h"
+#include "JSCJSValueInlines.h"
+#include "LinkBuffer.h"
+
+namespace JSC { namespace FTL {
+
+using namespace DFG;
+
+namespace {
+
+FTL::Location getRegisterWithAddend(const ExitValue& value, StackMaps::Record& record, StackMaps& stackmaps)
+{
+    if (value.kind() != ExitValueArgument)
+        return { };
+
+    auto location =
+        FTL::Location::forStackmaps(&stackmaps, record.locations[value.exitArgument().argument()]);
+
+    if (location.kind() != Location::Register || !location.addend())
+        return { };
+
+    RELEASE_ASSERT(location.isGPR());
+    return location;
+}
+
+ValueRecovery recoveryFor(const ExitValue& value, StackMaps::Record& record, StackMaps& stackmaps)
+{
+    switch (value.kind()) {
+    case ExitValueConstant:
+        return ValueRecovery::constant(value.constant());
+
+    case ExitValueArgument: {
+        auto location =
+            FTL::Location::forStackmaps(&stackmaps, record.locations[value.exitArgument().argument()]);
+        auto format = value.exitArgument().format();
+
+        switch (location.kind()) {
+        case Location::Register:
+            // We handle the addend outside
+            return ValueRecovery::inRegister(location.dwarfReg().reg(), format);
+
+        case Location::Indirect:
+            // Oh LLVM, you crazy...
+            RELEASE_ASSERT(location.dwarfReg().reg() == Reg(MacroAssembler::framePointerRegister));
+            RELEASE_ASSERT(!(location.offset() % sizeof(void*)));
+            return ValueRecovery::displacedInJSStack(VirtualRegister { static_cast<int>(location.offset() / sizeof(void*)) }, format);
+
+        case Location::Constant:
+            return ValueRecovery::constant(JSValue::decode(location.constant()));
+
+        default:
+            RELEASE_ASSERT_NOT_REACHED();
+        }
+    }
+
+    case ExitValueInJSStack:
+        return ValueRecovery::displacedInJSStack(value.virtualRegister(), DataFormatJS);
+
+    case ExitValueInJSStackAsInt32:
+        return ValueRecovery::displacedInJSStack(value.virtualRegister(), DataFormatInt32);
+
+    case ExitValueInJSStackAsInt52:
+        return ValueRecovery::displacedInJSStack(value.virtualRegister(), DataFormatInt52);
+
+    case ExitValueInJSStackAsDouble:
+        return ValueRecovery::displacedInJSStack(value.virtualRegister(), DataFormatDouble);
+
+    default:
+        RELEASE_ASSERT_NOT_REACHED();
+    }
+}
+
+// This computes an estimated size (in bytes) for the sequence of
+// instructions required to load, box, and store a value of a given
+// type, assuming no spilling is required.
+uint32_t sizeFor(DataFormat format)
+{
+    switch (format) {
+    case DataFormatInt32:
+        // Boxing is zero-extending and tagging
+#if CPU(X86_64)
+        return 6 + sizeFor(DataFormatJS);
+#elif CPU(ARM64)
+        return 8 + sizeFor(DataFormatJS);
+#else
+        return sizeOfZeroExtend32 + sizeOfOrImm64 + sizeFor(DataFormatJS);
+#endif
+
+    case DataFormatInt52:
+        // Boxing is first a conversion to StrictInt52, then
+        // StrictInt52 boxing
+#if CPU(X86_64)
+        return 4 + sizeFor(DataFormatStrictInt52);
+#elif CPU(ARM64)
+        return 4 + sizeFor(DataFormatStrictInt52);
+#else
+        return sizeOfShiftImm32 + sizeFor(DataFormatStrictInt52);
+#endif
+
+    case DataFormatStrictInt52:
+        // Boxing is first a conversion to double, then double boxing
+#if CPU(X86_64)
+        return 8 + sizeFor(DataFormatDouble);
+#elif CPU(ARM64)
+        return 4 + sizeFor(DataFormatDouble);
+#else
+        return sizeOfConvertInt64ToDouble + sizeFor(DataFormatDouble);
+#endif
+
+    case DataFormatDouble:
+        // Boxing is purifying, moving to a GPR, and tagging
+#if CPU(X86_64)
+        return 38 + sizeFor(DataFormatJS);
+#elif CPU(ARM64)
+        return 28 + sizeFor(DataFormatJS);
+#else
+        return sizeOfPurifyNaN + sizeOfSubImm64 + sizeOfMoveDoubleTo64 + sizeFor(DataFormatJS);
+#endif
+
+    case DataFormatBoolean:
+        // Boxing is adding ValueFalse
+#if CPU(X86_64)
+        return 4 + sizeFor(DataFormatJS);
+#elif CPU(ARM64)
+        return 4 + sizeFor(DataFormatJS);
+#else
+        return sizeOfAddImm32 + sizeFor(DataFormatJS);
+#endif
+
+    case DataFormatJS:
+        // We will load (in a GPR or FPR) then store the value
+#if CPU(X86_64)
+        return 8;
+#elif CPU(ARM64)
+        return 8;
+#else
+        return sizeOfLoad + sizeOfStore;
+#endif
+
+    default:
+        RELEASE_ASSERT_NOT_REACHED();
+    }
+}
+
+} // anonymous namespace
+
+JSTailCall::JSTailCall(unsigned stackmapID, Node* node, Vector<ExitValue> arguments)
+    : JSCallBase(CallLinkInfo::TailCall, node->origin.semantic)
+    , m_stackmapID(stackmapID)
+    , m_arguments { WTF::move(arguments) }
+    , m_instructionOffset(0)
+{
+    ASSERT(node->op() == TailCall);
+    ASSERT(numArguments() == node->numChildren() - 1);
+
+    // Estimate the size of the inline cache, assuming that every
+    // value goes from the stack to the stack (in practice, this will
+    // seldom be true, giving us some amount of leeway) and that no
+    // spilling will occur (in practice, this will almost always be
+    // true).
+
+    // We first compute the new frame base and load the fp/lr
+    // registers' final values. On debug builds, we also need to
+    // account for the fp-sp delta check (twice: fast and slow path).
+#if CPU(X86_64)
+    m_estimatedSize = 56;
+#if !ASSERT_DISABLED
+    m_estimatedSize += 26;
+#  endif
+#elif CPU(ARM64)
+    m_estimatedSize = 44;
+#if !ASSERT_DISABLED
+    m_estimatedSize += 24;
+#  endif
+#else
+    UNREACHABLE_FOR_PLATFORM();
+#endif
+
+    // Arguments will probably be loaded & stored twice (fast & slow)
+    for (ExitValue& arg : m_arguments)
+        m_estimatedSize += 2 * sizeFor(arg.dataFormat());
+
+    // We also have the slow path check, the two calls, and the
+    // CallLinkInfo load for the slow path
+#if CPU(X86_64)
+    m_estimatedSize += 55;
+#elif CPU(ARM64)
+    m_estimatedSize += 44;
+#else
+    m_estimatedSize += sizeOfCall + sizeOfJump + sizeOfLoad + sizeOfSlowPathCheck;
+#endif
+}
+
+void JSTailCall::emit(JITCode& jitCode, CCallHelpers& jit)
+{
+    StackMaps::Record* record { nullptr };
+    
+    for (unsigned i = jitCode.stackmaps.records.size(); i--;) {
+        record = &jitCode.stackmaps.records[i];
+        if (record->patchpointID == m_stackmapID)
+            break;
+    }
+
+    RELEASE_ASSERT(record->patchpointID == m_stackmapID);
+
+    m_callLinkInfo = jit.codeBlock()->addCallLinkInfo();
+
+    CallFrameShuffleData shuffleData;
+
+    // The callee was the first passed argument, and must be in a GPR because
+    // we used the "anyregcc" calling convention
+    auto calleeLocation =
+        FTL::Location::forStackmaps(nullptr, record->locations[0]);
+    GPRReg calleeGPR = calleeLocation.directGPR();
+    shuffleData.callee = ValueRecovery::inGPR(calleeGPR, DataFormatJS);
+
+    // The tag type number was the second argument, if there was one
+    auto tagTypeNumberLocation =
+        FTL::Location::forStackmaps(&jitCode.stackmaps, record->locations[1]);
+    if (tagTypeNumberLocation.isGPR() && !tagTypeNumberLocation.addend())
+        shuffleData.tagTypeNumber = tagTypeNumberLocation.directGPR();
+
+    shuffleData.args.grow(numArguments());
+    HashMap<Reg, Vector<std::pair<ValueRecovery*, int32_t>>> withAddend;
+    size_t numAddends { 0 };
+    for (size_t i = 0; i < numArguments(); ++i) {
+        shuffleData.args[i] = recoveryFor(m_arguments[i], *record, jitCode.stackmaps);
+        if (FTL::Location addend = getRegisterWithAddend(m_arguments[i], *record, jitCode.stackmaps)) {
+            withAddend.add(
+                addend.dwarfReg().reg(),
+                Vector<std::pair<ValueRecovery*, int32_t>>()).iterator->value.append(
+                    std::make_pair(&shuffleData.args[i], addend.addend()));
+            numAddends++;
+        }
+    }
+
+    numAddends = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), numAddends);
+
+    shuffleData.numLocals = static_cast<int64_t>(jitCode.stackmaps.stackSizeForLocals()) / sizeof(void*) + numAddends;
+
+    ASSERT(!numAddends == withAddend.isEmpty());
+
+    if (!withAddend.isEmpty()) {
+        jit.subPtr(MacroAssembler::TrustedImm32(numAddends * sizeof(void*)), MacroAssembler::stackPointerRegister);
+        VirtualRegister spillBase { 1 - static_cast<int>(shuffleData.numLocals) };
+        for (auto entry : withAddend) {
+            for (auto pair : entry.value) {
+                ASSERT(numAddends > 0);
+                VirtualRegister spillSlot { spillBase + --numAddends };
+                ASSERT(entry.key.isGPR());
+                jit.addPtr(MacroAssembler::TrustedImm32(pair.second), entry.key.gpr());
+                jit.storePtr(entry.key.gpr(), CCallHelpers::addressFor(spillSlot));
+                jit.subPtr(MacroAssembler::TrustedImm32(pair.second), entry.key.gpr());
+                *pair.first = ValueRecovery::displacedInJSStack(spillSlot, pair.first->dataFormat());
+            }
+        }
+        ASSERT(numAddends < stackAlignmentRegisters());
+    }
+
+    shuffleData.args.resize(numArguments());
+    for (size_t i = 0; i < numArguments(); ++i)
+        shuffleData.args[i] = recoveryFor(m_arguments[i], *record, jitCode.stackmaps);
+
+    shuffleData.setupCalleeSaveRegisters(jit.codeBlock());
+
+    CCallHelpers::Jump slowPath = jit.branchPtrWithPatch(
+        CCallHelpers::NotEqual, calleeGPR, m_targetToCheck,
+        CCallHelpers::TrustedImmPtr(0));
+
+    m_callLinkInfo->setFrameShuffleData(shuffleData);
+    CallFrameShuffler(jit, shuffleData).prepareForTailCall();
+
+    m_fastCall = jit.nearTailCall();
+
+    slowPath.link(&jit);
+
+    CallFrameShuffler slowPathShuffler(jit, shuffleData);
+    slowPathShuffler.setCalleeJSValueRegs(JSValueRegs { GPRInfo::regT0 });
+    slowPathShuffler.prepareForSlowPath();
+
+    jit.move(CCallHelpers::TrustedImmPtr(m_callLinkInfo), GPRInfo::regT2);
+
+    m_slowCall = jit.nearCall();
+
+    jit.abortWithReason(JITDidReturnFromTailCall);
+
+    m_callLinkInfo->setUpCall(m_type, m_origin, calleeGPR);
+}
+
+} } // namespace JSC::FTL
+
+#endif // ENABLE(FTL_JIT)
diff --git a/Source/JavaScriptCore/ftl/FTLJSTailCall.h b/Source/JavaScriptCore/ftl/FTLJSTailCall.h
new file mode 100644
index 0000000..50b4f0c
--- /dev/null
+++ b/Source/JavaScriptCore/ftl/FTLJSTailCall.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */
+
+#ifndef FTLJSTailCall_h
+#define FTLJSTailCall_h
+
+#if ENABLE(FTL_JIT)
+
+#include "FTLExitArgumentList.h"
+#include "FTLExitValue.h"
+#include "FTLJSCallBase.h"
+
+namespace JSC {
+
+namespace DFG {
+struct Node;
+}
+
+namespace FTL {
+
+class JSTailCall : public JSCallBase {
+public:
+    JSTailCall(unsigned stackmapID, DFG::Node*, Vector<ExitValue> arguments);
+
+    void emit(JITCode&, CCallHelpers&);
+    
+    unsigned stackmapID() const { return m_stackmapID; }
+
+    unsigned estimatedSize() const { return m_estimatedSize; }
+
+    unsigned numArguments() const { return m_arguments.size(); }
+
+    bool operator<(const JSTailCall& other) const
+    {
+        return m_instructionOffset < other.m_instructionOffset;
+    }
+    
+private:
+    unsigned m_stackmapID;
+    Vector<ExitValue> m_arguments;
+    unsigned m_estimatedSize;
+
+public:
+    uint32_t m_instructionOffset;
+};
+
+} } // namespace JSC::FTL
+
+#endif // ENABLE(FTL_JIT)
+
+#endif // FTLJSTailCall_h
+
diff --git a/Source/JavaScriptCore/ftl/FTLLocation.h b/Source/JavaScriptCore/ftl/FTLLocation.h
index 72718bb..1dec974 100644
@@ -120,7 +120,9 @@ public:
         return u.constant;
     }
     
-    bool operator!() const { return kind() == Unprocessed && !u.variable.offset; }
+    explicit operator bool() const { return kind() != Unprocessed || u.variable.offset; }
+
+    bool operator!() const { return !static_cast<bool>(*this); }
     
     bool isHashTableDeletedValue() const { return kind() == Unprocessed && u.variable.offset; }
     
diff --git a/Source/JavaScriptCore/ftl/FTLLowerDFGToLLVM.cpp b/Source/JavaScriptCore/ftl/FTLLowerDFGToLLVM.cpp
index 398ba57..986b435 100644
@@ -173,7 +173,11 @@ public:
             for (Node* node : *block) {
                 switch (node->op()) {
                 case CallVarargs:
+                case TailCallVarargs:
+                case TailCallVarargsInlinedCaller:
                 case CallForwardVarargs:
+                case TailCallForwardVarargs:
+                case TailCallForwardVarargsInlinedCaller:
                 case ConstructVarargs:
                 case ConstructForwardVarargs:
                     hasVarargs = true;
@@ -723,11 +727,19 @@ private:
             compileLogicalNot();
             break;
         case Call:
+        case TailCallInlinedCaller:
         case Construct:
             compileCallOrConstruct();
             break;
+        case TailCall:
+            compileTailCall();
+            break;
         case CallVarargs:
         case CallForwardVarargs:
+        case TailCallVarargs:
+        case TailCallVarargsInlinedCaller:
+        case TailCallForwardVarargs:
+        case TailCallForwardVarargsInlinedCaller:
         case ConstructVarargs:
         case ConstructForwardVarargs:
             compileCallOrConstructVarargs();
@@ -4400,6 +4412,41 @@ private:
         
         setJSValue(call);
     }
+
+    void compileTailCall()
+    {
+        int numArgs = m_node->numChildren() - 1;
+        ExitArgumentList exitArguments;
+        exitArguments.reserveCapacity(numArgs + 6);
+
+        unsigned stackmapID = m_stackmapIDs++;
+        exitArguments.append(lowJSValue(m_graph.varArgChild(m_node, 0)));
+        exitArguments.append(m_tagTypeNumber);
+
+        Vector<ExitValue> callArguments(numArgs);
+
+        bool needsTagTypeNumber { false };
+        for (int i = 0; i < numArgs; ++i) {
+            callArguments[i] =
+                exitValueForTailCall(exitArguments, m_graph.varArgChild(m_node, 1 + i).node());
+            if (callArguments[i].dataFormat() == DataFormatInt32)
+                needsTagTypeNumber = true;
+        }
+
+        JSTailCall tailCall(stackmapID, m_node, WTF::move(callArguments));
+
+        exitArguments.insert(0, m_out.constInt32(needsTagTypeNumber ? 2 : 1));
+        exitArguments.insert(0, constNull(m_out.ref8));
+        exitArguments.insert(0, m_out.constInt32(tailCall.estimatedSize()));
+        exitArguments.insert(0, m_out.constInt64(stackmapID));
+
+        LValue call =
+            m_out.call(m_out.patchpointVoidIntrinsic(), exitArguments);
+        setInstructionCallingConvention(call, LLVMAnyRegCallConv);
+        m_out.unreachable();
+
+        m_ftlState.jsTailCalls.append(tailCall);
+    }
     
     void compileCallOrConstructVarargs()
     {
@@ -4410,10 +4457,14 @@ private:
         
         switch (m_node->op()) {
         case CallVarargs:
+        case TailCallVarargs:
+        case TailCallVarargsInlinedCaller:
         case ConstructVarargs:
             jsArguments = lowJSValue(m_node->child2());
             break;
         case CallForwardVarargs:
+        case TailCallForwardVarargs:
+        case TailCallForwardVarargsInlinedCaller:
         case ConstructForwardVarargs:
             break;
         default:
@@ -4440,8 +4491,16 @@ private:
         setInstructionCallingConvention(call, LLVMCCallConv);
         
         m_ftlState.jsCallVarargses.append(JSCallVarargs(stackmapID, m_node));
-        
-        setJSValue(call);
+
+        switch (m_node->op()) {
+        case TailCallVarargs:
+        case TailCallForwardVarargs:
+            m_out.unreachable();
+            break;
+
+        default:
+            setJSValue(call);
+        }
     }
     
     void compileLoadVarargs()
@@ -8256,7 +8315,14 @@ private:
     }
     void callPreflight()
     {
-        callPreflight(m_node->origin.semantic);
+        CodeOrigin codeOrigin = m_node->origin.semantic;
+
+        if (m_node->op() == TailCallInlinedCaller
+            || m_node->op() == TailCallVarargsInlinedCaller
+            || m_node->op() == TailCallForwardVarargsInlinedCaller)
+            codeOrigin = *codeOrigin.inlineCallFrame->getCallerSkippingDeadFrames();
+
+        callPreflight(codeOrigin);
     }
     
     void callCheck()
@@ -8527,13 +8593,46 @@ private:
         DFG_CRASH(m_graph, m_node, toCString("Cannot find value for node: ", node).data());
         return ExitValue::dead();
     }
-    
+
     ExitValue exitArgument(ExitArgumentList& arguments, DataFormat format, LValue value)
     {
         ExitValue result = ExitValue::exitArgument(ExitArgument(format, arguments.size()));
         arguments.append(value);
         return result;
     }
+
+    ExitValue exitValueForTailCall(ExitArgumentList& arguments, Node* node)
+    {
+        ASSERT(node->shouldGenerate());
+        ASSERT(node->hasResult());
+
+        switch (node->op()) {
+        case JSConstant:
+        case Int52Constant:
+        case DoubleConstant:
+            return ExitValue::constant(node->asJSValue());
+
+        default:
+            break;
+        }
+
+        LoweredNodeValue value = m_jsValueValues.get(node);
+        if (isValid(value))
+            return exitArgument(arguments, DataFormatJS, value.value());
+
+        value = m_int32Values.get(node);
+        if (isValid(value))
+            return exitArgument(arguments, DataFormatInt32, value.value());
+
+        value = m_booleanValues.get(node);
+        if (isValid(value)) {
+            LValue valueToPass = m_out.zeroExt(value.value(), m_out.int32);
+            return exitArgument(arguments, DataFormatBoolean, valueToPass);
+        }
+
+        // Doubles and Int52 have been converted by ValueRep()
+        DFG_CRASH(m_graph, m_node, toCString("Cannot find value for node: ", node).data());
+    }
     
     bool doesKill(Edge edge)
     {
diff --git a/Source/JavaScriptCore/ftl/FTLState.h b/Source/JavaScriptCore/ftl/FTLState.h
index 42fb9d2..f8414ec 100644
@@ -37,6 +37,7 @@
 #include "FTLJITFinalizer.h"
 #include "FTLJSCall.h"
 #include "FTLJSCallVarargs.h"
+#include "FTLJSTailCall.h"
 #include "FTLStackMaps.h"
 #include "FTLState.h"
 #include <wtf/Noncopyable.h>
@@ -79,6 +80,7 @@ public:
     SegmentedVector<CheckInDescriptor> checkIns;
     Vector<JSCall> jsCalls;
     Vector<JSCallVarargs> jsCallVarargses;
+    Vector<JSTailCall> jsTailCalls;
     Vector<CString> codeSectionNames;
     Vector<CString> dataSectionNames;
     void* unwindDataSection;
diff --git a/Source/JavaScriptCore/jit/AssemblyHelpers.cpp b/Source/JavaScriptCore/jit/AssemblyHelpers.cpp
index eccedd9..8e3c270 100644
@@ -338,7 +338,7 @@ AssemblyHelpers::Jump AssemblyHelpers::emitExceptionCheck(ExceptionCheckKind kin
     
     if (width == NormalJumpWidth)
         return result;
-    
+
     PatchableJump realJump = patchableJump();
     result.link(this);
     
diff --git a/Source/JavaScriptCore/jit/CallFrameShuffleData.h b/Source/JavaScriptCore/jit/CallFrameShuffleData.h
index 44802e8..4610c5e 100644
@@ -39,6 +39,7 @@ struct CallFrameShuffleData {
     Vector<ValueRecovery> args;
 #if USE(JSVALUE64)
     RegisterMap<ValueRecovery> registers;
+    GPRReg tagTypeNumber { InvalidGPRReg };
 
     void setupCalleeSaveRegisters(CodeBlock*);
 #endif
diff --git a/Source/JavaScriptCore/jit/CallFrameShuffler.cpp b/Source/JavaScriptCore/jit/CallFrameShuffler.cpp
index 75edeb5..0afb591 100644
@@ -71,6 +71,10 @@ CallFrameShuffler::CallFrameShuffler(CCallHelpers& jit, const CallFrameShuffleDa
         else
             addNew(reg.fpr(), data.registers[reg]);
     }
+
+    m_tagTypeNumber = data.tagTypeNumber;
+    if (m_tagTypeNumber != InvalidGPRReg)
+        lockGPR(m_tagTypeNumber);
 #endif
 }
 
@@ -80,12 +84,12 @@ void CallFrameShuffler::dump(PrintStream& out) const
     static const char* dangerDelimiter       = " X-------------------------------X ";
     static const char* dangerBoundsDelimiter = " XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX ";
     static const char* emptySpace            = "                                   ";
-    ASSERT(m_alignedNewFrameSize <= numLocals());
     out.print("          ");
     out.print("           Old frame               ");
     out.print("           New frame               ");
     out.print("\n");
-    for (int i = 0; i < m_alignedOldFrameSize + numLocals() + 3; ++i) {
+    int totalSize = m_alignedOldFrameSize + std::max(numLocals(), m_alignedNewFrameSize) + 3;
+    for (int i = 0; i < totalSize; ++i) {
         VirtualRegister old { m_alignedOldFrameSize - i - 1 };
         VirtualRegister newReg { old + m_frameDelta };
 
@@ -204,6 +208,10 @@ void CallFrameShuffler::dump(PrintStream& out) const
         out.print("   Old frame offset is ", m_oldFrameOffset, "\n");
     if (m_newFrameOffset)
         out.print("   New frame offset is ", m_newFrameOffset, "\n");
+#if USE(JSVALUE64)
+    if (m_tagTypeNumber != InvalidGPRReg)
+        out.print("   TagTypeNumber is currently in ", m_tagTypeNumber, "\n");
+#endif
 }
 
 CachedRecovery* CallFrameShuffler::getCachedRecovery(ValueRecovery recovery)
@@ -247,17 +255,26 @@ void CallFrameShuffler::spill(CachedRecovery& cachedRecovery)
     ASSERT(cachedRecovery.recovery().isInRegisters());
 
     VirtualRegister spillSlot { 0 };
-    for (VirtualRegister slot = firstOld(); slot <= lastOld(); slot -= 1) {
-        ASSERT(slot < newAsOld(firstNew()));
+    for (VirtualRegister slot = firstOld(); slot <= lastOld(); slot += 1) {
+        if (slot >= newAsOld(firstNew()))
+            break;
+
         if (getOld(slot))
             continue;
 
         spillSlot = slot;
         break;
     }
-    // We must have enough slots to be able to fit the whole
-    // callee's frame for the slow path.
-    RELEASE_ASSERT(spillSlot.isLocal());
+    // We must have enough slots to be able to fit the whole callee's
+    // frame for the slow path - unless we are in the FTL. In that
+    // case, we are allowed to extend the frame *once*, since we are
+    // guaranteed to have enough available space for that.
+    if (spillSlot >= newAsOld(firstNew()) || !spillSlot.isLocal()) {
+        RELEASE_ASSERT(!m_didExtendFrame);
+        extendFrameIfNeeded();
+        spill(cachedRecovery);
+        return;
+    }
 
     if (verbose)
         dataLog("   * Spilling ", cachedRecovery.recovery(), " into ", spillSlot, "\n");
@@ -286,6 +303,38 @@ void CallFrameShuffler::emitDeltaCheck()
         dataLog("  Skipping the fp-sp delta check since there is too much pressure");
 }
 
+void CallFrameShuffler::extendFrameIfNeeded()
+{
+    ASSERT(!m_didExtendFrame);
+    ASSERT(!isUndecided());
+
+    VirtualRegister firstRead { firstOld() };
+    for (; firstRead <= virtualRegisterForLocal(0); firstRead += 1) {
+        if (getOld(firstRead))
+            break;
+    }
+    size_t availableSize = static_cast<size_t>(firstRead.offset() - firstOld().offset());
+    size_t wantedSize = m_newFrame.size() + m_newFrameOffset;
+
+    if (availableSize < wantedSize) {
+        size_t delta = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), wantedSize - availableSize);
+        m_oldFrame.grow(m_oldFrame.size() + delta);
+        for (size_t i = 0; i < delta; ++i)
+            m_oldFrame[m_oldFrame.size() - i - 1] = nullptr;
+        m_jit.subPtr(MacroAssembler::TrustedImm32(delta * sizeof(Register)), MacroAssembler::stackPointerRegister);
+
+        if (isSlowPath())
+            m_frameDelta = numLocals() + JSStack::CallerFrameAndPCSize;
+        else
+            m_oldFrameOffset = numLocals();
+
+        if (verbose)
+            dataLogF("  Not enough space - extending the old frame by %zu slots\n", delta);
+    }
+
+    m_didExtendFrame = true;
+}
+
 void CallFrameShuffler::prepareForSlowPath()
 {
     ASSERT(isUndecided());
@@ -296,7 +345,15 @@ void CallFrameShuffler::prepareForSlowPath()
     m_newFrameOffset = -JSStack::CallerFrameAndPCSize;
 
     if (verbose)
-        dataLog("\n\nPreparing frame for slow path call:\n", *this);
+        dataLog("\n\nPreparing frame for slow path call:\n");
+
+    // When coming from the FTL, we need to extend the frame. In other
+    // cases, we may end up extending the frame if we previously
+    // spilled things (e.g. in polymorphic cache).
+    extendFrameIfNeeded();
+
+    if (verbose)
+        dataLog(*this);
 
     prepareAny();
 
@@ -646,6 +703,11 @@ void CallFrameShuffler::prepareAny()
         ASSERT_UNUSED(writesOK, writesOK);
     }
 
+#if USE(JSVALUE64)
+    if (m_tagTypeNumber != InvalidGPRReg && m_newRegisters[m_tagTypeNumber])
+        releaseGPR(m_tagTypeNumber);
+#endif
+
     // Handle 2) by loading all registers. We don't have to do any
     // writes, since they have been taken care of above.
     if (verbose)
@@ -660,6 +722,11 @@ void CallFrameShuffler::prepareAny()
         ASSERT(cachedRecovery->targets().isEmpty());
     }
 
+#if USE(JSVALUE64)
+    if (m_tagTypeNumber != InvalidGPRReg)
+        releaseGPR(m_tagTypeNumber);
+#endif
+
     // At this point, we have read everything we cared about from the
     // stack, and written everything we had to to the stack.
     if (verbose)
diff --git a/Source/JavaScriptCore/jit/CallFrameShuffler.h b/Source/JavaScriptCore/jit/CallFrameShuffler.h
index 4ba41b2..c993e8c 100644
@@ -73,6 +73,21 @@ public:
         m_lockedRegisters.clear(gpr);
     }
 
+    void restoreGPR(GPRReg gpr)
+    {
+        if (!m_newRegisters[gpr])
+            return;
+
+        ensureGPR();
+#if USE(JSVALUE32_64)
+        GPRReg tempGPR { getFreeGPR() };
+        lockGPR(tempGPR);
+        ensureGPR();
+        releaseGPR(tempGPR);
+#endif
+        emitDisplace(*m_newRegisters[gpr]);
+    }
+
     // You can only take a snapshot if the recovery has not started
     // yet. The only operations that are valid before taking a
     // snapshot are lockGPR(), acquireGPR() and releaseGPR().
@@ -309,6 +324,10 @@ private:
         return reg >= firstOld() && reg <= lastOld();
     }
 
+    bool m_didExtendFrame { false };
+
+    void extendFrameIfNeeded();
+
     // This stores, for each slot in the new frame, information about
     // the recovery for the value that should eventually go into that
     // slot.
@@ -385,13 +404,19 @@ private:
     // We also use this to lock registers temporarily, for instance to
     // ensure that we have at least 2 available registers for loading
     // a pair on 32bits.
-    RegisterSet m_lockedRegisters;
+    mutable RegisterSet m_lockedRegisters;
 
     // This stores the current recoveries present in registers. A null
     // CachedRecovery means we can trash the current value as we don't
     // care about it. 
     RegisterMap<CachedRecovery*> m_registers;
 
+#if USE(JSVALUE64)
+    mutable GPRReg m_tagTypeNumber;
+
+    bool tryAcquireTagTypeNumber();
+#endif
+
     // This stores, for each register, information about the recovery
     // for the value that should eventually go into that register. The
     // only registers that have a target recovery will be callee-save
@@ -421,9 +446,26 @@ private:
                     nonTemp = reg;
             }
         }
+
+#if USE(JSVALUE64)
+        if (!nonTemp && m_tagTypeNumber != InvalidGPRReg && check(Reg { m_tagTypeNumber })) {
+            ASSERT(m_lockedRegisters.get(m_tagTypeNumber));
+            m_lockedRegisters.clear(m_tagTypeNumber);
+            nonTemp = Reg { m_tagTypeNumber };
+            m_tagTypeNumber = InvalidGPRReg;
+        }
+#endif
         return nonTemp;
     }
 
+    GPRReg getFreeTempGPR() const
+    {
+        Reg freeTempGPR { getFreeRegister([this] (Reg reg) { return reg.isGPR() && !m_newRegisters[reg]; }) };
+        if (!freeTempGPR)
+            return InvalidGPRReg;
+        return freeTempGPR.gpr();
+    }
+
     GPRReg getFreeGPR() const
     {
         Reg freeGPR { getFreeRegister([] (Reg reg) { return reg.isGPR(); }) };
@@ -519,6 +561,31 @@ private:
             });
     }
 
+    void ensureTempGPR()
+    {
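+        // Like ensureGPR(), but spill only from registers the new frame does
+        // not target, so the freed GPR is safe to clobber.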
+        if (getFreeTempGPR() != InvalidGPRReg)
+            return;
+
+        if (verbose)
+            dataLog("  Finding a temp GPR to spill\n");
+        ensureRegister(
+            [this] (const CachedRecovery& cachedRecovery) {
+                if (cachedRecovery.recovery().isInGPR()) {
+                    return !m_lockedRegisters.get(cachedRecovery.recovery().gpr()) 
+                        && !m_newRegisters[cachedRecovery.recovery().gpr()];
+                }
+#if USE(JSVALUE32_64)
+                if (cachedRecovery.recovery().technique() == InPair) {
+                    return !m_lockedRegisters.get(cachedRecovery.recovery().tagGPR())
+                        && !m_lockedRegisters.get(cachedRecovery.recovery().payloadGPR())
+                        && !m_newRegisters[cachedRecovery.recovery().tagGPR()]
+                        && !m_newRegisters[cachedRecovery.recovery().payloadGPR()];
+                }
+#endif
+                return false;
+            });
+    }
+
     void ensureGPR()
     {
         if (getFreeGPR() != InvalidGPRReg)
@@ -573,16 +640,24 @@ private:
     {
         ASSERT(jsValueRegs && !getNew(jsValueRegs));
         CachedRecovery* cachedRecovery = addCachedRecovery(recovery);
-        ASSERT(!cachedRecovery->wantedJSValueRegs());
-        cachedRecovery->setWantedJSValueRegs(jsValueRegs);
 #if USE(JSVALUE64)
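+        // addCachedRecovery() may return an existing recovery that is already
+        // wanted by other registers; unhook the old mapping before installing
+        // the new one.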
+        if (cachedRecovery->wantedJSValueRegs())
+            m_newRegisters[cachedRecovery->wantedJSValueRegs().gpr()] = nullptr;
         m_newRegisters[jsValueRegs.gpr()] = cachedRecovery;
 #else
+        if (JSValueRegs oldRegs { cachedRecovery->wantedJSValueRegs() }) {
+            if (oldRegs.payloadGPR())
+                m_newRegisters[oldRegs.payloadGPR()] = nullptr;
+            if (oldRegs.tagGPR())
+                m_newRegisters[oldRegs.tagGPR()] = nullptr;
+        }
         if (jsValueRegs.payloadGPR() != InvalidGPRReg)
             m_newRegisters[jsValueRegs.payloadGPR()] = cachedRecovery;
         if (jsValueRegs.tagGPR() != InvalidGPRReg)
             m_newRegisters[jsValueRegs.tagGPR()] = cachedRecovery;
 #endif
+        cachedRecovery->setWantedJSValueRegs(jsValueRegs);
     }
 
     void addNew(FPRReg fpr, ValueRecovery recovery)
index 31813cd..8039b3b 100644 (file)
@@ -87,9 +87,15 @@ void CallFrameShuffler::emitBox(CachedRecovery& cachedRecovery)
             m_jit.zeroExtend32ToPtr(
                 cachedRecovery.recovery().gpr(),
                 cachedRecovery.recovery().gpr());
-            // We have to do this the hard way.
-            m_jit.or64(MacroAssembler::TrustedImm64(TagTypeNumber),
-                cachedRecovery.recovery().gpr());
+            m_lockedRegisters.set(cachedRecovery.recovery().gpr());
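+            // Lock the destination so tryAcquireTagTypeNumber() cannot hand
+            // it out as the constant register.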
+            if (tryAcquireTagTypeNumber())
+                m_jit.or64(m_tagTypeNumber, cachedRecovery.recovery().gpr());
+            else {
+                // No free GPR to cache the constant, so fall back to a
+                // 64-bit immediate.
+                m_jit.or64(MacroAssembler::TrustedImm64(TagTypeNumber),
+                    cachedRecovery.recovery().gpr());
+            }
+            m_lockedRegisters.clear(cachedRecovery.recovery().gpr());
             cachedRecovery.setRecovery(
                 ValueRecovery::inGPR(cachedRecovery.recovery().gpr(), DataFormatJS));
             if (verbose)
@@ -141,7 +147,12 @@ void CallFrameShuffler::emitBox(CachedRecovery& cachedRecovery)
             ASSERT(resultGPR != InvalidGPRReg);
             m_jit.purifyNaN(cachedRecovery.recovery().fpr());
             m_jit.moveDoubleTo64(cachedRecovery.recovery().fpr(), resultGPR);
-            m_jit.sub64(MacroAssembler::TrustedImm64(TagTypeNumber), resultGPR);
+            m_lockedRegisters.set(resultGPR);
+            if (tryAcquireTagTypeNumber())
+                m_jit.sub64(m_tagTypeNumber, resultGPR);
+            else
+                m_jit.sub64(MacroAssembler::TrustedImm64(TagTypeNumber), resultGPR);
+            m_lockedRegisters.clear(resultGPR);
             updateRecovery(cachedRecovery, ValueRecovery::inGPR(resultGPR, DataFormatJS));
             if (verbose)
                 dataLog(" into ", cachedRecovery.recovery(), "\n");
@@ -337,6 +348,21 @@ void CallFrameShuffler::emitDisplace(CachedRecovery& cachedRecovery)
 
     ASSERT(m_registers[wantedReg] == &cachedRecovery);
 }
+
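+// Materializes the TagTypeNumber constant into a free GPR (kept locked) so
+// that boxing can use a register operand instead of re-emitting a 64-bit
+// immediate; returns false when no GPR can be spared.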
+bool CallFrameShuffler::tryAcquireTagTypeNumber()
+{
+    if (m_tagTypeNumber != InvalidGPRReg)
+        return true;
+
+    m_tagTypeNumber = getFreeGPR();
+
+    if (m_tagTypeNumber == InvalidGPRReg)
+        return false;
+
+    m_lockedRegisters.set(m_tagTypeNumber);
+    m_jit.move(MacroAssembler::TrustedImm64(TagTypeNumber), m_tagTypeNumber);
+    return true;
+}
 
 } // namespace JSC
 
index 3ffe75d..d406d5b 100644 (file)
@@ -193,6 +193,7 @@ void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned ca
 
     if (opcodeID == op_tail_call) {
         CallFrameShuffleData shuffleData;
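+        // The baseline JIT keeps TagTypeNumber pinned in tagTypeNumberRegister,
+        // so the shuffler can use it without materializing the constant.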
+        shuffleData.tagTypeNumber = GPRInfo::tagTypeNumberRegister;
         shuffleData.numLocals =
             instruction[4].u.operand - sizeof(CallerFrameAndPC) / sizeof(Register);
         shuffleData.args.resize(instruction[3].u.operand);
index 9f6ad1c..7ec2fa5 100644 (file)
@@ -55,6 +55,11 @@ public:
         : m_index(invalid())
     {
     }
+
+    Reg(WTF::HashTableDeletedValueType)
+        : m_index(deleted())
+    {
+    }
     
     Reg(MacroAssembler::RegisterID reg)
         : m_index(MacroAssembler::registerIndex(reg))
@@ -102,6 +107,8 @@ public:
     bool isSet() const { return m_index != invalid(); }
     bool operator!() const { return !isSet(); }
     explicit operator bool() const { return isSet(); }
+
+    bool isHashTableDeletedValue() const { return m_index == deleted(); }
     
     bool isGPR() const
     {
@@ -165,12 +172,34 @@ public:
 
 private:
     static uint8_t invalid() { return 0xff; }
+
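+    // Distinct sentinel used as WTF::HashTable's deleted value; must differ
+    // from invalid() and from any real register index.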
+    static uint8_t deleted() { return 0xfe; }
     
     uint8_t m_index;
 };
 
+struct RegHash {
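+    // Reg compares by plain integer index, so comparing against the empty or
+    // deleted sentinel values is safe.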
+    static unsigned hash(const Reg& key) { return key.hash(); }
+    static bool equal(const Reg& a, const Reg& b) { return a == b; }
+    static const bool safeToCompareToEmptyOrDeleted = true;
+};
+
 } // namespace JSC
 
+namespace WTF {
+
+template<typename T> struct DefaultHash;
+template<> struct DefaultHash<JSC::Reg> {
+    typedef JSC::RegHash Hash;
+};
+
+template<typename T> struct HashTraits;
+template<> struct HashTraits<JSC::Reg> : SimpleClassHashTraits<JSC::Reg> {
+    static const bool emptyValueIsZero = false;
+ };
+
+} // namespace WTF
+
 #endif // ENABLE(JIT)
 
 #endif // Reg_h
index 34f7251..bc258cb 100644 (file)
@@ -753,7 +753,7 @@ macro prepareForTailCall(callee, temp1, temp2, temp3)
     addi StackAlignment - 1 + CallFrameHeaderSize, temp2
     andi ~StackAlignmentMask, temp2
 
-    if ARM or SH4 or ARM64 or C_LOOP or MIPS
+    if ARM or ARMv7_TRADITIONAL or ARMv7 or SH4 or ARM64 or C_LOOP or MIPS
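+        # These targets keep the return PC in the link register: pop
+        # CallerFrameAndPC off the stack and reload lr from the caller frame.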
         addp 2 * PtrSize, sp
         subi 2 * PtrSize, temp2
         loadp PtrSize[cfr], lr
index 3ba65f0..cb292f6 100644 (file)
@@ -128,7 +128,7 @@ typedef const char* optionString;
     v(bool, forceProfilerBytecodeGeneration, false, nullptr) \
     \
     v(bool, enableFunctionDotArguments, true, nullptr) \
-    v(bool, enableTailCalls, false, nullptr) \
+    v(bool, enableTailCalls, true, nullptr) \
     \
     /* showDisassembly implies showDFGDisassembly. */ \
     v(bool, showDisassembly, false, "dumps disassembly of all JIT compiled code upon compilation") \
index d72bd21..52f0d7b 100644 (file)
 - path: es6/Promise_Promise[Symbol.species].js
   cmd: runES6 :fail
 - path: es6/proper_tail_calls_tail_call_optimisation_direct_recursion.js
-  cmd: runES6 :fail
+  cmd: runES6 :normal
 - path: es6/proper_tail_calls_tail_call_optimisation_mutual_recursion.js
-  cmd: runES6 :fail
+  cmd: runES6 :normal
 - path: es6/prototype_of_bound_functions_arrow_functions.js
   cmd: runES6 :fail
 - path: es6/prototype_of_bound_functions_basic_functions.js
diff --git a/Source/JavaScriptCore/tests/stress/dfg-tail-calls.js b/Source/JavaScriptCore/tests/stress/dfg-tail-calls.js
new file mode 100644 (file)
index 0000000..ef51c0c
--- /dev/null
@@ -0,0 +1,56 @@
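+// These tests rely on the observable effect of proper tail calls: when
+// caller() tail-calls callee(), caller's frame is replaced, so callee.caller
+// sees the function that invoked caller() rather than caller itself.
+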
+(function nonInlinedTailCall() {
+    function callee() { if (callee.caller != nonInlinedTailCall) throw new Error(); }
+    noInline(callee);
+
+    function caller() { "use strict"; return callee(); }
+
+    for (var i = 0; i < 10000; ++i)
+        caller();
+
+    function loop(n) { "use strict"; if (n > 0) return loop(n - 1); }
+    noInline(loop);
+
+    loop(1000000);
+})();
+
+(function inlinedTailCall() {
+    function callee() { if (callee.caller != inlinedTailCall) throw new Error(); }
+    function caller() { "use strict"; return callee(); }
+
+    for (var i = 0; i < 10000; ++i)
+        caller();
+
+    function loop(n) { "use strict"; if (n > 0) return loop(n - 1); }
+
+    loop(1000000);
+})();
+
+(function nonInlinedEmulatedTailCall() {
+    function emulator() { caller(); }
+    function callee() { if (callee.caller != emulator) throw new Error(); }
+    noInline(callee);
+    function caller() { "use strict"; return callee(); }
+
+    for (var i = 0; i < 10000; ++i)
+        emulator();
+
+    function pad(n) { "use strict"; return loop(n); }
+    function loop(n) { "use strict"; if (n > 0) return pad(n - 1); }
+    noInline(loop);
+
+    loop(1000000);
+})();
+
+(function inlinedEmulatedTailCall() {
+    function emulator() { caller(); }
+    function callee() { if (callee.caller != emulator) throw new Error(); }
+    function caller() { "use strict"; return callee(); }
+
+    for (var i = 0; i < 10000; ++i)
+        emulator();
+
+    function pad(n) { "use strict"; return loop(n); }
+    function loop(n) { "use strict"; if (n > 0) return pad(n - 1); }
+
+    loop(1000000);
+})();
diff --git a/Source/JavaScriptCore/tests/stress/mutual-tail-call-no-stack-overflow.js b/Source/JavaScriptCore/tests/stress/mutual-tail-call-no-stack-overflow.js
new file mode 100644 (file)
index 0000000..159c8cf
--- /dev/null
@@ -0,0 +1,71 @@
+function shouldThrow(func, errorMessage) {
+    var errorThrown = false;
+    var error = null;
+    try {
+        func();
+    } catch (e) {
+        errorThrown = true;
+        error = e;
+    }
+    if (!errorThrown)
+        throw new Error('not thrown');
+    if (String(error) !== errorMessage)
+        throw new Error(`bad error: ${String(error)}`);
+}
+
+function sloppyCountdown(n) {
+    function even(n) {
+        if (n == 0)
+            return n;
+        return odd(n - 1);
+    }
+
+    function odd(n) {
+        if (n == 1)
+            return n;
+        return even(n - 1);
+    }
+
+    if (n % 2 === 0)
+        return even(n);
+    else
+        return odd(n);
+}
+
+function strictCountdown(n) {
+    "use strict";
+
+    function even(n) {
+        if (n == 0)
+            return n;
+        return odd(n - 1);
+    }
+
+    function odd(n) {
+        if (n == 1)
+            return n;
+        return even(n - 1);
+    }
+
+    if (n % 2 === 0)
+        return even(n);
+    else
+        return odd(n);
+}
+
+shouldThrow(function () { sloppyCountdown(100000); }, "RangeError: Maximum call stack size exceeded.");
+strictCountdown(100000);
+
+// Parity alternating: the two functions tail-call each other with different
+// argument counts.
+function odd(n) {
+    "use strict";
+    if (n > 0)
+        return even(n, 0);
+}
+
+function even(n) {
+    "use strict";
+    return odd(n - 1);
+}
+
+odd(100000);
diff --git a/Source/JavaScriptCore/tests/stress/tail-call-no-stack-overflow.js b/Source/JavaScriptCore/tests/stress/tail-call-no-stack-overflow.js
new file mode 100644 (file)
index 0000000..b812737
--- /dev/null
@@ -0,0 +1,45 @@
+function shouldThrow(func, errorMessage) {
+    var errorThrown = false;
+    var error = null;
+    try {
+        func();
+    } catch (e) {
+        errorThrown = true;
+        error = e;
+    }
+    if (!errorThrown)
+        throw new Error('not thrown');
+    if (String(error) !== errorMessage)
+        throw new Error(`bad error: ${String(error)}`);
+}
+
+function sloppyLoop(n) {
+    if (n > 0)
+        return sloppyLoop(n - 1);
+}
+
+function strictLoop(n) {
+    "use strict";
+    if (n > 0)
+        return strictLoop(n - 1);
+}
+
+// We have two of these so that we can test different stack alignments
+function strictLoopArityFixup1(n, dummy) {
+    "use strict";
+    if (n > 0)
+        return strictLoopArityFixup1(n - 1);
+}
+
+function strictLoopArityFixup2(n, dummy1, dummy2) {
+    "use strict";
+    if (n > 0)
+        return strictLoopArityFixup2(n - 1);
+}
+
+shouldThrow(function () { sloppyLoop(100000); }, 'RangeError: Maximum call stack size exceeded.');
+
+// These should not throw
+strictLoop(100000);
+strictLoopArityFixup1(1000000);
+strictLoopArityFixup2(1000000);
diff --git a/Source/JavaScriptCore/tests/stress/tail-call-recognize.js b/Source/JavaScriptCore/tests/stress/tail-call-recognize.js
new file mode 100644 (file)
index 0000000..d4fbe5e
--- /dev/null
@@ -0,0 +1,178 @@
+function callerMustBeRun() {
+    if (!Object.is(callerMustBeRun.caller, runTests))
+        throw Error("Wrong caller, expected run but got ", callerMustBeRun.caller);
+}
+
+function callerMustBeStrict() {
+    var errorThrown = false;
+    try {
+        callerMustBeStrict.caller;
+    } catch (e) {
+        errorThrown = true;
+    }
+    if (!errorThrown)
+        throw Error("Wrong caller, expected strict caller but got ", callerMustBeStrict.caller);
+}
+
+function runTests() {
+    // Statement tests
+    (function simpleTailCall() {
+        "use strict";
+        return callerMustBeRun();
+    })();
+
+    (function noTailCallInTry() {
+        "use strict";
+        try {
+            return callerMustBeStrict();
+        } catch (e) {
+            throw e;
+        }
+    })();
+
+    (function tailCallInCatch() {
+        "use strict";
+        try { } catch (e) { return callerMustBeRun(); }
+    })();
+
+    (function tailCallInFinally() {
+        "use strict";
+        try { } finally { return callerMustBeRun(); }
+    })();
+
+    (function tailCallInFinallyWithCatch() {
+        "use strict";
+        try { } catch (e) { } finally { return callerMustBeRun(); }
+    })();
+
+    (function tailCallInFinallyWithCatchTaken() {
+        "use strict";
+        try { throw null; } catch (e) { } finally { return callerMustBeRun(); }
+    })();
+
+    (function noTailCallInCatchIfFinally() {
+        "use strict";
+        try { throw null; } catch (e) { return callerMustBeStrict(); } finally { }
+    })();
+
+    (function tailCallInFor() {
+        "use strict";
+        for (var i = 0; i < 10; ++i)
+            return callerMustBeRun();
+    })();
+
+    (function tailCallInWhile() {
+        "use strict";
+        while (true)
+            return callerMustBeRun();
+    })();
+
+    (function tailCallInDoWhile() {
+        "use strict";
+        do
+            return callerMustBeRun();
+        while (true);
+    })();
+
+    (function noTailCallInForIn() {
+        "use strict";
+        for (var x in [1, 2])
+            return callerMustBeStrict();
+    })();
+
+    (function noTailCallInForOf() {
+        "use strict";
+        for (var x of [1, 2])
+            return callerMustBeStrict();
+    })();
+
+    (function tailCallInIf() {
+        "use strict";
+        if (true)
+            return callerMustBeRun();
+    })();
+
+    (function tailCallInElse() {
+        "use strict";
+        if (false) throw new Error("WTF");
+        else return callerMustBeRun();
+    })();
+
+    (function tailCallInSwitchCase() {
+        "use strict";
+        switch (0) {
+        case 0: return callerMustBeRun();
+        }
+    })();
+
+    (function tailCallInSwitchDefault() {
+        "use strict";
+        switch (0) {
+        default: return callerMustBeRun();
+        }
+    })();
+
+    (function tailCallWithLabel() {
+        "use strict";
+        dummy: return callerMustBeRun();
+    })();
+
+    // Expression tests, we don't enumerate all the cases where there
+    // *shouldn't* be a tail call
+
+    (function tailCallComma() {
+        "use strict";
+        return callerMustBeStrict(), callerMustBeRun();
+    })();
+
+    (function tailCallTernaryLeft() {
+        "use strict";
+        return true ? callerMustBeRun() : unreachable();
+    })();
+
+    (function tailCallTernaryRight() {
+        "use strict";
+        return false ? unreachable() : callerMustBeRun();
+    })();
+
+    (function tailCallLogicalAnd() {
+        "use strict";
+        return true && callerMustBeRun();
+    })();
+
+    (function tailCallLogicalOr() {
+        "use strict";
+        return false || callerMustBeRun();
+    })();
+
+    (function memberTailCall() {
+        "use strict";
+        return { f: callerMustBeRun }.f();
+    })();
+
+    (function bindTailCall() {
+        "use strict";
+        return callerMustBeRun.bind()();
+    })();
+
+    // Function.prototype tests
+
+    (function applyTailCall() {
+        "use strict";
+        return callerMustBeRun.apply();
+    })();
+
+    (function callTailCall() {
+        "use strict";
+        return callerMustBeRun.call();
+    })();
+
+    // No tail call for constructors
+    (function noTailConstruct() {
+        "use strict";
+        return new callerMustBeStrict();
+    })();
+}
+
+for (var i = 0; i < 10000; ++i)
+    runTests();
diff --git a/Source/JavaScriptCore/tests/stress/tail-call-varargs-no-stack-overflow.js b/Source/JavaScriptCore/tests/stress/tail-call-varargs-no-stack-overflow.js
new file mode 100644 (file)
index 0000000..a29a3e0
--- /dev/null
@@ -0,0 +1,28 @@
+function shouldThrow(func, errorMessage) {
+    var errorThrown = false;
+    var error = null;
+    try {
+        func();
+    } catch (e) {
+        errorThrown = true;
+        error = e;
+    }
+    if (!errorThrown)
+        throw new Error('not thrown');
+    if (String(error) !== errorMessage)
+        throw new Error(`bad error: ${String(error)}`);
+}
+
+function sloppyLoop(n) {
+    if (n > 0)
+        return sloppyLoop(...[n - 1]);
+}
+
+function strictLoop(n) {
+    "use strict";
+    if (n > 0)
+        return strictLoop(...[n - 1]);
+}
+
+shouldThrow(function () { sloppyLoop(100000); }, 'RangeError: Maximum call stack size exceeded.');
+strictLoop(100000);
diff --git a/Source/JavaScriptCore/tests/stress/tail-calls-dont-overwrite-live-stack.js b/Source/JavaScriptCore/tests/stress/tail-calls-dont-overwrite-live-stack.js
new file mode 100644 (file)
index 0000000..d9563d1
--- /dev/null
@@ -0,0 +1,30 @@
+"use strict";
+
+function tail(a, b) { }
+noInline(tail);
+
+var obj = {
+    method: function (x) {
+        return tail(x, x);
+    },
+
+    get fromNative() { return tail(0, 0); }
+};
+noInline(obj.method);
+
+function getThis(x) { return this; }
+noInline(getThis);
+
+for (var i = 0; i < 10000; ++i) {
+    var that = getThis(obj.method(42));
+
+    if (!Object.is(that, undefined))
+        throw new Error("Wrong 'this' value in call, expected undefined but got " + that);
+
+    that = getThis(obj.method(...[42]));
+    if (!Object.is(that, undefined))
+        throw new Error("Wrong 'this' value in varargs call, expected undefined but got " + that);
+
+    if (!Object.is(obj.fromNative, undefined))
+        throw new Error("Wrong 'fromNative' value, expected undefined but got " + obj.fromNative);
+}