2 * Copyright (C) 2013, 2014 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #include "ArityCheckFailReturnThunks.h"
32 #include "CCallHelpers.h"
33 #include "CodeBlockWithJITType.h"
34 #include "DFGCommon.h"
35 #include "FTLJITCode.h"
36 #include "JITOperations.h"
39 #include "LinkBuffer.h"
40 #include "JSCInlines.h"
41 #include "ProfilerCompilation.h"
42 #include "VirtualRegister.h"
44 namespace JSC { namespace FTL {
48 void link(State& state)
50 Graph& graph = state.graph;
51 CodeBlock* codeBlock = graph.m_codeBlock;
54 // LLVM will create its own jump tables as needed.
55 codeBlock->clearSwitchJumpTables();
57 // FIXME: Need to know the real frame register count.
58 // https://bugs.webkit.org/show_bug.cgi?id=125727
59 state.jitCode->common.frameRegisterCount = 1000;
61 state.jitCode->common.requiredRegisterCountForExit = graph.requiredRegisterCountForExit();
63 if (!graph.m_inlineCallFrames->isEmpty())
64 state.jitCode->common.inlineCallFrames = std::move(graph.m_inlineCallFrames);
66 // Create the entrypoint. Note that we use this entrypoint totally differently
67 // depending on whether we're doing OSR entry or not.
68 CCallHelpers jit(&vm, codeBlock);
70 OwnPtr<LinkBuffer> linkBuffer;
71 CCallHelpers::Label arityCheck;
73 CCallHelpers::Address frame = CCallHelpers::Address(
74 CCallHelpers::stackPointerRegister, -static_cast<int32_t>(AssemblyHelpers::prologueStackPointerDelta()));
76 if (Profiler::Compilation* compilation = graph.compilation()) {
77 compilation->addDescription(
78 Profiler::OriginStack(),
79 toCString("Generated FTL JIT code for ", CodeBlockWithJITType(codeBlock, JITCode::FTLJIT), ", instruction count = ", graph.m_codeBlock->instructionCount(), ":\n"));
81 graph.m_dominators.computeIfNecessary(graph);
82 graph.m_naturalLoops.computeIfNecessary(graph);
84 const char* prefix = " ";
86 DumpContext dumpContext;
87 StringPrintStream out;
89 for (size_t blockIndex = 0; blockIndex < graph.numBlocks(); ++blockIndex) {
90 BasicBlock* block = graph.block(blockIndex);
94 graph.dumpBlockHeader(out, prefix, block, Graph::DumpLivePhisOnly, &dumpContext);
95 compilation->addDescription(Profiler::OriginStack(), out.toCString());
98 for (size_t nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
99 Node* node = block->at(nodeIndex);
100 if (!node->willHaveCodeGenOrOSR() && !Options::showAllDFGNodes())
103 Profiler::OriginStack stack;
105 if (node->origin.semantic.isSet()) {
106 stack = Profiler::OriginStack(
107 *vm.m_perBytecodeProfiler, codeBlock, node->origin.semantic);
110 if (graph.dumpCodeOrigin(out, prefix, lastNode, node, &dumpContext)) {
111 compilation->addDescription(stack, out.toCString());
115 graph.dump(out, prefix, node, &dumpContext);
116 compilation->addDescription(stack, out.toCString());
119 if (node->origin.semantic.isSet())
124 dumpContext.dump(out, prefix);
125 compilation->addDescription(Profiler::OriginStack(), out.toCString());
128 out.print(" Disassembly:\n");
129 for (unsigned i = 0; i < state.jitCode->handles().size(); ++i) {
130 if (state.codeSectionNames[i] != "__text")
133 ExecutableMemoryHandle* handle = state.jitCode->handles()[i].get();
135 MacroAssemblerCodePtr(handle->start()), handle->sizeInBytes(),
136 " ", out, LLVMSubset);
138 compilation->addDescription(Profiler::OriginStack(), out.toCString());
141 state.jitCode->common.compilation = compilation;
144 switch (graph.m_plan.mode) {
146 CCallHelpers::JumpList mainPathJumps;
149 frame.withOffset(sizeof(Register) * JSStack::ArgumentCount),
151 mainPathJumps.append(jit.branch32(
152 CCallHelpers::AboveOrEqual, GPRInfo::regT1,
153 CCallHelpers::TrustedImm32(codeBlock->numParameters())));
154 jit.emitFunctionPrologue();
155 jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
157 CCallHelpers::TrustedImm32(CallFrame::Location::encodeAsBytecodeOffset(0)),
158 CCallHelpers::tagFor(JSStack::ArgumentCount));
159 jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);
160 CCallHelpers::Call callArityCheck = jit.call();
162 // FIXME: need to make this call register with exception handling somehow. This is
163 // part of a bigger problem: FTL should be able to handle exceptions.
164 // https://bugs.webkit.org/show_bug.cgi?id=113622
165 // Until then, use a JIT ASSERT.
166 jit.load64(vm.addressOfException(), GPRInfo::regT1);
167 jit.jitAssertIsNull(GPRInfo::regT1);
169 jit.move(GPRInfo::returnValueGPR, GPRInfo::regT0);
170 jit.emitFunctionEpilogue();
171 mainPathJumps.append(jit.branchTest32(CCallHelpers::Zero, GPRInfo::regT0));
172 jit.emitFunctionPrologue();
173 jit.move(CCallHelpers::TrustedImmPtr(vm.arityCheckFailReturnThunks->returnPCsFor(vm, codeBlock->numParameters())), GPRInfo::regT7);
174 jit.loadPtr(CCallHelpers::BaseIndex(GPRInfo::regT7, GPRInfo::regT0, CCallHelpers::timesPtr()), GPRInfo::regT7);
175 CCallHelpers::Call callArityFixup = jit.call();
176 jit.emitFunctionEpilogue();
177 mainPathJumps.append(jit.jump());
179 linkBuffer = adoptPtr(new LinkBuffer(vm, &jit, codeBlock, JITCompilationMustSucceed));
180 linkBuffer->link(callArityCheck, codeBlock->m_isConstructor ? operationConstructArityCheck : operationCallArityCheck);
181 linkBuffer->link(callArityFixup, FunctionPtr((vm.getCTIStub(arityFixup)).code().executableAddress()));
182 linkBuffer->link(mainPathJumps, CodeLocationLabel(bitwise_cast<void*>(state.generatedFunction)));
184 state.jitCode->initializeAddressForCall(MacroAssemblerCodePtr(bitwise_cast<void*>(state.generatedFunction)));
188 case FTLForOSREntryMode: {
189 // We jump to here straight from DFG code, after having boxed up all of the
190 // values into the scratch buffer. Everything should be good to go - at this
191 // point we've even done the stack check. Basically we just have to make the
192 // call to the LLVM-generated code.
193 CCallHelpers::Label start = jit.label();
194 jit.emitFunctionEpilogue();
195 CCallHelpers::Jump mainPathJump = jit.jump();
197 linkBuffer = adoptPtr(new LinkBuffer(vm, &jit, codeBlock, JITCompilationMustSucceed));
198 linkBuffer->link(mainPathJump, CodeLocationLabel(bitwise_cast<void*>(state.generatedFunction)));
200 state.jitCode->initializeAddressForCall(linkBuffer->locationOf(start));
205 RELEASE_ASSERT_NOT_REACHED();
209 state.finalizer->entrypointLinkBuffer = linkBuffer.release();
210 state.finalizer->function = state.generatedFunction;
211 state.finalizer->arityCheck = arityCheck;
212 state.finalizer->jitCode = state.jitCode;
215 } } // namespace JSC::FTL
217 #endif // ENABLE(FTL_JIT)