5ee01293b2377806af80a1954089c91daf28f455
[WebKit-https.git] / Source / JavaScriptCore / ftl / FTLCompile.cpp
1 /*
2  * Copyright (C) 2013, 2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "FTLCompile.h"
28
29 #if ENABLE(FTL_JIT)
30
31 #include "CodeBlockWithJITType.h"
32 #include "CCallHelpers.h"
33 #include "DFGCommon.h"
34 #include "DFGGraphSafepoint.h"
35 #include "DataView.h"
36 #include "Disassembler.h"
37 #include "FTLExitThunkGenerator.h"
38 #include "FTLInlineCacheSize.h"
39 #include "FTLJITCode.h"
40 #include "FTLThunks.h"
41 #include "FTLUnwindInfo.h"
42 #include "JITStubs.h"
43 #include "LLVMAPI.h"
44 #include "LinkBuffer.h"
45 #include "RepatchBuffer.h"
46
47 namespace JSC { namespace FTL {
48
49 using namespace DFG;
50
// MCJIT memory-manager callback: allocates executable memory for one code
// section of the LLVM-generated module. opaqueState is the compile State we
// handed to CreateSimpleMCJITMemoryManager in compile().
static uint8_t* mmAllocateCodeSection(
    void* opaqueState, uintptr_t size, unsigned alignment, unsigned, const char* sectionName)
{
    State& state = *static_cast<State*>(opaqueState);
    
    // The executable allocator only guarantees jitAllocationGranule alignment;
    // a stricter request could not be honored.
    RELEASE_ASSERT(alignment <= jitAllocationGranule);
    
    RefPtr<ExecutableMemoryHandle> result =
        state.graph.m_vm.executableAllocator.allocate(
            state.graph.m_vm, size, state.graph.m_codeBlock, JITCompilationMustSucceed);
    
    // LLVM used to put __compact_unwind in a code section. We keep this here defensively,
    // for clients that use older LLVMs.
    if (!strcmp(sectionName, "__compact_unwind")) {
        state.compactUnwind = result->start();
        state.compactUnwindSize = result->sizeInBytes();
    }
    
    // Keep the handle alive for the lifetime of the JITCode, and remember the
    // section name so the disassembly dumps in compile() can label it.
    state.jitCode->addHandle(result);
    state.codeSectionNames.append(sectionName);
    
    return static_cast<uint8_t*>(result->start());
}
74
// MCJIT memory-manager callback: allocates memory for one data section of the
// LLVM-generated module. Two sections get special handling: __llvm_stackmaps
// is stashed on the State (it is parsed later by compile(), not kept with the
// JITCode), and __compact_unwind is recorded so the unwind info can be parsed.
static uint8_t* mmAllocateDataSection(
    void* opaqueState, uintptr_t size, unsigned alignment, unsigned sectionID,
    const char* sectionName, LLVMBool isReadOnly)
{
    UNUSED_PARAM(sectionID);
    UNUSED_PARAM(isReadOnly);

    State& state = *static_cast<State*>(opaqueState);
    
    RefPtr<DataSection> section = adoptRef(new DataSection(
        state.graph.m_vm, state.graph.m_codeBlock, size, alignment));
    
    if (!strcmp(sectionName, "__llvm_stackmaps"))
        state.stackmapsSection = section;
    else {
        // Ordinary data sections live as long as the JITCode does.
        state.jitCode->addDataSection(section);
        state.dataSectionNames.append(sectionName);
        if (!strcmp(sectionName, "__compact_unwind")) {
            state.compactUnwind = section->base();
            state.compactUnwindSize = size;
        }
    }
    
    return bitwise_cast<uint8_t*>(section->base());
}
100
// MCJIT callback invoked when the engine finalizes memory permissions. We
// allocate directly out of the VM's executable pool, so there is nothing to
// do; returning false reports success to LLVM.
static LLVMBool mmApplyPermissions(void*, char**)
{
    return false;
}
105
// MCJIT callback invoked when the memory manager is torn down. Section
// lifetime is managed by the RefPtrs stashed on the State/JITCode, so there
// is nothing to free here.
static void mmDestroy(void*)
{
}
109
110 static void dumpDataSection(DataSection* section, const char* prefix)
111 {
112     for (unsigned j = 0; j < section->size() / sizeof(int64_t); ++j) {
113         char buf[32];
114         int64_t* wordPointer = static_cast<int64_t*>(section->base()) + j;
115         snprintf(buf, sizeof(buf), "0x%lx", static_cast<unsigned long>(bitwise_cast<uintptr_t>(wordPointer)));
116         dataLogF("%s%16s: 0x%016llx\n", prefix, buf, static_cast<long long>(*wordPointer));
117     }
118 }
119
// Patches the inline-cache fast path for every stackmap record associated
// with the given IC descriptor (GetById or PutById). For each record we emit
// the fast-path code directly over the patchpoint area that LLVM reserved in
// the generated function, pad the remainder with nops, and wire the slow-path
// jump and the slow-path-done return against the side code that was already
// emitted into sideCodeLinkBuffer.
template<typename DescriptorType>
void generateICFastPath(
    State& state, CodeBlock* codeBlock, GeneratedFunction generatedFunction,
    StackMaps::RecordMap& recordMap, DescriptorType& ic, size_t sizeOfIC)
{
    VM& vm = state.graph.m_vm;

    StackMaps::RecordMap::iterator iter = recordMap.find(ic.stackmapID());
    if (iter == recordMap.end()) {
        // It was optimized out.
        return;
    }
    
    Vector<StackMaps::Record>& records = iter->value;
    
    // The slow-path pass appended one generator per record, in record order.
    RELEASE_ASSERT(records.size() == ic.m_generators.size());
    
    for (unsigned i = records.size(); i--;) {
        StackMaps::Record& record = records[i];
        auto generator = ic.m_generators[i];

        CCallHelpers fastPathJIT(&vm, codeBlock);
        generator.generateFastPath(fastPathJIT);
        
        // The patchpoint's code begins at this instruction offset inside the
        // LLVM-generated function.
        char* startOfIC =
            bitwise_cast<char*>(generatedFunction) + record.instructionOffset;
        
        LinkBuffer linkBuffer(vm, &fastPathJIT, startOfIC, sizeOfIC);
        // Note: we could handle the !isValid() case. We just don't appear to have a
        // reason to do so, yet.
        RELEASE_ASSERT(linkBuffer.isValid());
        
        // Fill whatever the fast path didn't use of the reserved area with nops
        // so execution falls through cleanly to the end of the patchpoint.
        MacroAssembler::AssemblerType_T::fillNops(
            startOfIC + linkBuffer.size(), sizeOfIC - linkBuffer.size());
        
        // The slow path returns to just past the reserved IC area.
        state.finalizer->sideCodeLinkBuffer->link(
            ic.m_slowPathDone[i], CodeLocationLabel(startOfIC + sizeOfIC));
        
        // The fast path's miss case jumps to the slow path in the side code.
        linkBuffer.link(
            generator.slowPathJump(),
            state.finalizer->sideCodeLinkBuffer->locationOf(generator.slowPathBegin()));
        
        generator.finalize(linkBuffer, *state.finalizer->sideCodeLinkBuffer);
    }
}
165
166 static void fixFunctionBasedOnStackMaps(
167     State& state, CodeBlock* codeBlock, JITCode* jitCode, GeneratedFunction generatedFunction,
168     StackMaps::RecordMap& recordMap, bool didSeeUnwindInfo)
169 {
170     Graph& graph = state.graph;
171     VM& vm = graph.m_vm;
172     StackMaps stackmaps = jitCode->stackmaps;
173     
174     StackMaps::RecordMap::iterator iter = recordMap.find(state.capturedStackmapID);
175     RELEASE_ASSERT(iter != recordMap.end());
176     RELEASE_ASSERT(iter->value.size() == 1);
177     RELEASE_ASSERT(iter->value[0].locations.size() == 1);
178     Location capturedLocation =
179         Location::forStackmaps(&jitCode->stackmaps, iter->value[0].locations[0]);
180     RELEASE_ASSERT(capturedLocation.kind() == Location::Register);
181     RELEASE_ASSERT(capturedLocation.gpr() == GPRInfo::callFrameRegister);
182     RELEASE_ASSERT(!(capturedLocation.addend() % sizeof(Register)));
183     int32_t localsOffset = capturedLocation.addend() / sizeof(Register) + graph.m_nextMachineLocal;
184     
185     for (unsigned i = graph.m_inlineVariableData.size(); i--;) {
186         InlineCallFrame* inlineCallFrame = graph.m_inlineVariableData[i].inlineCallFrame;
187         
188         if (inlineCallFrame->argumentsRegister.isValid()) {
189             inlineCallFrame->argumentsRegister = VirtualRegister(
190                 inlineCallFrame->argumentsRegister.offset() + localsOffset);
191         }
192         
193         for (unsigned argument = inlineCallFrame->arguments.size(); argument-- > 1;) {
194             inlineCallFrame->arguments[argument] =
195                 inlineCallFrame->arguments[argument].withLocalsOffset(localsOffset);
196         }
197         
198         if (inlineCallFrame->isClosureCall) {
199             inlineCallFrame->calleeRecovery =
200                 inlineCallFrame->calleeRecovery.withLocalsOffset(localsOffset);
201         }
202     }
203     
204     if (codeBlock->usesArguments()) {
205         codeBlock->setArgumentsRegister(
206             VirtualRegister(codeBlock->argumentsRegister().offset() + localsOffset));
207     }
208
209     MacroAssembler::Label stackOverflowException;
210
211     {
212         CCallHelpers checkJIT(&vm, codeBlock);
213         
214         // At this point it's perfectly fair to just blow away all state and restore the
215         // JS JIT view of the universe.
216         checkJIT.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR1);
217
218         MacroAssembler::Label exceptionContinueArg1Set = checkJIT.label();
219         checkJIT.move(MacroAssembler::TrustedImm64(TagTypeNumber), GPRInfo::tagTypeNumberRegister);
220         checkJIT.move(MacroAssembler::TrustedImm64(TagMask), GPRInfo::tagMaskRegister);
221
222         checkJIT.move(MacroAssembler::TrustedImmPtr(&vm), GPRInfo::argumentGPR0);
223         MacroAssembler::Call call = checkJIT.call();
224         checkJIT.jumpToExceptionHandler();
225
226         stackOverflowException = checkJIT.label();
227         checkJIT.emitGetCallerFrameFromCallFrameHeaderPtr(GPRInfo::argumentGPR1);
228         checkJIT.jump(exceptionContinueArg1Set);
229
230         OwnPtr<LinkBuffer> linkBuffer = adoptPtr(new LinkBuffer(
231             vm, &checkJIT, codeBlock, JITCompilationMustSucceed));
232         linkBuffer->link(call, FunctionPtr(lookupExceptionHandler));
233         
234         state.finalizer->handleExceptionsLinkBuffer = linkBuffer.release();
235     }
236
237     ExitThunkGenerator exitThunkGenerator(state);
238     exitThunkGenerator.emitThunks();
239     if (exitThunkGenerator.didThings()) {
240         RELEASE_ASSERT(state.finalizer->osrExit.size());
241         RELEASE_ASSERT(didSeeUnwindInfo);
242         
243         OwnPtr<LinkBuffer> linkBuffer = adoptPtr(new LinkBuffer(
244             vm, &exitThunkGenerator, codeBlock, JITCompilationMustSucceed));
245         
246         RELEASE_ASSERT(state.finalizer->osrExit.size() == state.jitCode->osrExit.size());
247         
248         for (unsigned i = 0; i < state.jitCode->osrExit.size(); ++i) {
249             OSRExitCompilationInfo& info = state.finalizer->osrExit[i];
250             OSRExit& exit = jitCode->osrExit[i];
251             
252             if (verboseCompilationEnabled())
253                 dataLog("Handling OSR stackmap #", exit.m_stackmapID, " for ", exit.m_codeOrigin, "\n");
254
255             iter = recordMap.find(exit.m_stackmapID);
256             if (iter == recordMap.end()) {
257                 // It was optimized out.
258                 continue;
259             }
260             
261             info.m_thunkAddress = linkBuffer->locationOf(info.m_thunkLabel);
262             exit.m_patchableCodeOffset = linkBuffer->offsetOf(info.m_thunkJump);
263             
264             for (unsigned j = exit.m_values.size(); j--;) {
265                 ExitValue value = exit.m_values[j];
266                 if (!value.isInJSStackSomehow())
267                     continue;
268                 if (!value.virtualRegister().isLocal())
269                     continue;
270                 exit.m_values[j] = value.withVirtualRegister(
271                     VirtualRegister(value.virtualRegister().offset() + localsOffset));
272             }
273             
274             if (verboseCompilationEnabled()) {
275                 DumpContext context;
276                 dataLog("    Exit values: ", inContext(exit.m_values, &context), "\n");
277             }
278         }
279         
280         state.finalizer->exitThunksLinkBuffer = linkBuffer.release();
281     }
282
283     if (!state.getByIds.isEmpty() || !state.putByIds.isEmpty()) {
284         CCallHelpers slowPathJIT(&vm, codeBlock);
285         
286         CCallHelpers::JumpList exceptionTarget;
287         
288         for (unsigned i = state.getByIds.size(); i--;) {
289             GetByIdDescriptor& getById = state.getByIds[i];
290             
291             if (verboseCompilationEnabled())
292                 dataLog("Handling GetById stackmap #", getById.stackmapID(), "\n");
293             
294             iter = recordMap.find(getById.stackmapID());
295             if (iter == recordMap.end()) {
296                 // It was optimized out.
297                 continue;
298             }
299             
300             for (unsigned i = 0; i < iter->value.size(); ++i) {
301                 StackMaps::Record& record = iter->value[i];
302             
303                 // FIXME: LLVM should tell us which registers are live.
304                 RegisterSet usedRegisters = RegisterSet::allRegisters();
305                 
306                 GPRReg result = record.locations[0].directGPR();
307                 GPRReg base = record.locations[1].directGPR();
308                 
309                 JITGetByIdGenerator gen(
310                     codeBlock, getById.codeOrigin(), usedRegisters, JSValueRegs(base),
311                     JSValueRegs(result), NeedToSpill);
312                 
313                 MacroAssembler::Label begin = slowPathJIT.label();
314                 
315                 MacroAssembler::Call call = callOperation(
316                     state, usedRegisters, slowPathJIT, getById.codeOrigin(), &exceptionTarget,
317                     operationGetByIdOptimize, result, gen.stubInfo(), base, getById.uid());
318                 
319                 gen.reportSlowPathCall(begin, call);
320                 
321                 getById.m_slowPathDone.append(slowPathJIT.jump());
322                 getById.m_generators.append(gen);
323             }
324         }
325         
326         for (unsigned i = state.putByIds.size(); i--;) {
327             PutByIdDescriptor& putById = state.putByIds[i];
328             
329             if (verboseCompilationEnabled())
330                 dataLog("Handling PutById stackmap #", putById.stackmapID(), "\n");
331             
332             iter = recordMap.find(putById.stackmapID());
333             if (iter == recordMap.end()) {
334                 // It was optimized out.
335                 continue;
336             }
337             
338             for (unsigned i = 0; i < iter->value.size(); ++i) {
339                 StackMaps::Record& record = iter->value[i];
340                 
341                 // FIXME: LLVM should tell us which registers are live.
342                 RegisterSet usedRegisters = RegisterSet::allRegisters();
343                 
344                 GPRReg base = record.locations[0].directGPR();
345                 GPRReg value = record.locations[1].directGPR();
346                 
347                 JITPutByIdGenerator gen(
348                     codeBlock, putById.codeOrigin(), usedRegisters, JSValueRegs(base),
349                     JSValueRegs(value), GPRInfo::patchpointScratchRegister, false,
350                     putById.ecmaMode(), putById.putKind());
351                 
352                 MacroAssembler::Label begin = slowPathJIT.label();
353                 
354                 MacroAssembler::Call call = callOperation(
355                     state, usedRegisters, slowPathJIT, putById.codeOrigin(), &exceptionTarget,
356                     gen.slowPathFunction(), gen.stubInfo(), value, base, putById.uid());
357                 
358                 gen.reportSlowPathCall(begin, call);
359                 
360                 putById.m_slowPathDone.append(slowPathJIT.jump());
361                 putById.m_generators.append(gen);
362             }
363         }
364         
365         exceptionTarget.link(&slowPathJIT);
366         MacroAssembler::Jump exceptionJump = slowPathJIT.jump();
367         
368         state.finalizer->sideCodeLinkBuffer = adoptPtr(
369             new LinkBuffer(vm, &slowPathJIT, codeBlock, JITCompilationMustSucceed));
370         state.finalizer->sideCodeLinkBuffer->link(
371             exceptionJump, state.finalizer->handleExceptionsLinkBuffer->entrypoint());
372         
373         for (unsigned i = state.getByIds.size(); i--;) {
374             generateICFastPath(
375                 state, codeBlock, generatedFunction, recordMap, state.getByIds[i],
376                 sizeOfGetById());
377         }
378         for (unsigned i = state.putByIds.size(); i--;) {
379             generateICFastPath(
380                 state, codeBlock, generatedFunction, recordMap, state.putByIds[i],
381                 sizeOfPutById());
382         }
383     }
384     
385     // Handling JS calls is weird: we need to ensure that we sort them by the PC in LLVM
386     // generated code. That implies first pruning the ones that LLVM didn't generate.
387     Vector<JSCall> oldCalls = state.jsCalls;
388     state.jsCalls.resize(0);
389     for (unsigned i = 0; i < oldCalls.size(); ++i) {
390         JSCall& call = oldCalls[i];
391         
392         StackMaps::RecordMap::iterator iter = recordMap.find(call.stackmapID());
393         if (iter == recordMap.end())
394             continue;
395
396         for (unsigned j = 0; j < iter->value.size(); ++j) {
397             JSCall copy = call;
398             copy.m_instructionOffset = iter->value[j].instructionOffset;
399             state.jsCalls.append(copy);
400         }
401     }
402     
403     std::sort(state.jsCalls.begin(), state.jsCalls.end());
404     
405     codeBlock->setNumberOfCallLinkInfos(state.jsCalls.size());
406     for (unsigned i = state.jsCalls.size(); i--;) {
407         JSCall& call = state.jsCalls[i];
408
409         CCallHelpers fastPathJIT(&vm, codeBlock);
410         call.emit(fastPathJIT);
411         
412         char* startOfIC = bitwise_cast<char*>(generatedFunction) + call.m_instructionOffset;
413         
414         LinkBuffer linkBuffer(vm, &fastPathJIT, startOfIC, sizeOfCall());
415         RELEASE_ASSERT(linkBuffer.isValid());
416         
417         MacroAssembler::AssemblerType_T::fillNops(
418             startOfIC + linkBuffer.size(), sizeOfCall() - linkBuffer.size());
419         
420         CallLinkInfo& info = codeBlock->callLinkInfo(i);
421         call.link(vm, linkBuffer, info);
422     }
423     
424     RepatchBuffer repatchBuffer(codeBlock);
425
426     iter = recordMap.find(state.handleStackOverflowExceptionStackmapID);
427     // It's sort of remotely possible that we won't have an in-band exception handling
428     // path, for some kinds of functions.
429     if (iter != recordMap.end()) {
430         for (unsigned i = iter->value.size(); i--;) {
431             StackMaps::Record& record = iter->value[i];
432             
433             CodeLocationLabel source = CodeLocationLabel(
434                 bitwise_cast<char*>(generatedFunction) + record.instructionOffset);
435
436             RELEASE_ASSERT(stackOverflowException.isSet());
437
438             repatchBuffer.replaceWithJump(source, state.finalizer->handleExceptionsLinkBuffer->locationOf(stackOverflowException));
439         }
440     }
441     
442     iter = recordMap.find(state.handleExceptionStackmapID);
443     // It's sort of remotely possible that we won't have an in-band exception handling
444     // path, for some kinds of functions.
445     if (iter != recordMap.end()) {
446         for (unsigned i = iter->value.size(); i--;) {
447             StackMaps::Record& record = iter->value[i];
448             
449             CodeLocationLabel source = CodeLocationLabel(
450                 bitwise_cast<char*>(generatedFunction) + record.instructionOffset);
451             
452             repatchBuffer.replaceWithJump(source, state.finalizer->handleExceptionsLinkBuffer->entrypoint());
453         }
454     }
455     
456     for (unsigned exitIndex = 0; exitIndex < jitCode->osrExit.size(); ++exitIndex) {
457         OSRExitCompilationInfo& info = state.finalizer->osrExit[exitIndex];
458         OSRExit& exit = jitCode->osrExit[exitIndex];
459         iter = recordMap.find(exit.m_stackmapID);
460         
461         Vector<const void*> codeAddresses;
462         
463         if (iter != recordMap.end()) {
464             for (unsigned i = iter->value.size(); i--;) {
465                 StackMaps::Record& record = iter->value[i];
466                 
467                 CodeLocationLabel source = CodeLocationLabel(
468                     bitwise_cast<char*>(generatedFunction) + record.instructionOffset);
469                 
470                 codeAddresses.append(bitwise_cast<char*>(generatedFunction) + record.instructionOffset + MacroAssembler::maxJumpReplacementSize());
471                 
472                 if (info.m_isInvalidationPoint)
473                     jitCode->common.jumpReplacements.append(JumpReplacement(source, info.m_thunkAddress));
474                 else
475                     repatchBuffer.replaceWithJump(source, info.m_thunkAddress);
476             }
477         }
478         
479         if (graph.compilation())
480             graph.compilation()->addOSRExitSite(codeAddresses);
481     }
482 }
483
// Drives the LLVM backend for one FTL compilation: sets up an MCJIT engine
// over state.module, runs the optimization pipeline, extracts the generated
// function and its sections, parses unwind info and stackmaps, and finally
// fixes up the machine code via fixFunctionBasedOnStackMaps.
void compile(State& state)
{
    char* error = 0;
    
    {
        // The LLVM work below can take a while; a safepoint lets the rest of
        // the VM make progress (e.g. GC) while we are inside LLVM.
        GraphSafepoint safepoint(state.graph);
        
        LLVMMCJITCompilerOptions options;
        llvm->InitializeMCJITCompilerOptions(&options, sizeof(options));
        options.OptLevel = Options::llvmBackendOptimizationLevel();
        // We need the frame pointer for stack walking / unwinding.
        options.NoFramePointerElim = true;
        if (Options::useLLVMSmallCodeModel())
            options.CodeModel = LLVMCodeModelSmall;
        options.EnableFastISel = Options::enableLLVMFastISel();
        // Route all of MCJIT's memory allocation through our callbacks so the
        // code/data sections land in JSC's executable pool, keyed by &state.
        options.MCJMM = llvm->CreateSimpleMCJITMemoryManager(
            &state, mmAllocateCodeSection, mmAllocateDataSection, mmApplyPermissions, mmDestroy);
    
        LLVMExecutionEngineRef engine;
    
        if (llvm->CreateMCJITCompilerForModule(&engine, state.module, &options, sizeof(options), &error)) {
            dataLog("FATAL: Could not create LLVM execution engine: ", error, "\n");
            CRASH();
        }

        LLVMPassManagerRef functionPasses = 0;
        LLVMPassManagerRef modulePasses;
    
        if (Options::llvmSimpleOpt()) {
            // Hand-picked lightweight pipeline: cheaper than the full
            // PassManagerBuilder pipeline below.
            modulePasses = llvm->CreatePassManager();
            llvm->AddTargetData(llvm->GetExecutionEngineTargetData(engine), modulePasses);
            llvm->AddPromoteMemoryToRegisterPass(modulePasses);
            llvm->AddConstantPropagationPass(modulePasses);
            llvm->AddInstructionCombiningPass(modulePasses);
            llvm->AddBasicAliasAnalysisPass(modulePasses);
            llvm->AddTypeBasedAliasAnalysisPass(modulePasses);
            llvm->AddGVNPass(modulePasses);
            llvm->AddCFGSimplificationPass(modulePasses);
            llvm->AddDeadStoreEliminationPass(modulePasses);
            llvm->RunPassManager(modulePasses, state.module);
        } else {
            // Standard LLVM pipeline configured via PassManagerBuilder.
            LLVMPassManagerBuilderRef passBuilder = llvm->PassManagerBuilderCreate();
            llvm->PassManagerBuilderSetOptLevel(passBuilder, Options::llvmOptimizationLevel());
            llvm->PassManagerBuilderSetSizeLevel(passBuilder, Options::llvmSizeLevel());
        
            functionPasses = llvm->CreateFunctionPassManagerForModule(state.module);
            modulePasses = llvm->CreatePassManager();
        
            llvm->AddTargetData(llvm->GetExecutionEngineTargetData(engine), modulePasses);
        
            llvm->PassManagerBuilderPopulateFunctionPassManager(passBuilder, functionPasses);
            llvm->PassManagerBuilderPopulateModulePassManager(passBuilder, modulePasses);
        
            llvm->PassManagerBuilderDispose(passBuilder);
        
            llvm->InitializeFunctionPassManager(functionPasses);
            for (LValue function = llvm->GetFirstFunction(state.module); function; function = llvm->GetNextFunction(function))
                llvm->RunFunctionPassManager(functionPasses, function);
            llvm->FinalizeFunctionPassManager(functionPasses);
        
            llvm->RunPassManager(modulePasses, state.module);
        }

        if (shouldShowDisassembly() || verboseCompilationEnabled())
            state.dumpState("after optimization");
    
        // FIXME: Need to add support for the case where JIT memory allocation failed.
        // https://bugs.webkit.org/show_bug.cgi?id=113620
        // This triggers codegen; our mm* callbacks above fire during this call.
        state.generatedFunction = reinterpret_cast<GeneratedFunction>(llvm->GetPointerToGlobal(engine, state.function));
        if (functionPasses)
            llvm->DisposePassManager(functionPasses);
        llvm->DisposePassManager(modulePasses);
        llvm->DisposeExecutionEngine(engine);
    }

    if (shouldShowDisassembly()) {
        // Dump every code section (pre-fixup) and every data section.
        for (unsigned i = 0; i < state.jitCode->handles().size(); ++i) {
            ExecutableMemoryHandle* handle = state.jitCode->handles()[i].get();
            dataLog(
                "Generated LLVM code for ",
                CodeBlockWithJITType(state.graph.m_codeBlock, JITCode::FTLJIT),
                " #", i, ", ", state.codeSectionNames[i], ":\n");
            disassemble(
                MacroAssemblerCodePtr(handle->start()), handle->sizeInBytes(),
                "    ", WTF::dataFile(), LLVMSubset);
        }
        
        for (unsigned i = 0; i < state.jitCode->dataSections().size(); ++i) {
            DataSection* section = state.jitCode->dataSections()[i].get();
            dataLog(
                "Generated LLVM data section for ",
                CodeBlockWithJITType(state.graph.m_codeBlock, JITCode::FTLJIT),
                " #", i, ", ", state.dataSectionNames[i], ":\n");
            dumpDataSection(section, "    ");
        }
    }
    
    // The __compact_unwind section was captured by the mm* callbacks;
    // parsing may legitimately find nothing for some functions.
    bool didSeeUnwindInfo = state.jitCode->unwindInfo.parse(
        state.compactUnwind, state.compactUnwindSize, state.generatedFunction);
    if (shouldShowDisassembly()) {
        dataLog("Unwind info for ", CodeBlockWithJITType(state.graph.m_codeBlock, JITCode::FTLJIT), ":\n");
        if (didSeeUnwindInfo)
            dataLog("    ", state.jitCode->unwindInfo, "\n");
        else
            dataLog("    <no unwind info>\n");
    }
    
    if (state.stackmapsSection && state.stackmapsSection->size()) {
        if (shouldShowDisassembly()) {
            dataLog(
                "Generated LLVM stackmaps section for ",
                CodeBlockWithJITType(state.graph.m_codeBlock, JITCode::FTLJIT), ":\n");
            dataLog("    Raw data:\n");
            dumpDataSection(state.stackmapsSection.get(), "    ");
        }
        
        // Parse the binary __llvm_stackmaps payload, then patch the generated
        // code (ICs, calls, exception checks, OSR exits) based on it.
        RefPtr<DataView> stackmapsData = DataView::create(
            ArrayBuffer::create(state.stackmapsSection->base(), state.stackmapsSection->size()));
        state.jitCode->stackmaps.parse(stackmapsData.get());
    
        if (shouldShowDisassembly()) {
            dataLog("    Structured data:\n");
            state.jitCode->stackmaps.dumpMultiline(WTF::dataFile(), "        ");
        }
        
        StackMaps::RecordMap recordMap = state.jitCode->stackmaps.computeRecordMap();
        fixFunctionBasedOnStackMaps(
            state, state.graph.m_codeBlock, state.jitCode.get(), state.generatedFunction,
            recordMap, didSeeUnwindInfo);
        
        if (shouldShowDisassembly()) {
            // Re-dump only the __text section, which is what fix-up patched.
            for (unsigned i = 0; i < state.jitCode->handles().size(); ++i) {
                if (state.codeSectionNames[i] != "__text")
                    continue;
                
                ExecutableMemoryHandle* handle = state.jitCode->handles()[i].get();
                dataLog(
                    "Generated LLVM code after stackmap-based fix-up for ",
                    CodeBlockWithJITType(state.graph.m_codeBlock, JITCode::FTLJIT),
                    " in ", state.graph.m_plan.mode, " #", i, ", ",
                    state.codeSectionNames[i], ":\n");
                disassemble(
                    MacroAssemblerCodePtr(handle->start()), handle->sizeInBytes(),
                    "    ", WTF::dataFile(), LLVMSubset);
            }
        }
    }
    
    state.module = 0; // We no longer own the module.
}
633
634 } } // namespace JSC::FTL
635
636 #endif // ENABLE(FTL_JIT)
637