FTL should allow LLVM to allocate data sections with alignment > 8
[WebKit.git] / Source / JavaScriptCore / ftl / FTLCompile.cpp
1 /*
2  * Copyright (C) 2013, 2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "FTLCompile.h"
28
29 #if ENABLE(FTL_JIT)
30
31 #include "CodeBlockWithJITType.h"
32 #include "CCallHelpers.h"
33 #include "DFGCommon.h"
34 #include "DFGGraphSafepoint.h"
35 #include "DataView.h"
36 #include "Disassembler.h"
37 #include "FTLExitThunkGenerator.h"
38 #include "FTLInlineCacheSize.h"
39 #include "FTLJITCode.h"
40 #include "FTLThunks.h"
41 #include "FTLUnwindInfo.h"
42 #include "JITStubs.h"
43 #include "LLVMAPI.h"
44 #include "LinkBuffer.h"
45 #include "RepatchBuffer.h"
46
47 namespace JSC { namespace FTL {
48
49 using namespace DFG;
50
51 static uint8_t* mmAllocateCodeSection(
52     void* opaqueState, uintptr_t size, unsigned alignment, unsigned, const char* sectionName)
53 {
54     State& state = *static_cast<State*>(opaqueState);
55     
56     RELEASE_ASSERT(alignment <= jitAllocationGranule);
57     
58     RefPtr<ExecutableMemoryHandle> result =
59         state.graph.m_vm.executableAllocator.allocate(
60             state.graph.m_vm, size, state.graph.m_codeBlock, JITCompilationMustSucceed);
61     
62     // LLVM used to put __compact_unwind in a code section. We keep this here defensively,
63     // for clients that use older LLVMs.
64     if (!strcmp(sectionName, "__compact_unwind")) {
65         state.compactUnwind = result->start();
66         state.compactUnwindSize = result->sizeInBytes();
67     }
68     
69     state.jitCode->addHandle(result);
70     state.codeSectionNames.append(sectionName);
71     
72     return static_cast<uint8_t*>(result->start());
73 }
74
75 static uint8_t* mmAllocateDataSection(
76     void* opaqueState, uintptr_t size, unsigned alignment, unsigned sectionID,
77     const char* sectionName, LLVMBool isReadOnly)
78 {
79     UNUSED_PARAM(sectionID);
80     UNUSED_PARAM(isReadOnly);
81
82     State& state = *static_cast<State*>(opaqueState);
83     
84     RefPtr<DataSection> section = adoptRef(new DataSection(size, alignment));
85     
86     if (!strcmp(sectionName, "__llvm_stackmaps"))
87         state.stackmapsSection = section;
88     else {
89         state.jitCode->addDataSection(section);
90         state.dataSectionNames.append(sectionName);
91         if (!strcmp(sectionName, "__compact_unwind")) {
92             state.compactUnwind = section->base();
93             state.compactUnwindSize = size;
94         }
95     }
96     
97     return bitwise_cast<uint8_t*>(section->base());
98 }
99
// MCJIT finalization hook. Returning false reports success to LLVM; we do not
// remap memory permissions here — presumably the executable allocator already
// hands out memory with suitable protection (TODO confirm against allocator).
static LLVMBool mmApplyPermissions(void*, char**)
{
    return false;
}
104
// MCJIT teardown hook. Intentionally empty: section lifetime is managed by
// the RefPtr handles retained via addHandle()/addDataSection() in the
// allocation callbacks above, not by LLVM.
static void mmDestroy(void*)
{
}
108
109 static void dumpDataSection(DataSection* section, const char* prefix)
110 {
111     for (unsigned j = 0; j < section->size() / sizeof(int64_t); ++j) {
112         char buf[32];
113         int64_t* wordPointer = static_cast<int64_t*>(section->base()) + j;
114         snprintf(buf, sizeof(buf), "0x%lx", static_cast<unsigned long>(bitwise_cast<uintptr_t>(wordPointer)));
115         dataLogF("%s%16s: 0x%016llx\n", prefix, buf, static_cast<long long>(*wordPointer));
116     }
117 }
118
// Splices the fast path of an inline cache (get-by-id / put-by-id) into the
// nop slab that LLVM reserved at each patchpoint site, then links the fast
// path's slow-path jump to the out-of-line code previously emitted into
// sideCodeLinkBuffer.
template<typename DescriptorType>
void generateICFastPath(
    State& state, CodeBlock* codeBlock, GeneratedFunction generatedFunction,
    StackMaps::RecordMap& recordMap, DescriptorType& ic, size_t sizeOfIC)
{
    VM& vm = state.graph.m_vm;

    StackMaps::RecordMap::iterator iter = recordMap.find(ic.stackmapID());
    if (iter == recordMap.end()) {
        // It was optimized out.
        return;
    }
    
    Vector<StackMaps::Record>& records = iter->value;
    
    // One generator was appended per stackmap record when the slow paths were
    // built, so the two vectors must line up index-for-index.
    RELEASE_ASSERT(records.size() == ic.m_generators.size());
    
    for (unsigned i = records.size(); i--;) {
        StackMaps::Record& record = records[i];
        auto generator = ic.m_generators[i];

        CCallHelpers fastPathJIT(&vm, codeBlock);
        generator.generateFastPath(fastPathJIT);
        
        // The record's instruction offset locates the reserved IC slab inside
        // the LLVM-generated function.
        char* startOfIC =
            bitwise_cast<char*>(generatedFunction) + record.instructionOffset;
        
        LinkBuffer linkBuffer(vm, &fastPathJIT, startOfIC, sizeOfIC);
        // Note: we could handle the !isValid() case. We just don't appear to have a
        // reason to do so, yet.
        RELEASE_ASSERT(linkBuffer.isValid());
        
        // Pad the rest of the reserved slab with nops so execution falls
        // through cleanly past the IC.
        MacroAssembler::AssemblerType_T::fillNops(
            startOfIC + linkBuffer.size(), sizeOfIC - linkBuffer.size());
        
        // The slow path resumes at the first instruction past the IC slab.
        state.finalizer->sideCodeLinkBuffer->link(
            ic.m_slowPathDone[i], CodeLocationLabel(startOfIC + sizeOfIC));
        
        linkBuffer.link(
            generator.slowPathJump(),
            state.finalizer->sideCodeLinkBuffer->locationOf(generator.slowPathBegin()));
        
        generator.finalize(linkBuffer, *state.finalizer->sideCodeLinkBuffer);
    }
}
164
// Post-processes the LLVM-generated function using the parsed stackmaps:
// (1) rebases all virtual-register references by the machine frame offset
//     LLVM actually chose; (2) emits the exception-handling thunk; (3) emits
//     OSR exit thunks; (4) builds IC slow paths and splices IC fast paths into
//     the reserved patchpoint slabs; (5) prunes, sorts and links JS calls; and
// (6) patches stack-overflow/exception/OSR-exit sites to jump to their
//     handlers.
static void fixFunctionBasedOnStackMaps(
    State& state, CodeBlock* codeBlock, JITCode* jitCode, GeneratedFunction generatedFunction,
    StackMaps::RecordMap& recordMap)
{
    Graph& graph = state.graph;
    VM& vm = graph.m_vm;
    // NOTE(review): this local copies the stackmaps but is never read below —
    // everything goes through jitCode->stackmaps directly. Looks removable;
    // confirm before deleting.
    StackMaps stackmaps = jitCode->stackmaps;
    
    // The "captured" stackmap pins a single frame-register-relative location
    // from which we derive how far LLVM displaced our locals.
    StackMaps::RecordMap::iterator iter = recordMap.find(state.capturedStackmapID);
    RELEASE_ASSERT(iter != recordMap.end());
    RELEASE_ASSERT(iter->value.size() == 1);
    RELEASE_ASSERT(iter->value[0].locations.size() == 1);
    Location capturedLocation =
        Location::forStackmaps(&jitCode->stackmaps, iter->value[0].locations[0]);
    RELEASE_ASSERT(capturedLocation.kind() == Location::Register);
    RELEASE_ASSERT(capturedLocation.gpr() == GPRInfo::callFrameRegister);
    RELEASE_ASSERT(!(capturedLocation.addend() % sizeof(Register)));
    int32_t localsOffset = capturedLocation.addend() / sizeof(Register) + graph.m_nextMachineLocal;
    
    // Rebase every local-referencing structure by localsOffset so it matches
    // the machine frame layout.
    for (unsigned i = graph.m_inlineVariableData.size(); i--;) {
        InlineCallFrame* inlineCallFrame = graph.m_inlineVariableData[i].inlineCallFrame;
        
        if (inlineCallFrame->argumentsRegister.isValid()) {
            inlineCallFrame->argumentsRegister = VirtualRegister(
                inlineCallFrame->argumentsRegister.offset() + localsOffset);
        }
        
        // Argument 0 (the callee/this slot) is deliberately skipped.
        for (unsigned argument = inlineCallFrame->arguments.size(); argument-- > 1;) {
            inlineCallFrame->arguments[argument] =
                inlineCallFrame->arguments[argument].withLocalsOffset(localsOffset);
        }
        
        if (inlineCallFrame->isClosureCall) {
            inlineCallFrame->calleeRecovery =
                inlineCallFrame->calleeRecovery.withLocalsOffset(localsOffset);
        }
    }
    
    if (codeBlock->usesArguments()) {
        codeBlock->setArgumentsRegister(
            VirtualRegister(codeBlock->argumentsRegister().offset() + localsOffset));
    }

    MacroAssembler::Label stackOverflowException;

    // Emit the common exception-handling thunk. Two entry points: the general
    // one (uses the current call frame) and a stack-overflow one that first
    // unwinds to the caller's frame.
    {
        CCallHelpers checkJIT(&vm, codeBlock);
        
        // At this point it's perfectly fair to just blow away all state and restore the
        // JS JIT view of the universe.
        checkJIT.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR1);

        MacroAssembler::Label exceptionContinueArg1Set = checkJIT.label();
        checkJIT.move(MacroAssembler::TrustedImm64(TagTypeNumber), GPRInfo::tagTypeNumberRegister);
        checkJIT.move(MacroAssembler::TrustedImm64(TagMask), GPRInfo::tagMaskRegister);

        checkJIT.move(MacroAssembler::TrustedImmPtr(&vm), GPRInfo::argumentGPR0);
        MacroAssembler::Call call = checkJIT.call();
        checkJIT.jumpToExceptionHandler();

        // Stack overflow entry: the current frame may be unusable, so fetch
        // the caller frame into argumentGPR1 and join the common path.
        stackOverflowException = checkJIT.label();
        checkJIT.emitGetCallerFrameFromCallFrameHeaderPtr(GPRInfo::argumentGPR1);
        checkJIT.jump(exceptionContinueArg1Set);

        OwnPtr<LinkBuffer> linkBuffer = adoptPtr(new LinkBuffer(
            vm, &checkJIT, codeBlock, JITCompilationMustSucceed));
        linkBuffer->link(call, FunctionPtr(lookupExceptionHandler));
        
        state.finalizer->handleExceptionsLinkBuffer = linkBuffer.release();
    }

    // Emit OSR exit thunks and rebase the exit values' virtual registers.
    ExitThunkGenerator exitThunkGenerator(state);
    exitThunkGenerator.emitThunks();
    if (exitThunkGenerator.didThings()) {
        OwnPtr<LinkBuffer> linkBuffer = adoptPtr(new LinkBuffer(
            vm, &exitThunkGenerator, codeBlock, JITCompilationMustSucceed));
        
        ASSERT(state.finalizer->osrExit.size() == state.jitCode->osrExit.size());
        
        for (unsigned i = 0; i < state.jitCode->osrExit.size(); ++i) {
            OSRExitCompilationInfo& info = state.finalizer->osrExit[i];
            OSRExit& exit = jitCode->osrExit[i];
            
            if (verboseCompilationEnabled())
                dataLog("Handling OSR stackmap #", exit.m_stackmapID, " for ", exit.m_codeOrigin, "\n");

            iter = recordMap.find(exit.m_stackmapID);
            if (iter == recordMap.end()) {
                // It was optimized out.
                continue;
            }
            
            info.m_thunkAddress = linkBuffer->locationOf(info.m_thunkLabel);
            exit.m_patchableCodeOffset = linkBuffer->offsetOf(info.m_thunkJump);
            
            // Rebase exit values that live in local stack slots; arguments and
            // non-stack values are left alone.
            for (unsigned j = exit.m_values.size(); j--;) {
                ExitValue value = exit.m_values[j];
                if (!value.isInJSStackSomehow())
                    continue;
                if (!value.virtualRegister().isLocal())
                    continue;
                exit.m_values[j] = value.withVirtualRegister(
                    VirtualRegister(value.virtualRegister().offset() + localsOffset));
            }
            
            if (verboseCompilationEnabled()) {
                DumpContext context;
                dataLog("    Exit values: ", inContext(exit.m_values, &context), "\n");
            }
        }
        
        state.finalizer->exitThunksLinkBuffer = linkBuffer.release();
    }

    // Build the out-of-line slow paths for all get/put-by-id ICs, then splice
    // each IC's fast path into its reserved patchpoint slab.
    if (!state.getByIds.isEmpty() || !state.putByIds.isEmpty()) {
        CCallHelpers slowPathJIT(&vm, codeBlock);
        
        CCallHelpers::JumpList exceptionTarget;
        
        for (unsigned i = state.getByIds.size(); i--;) {
            GetByIdDescriptor& getById = state.getByIds[i];
            
            if (verboseCompilationEnabled())
                dataLog("Handling GetById stackmap #", getById.stackmapID(), "\n");
            
            iter = recordMap.find(getById.stackmapID());
            if (iter == recordMap.end()) {
                // It was optimized out.
                continue;
            }
            
            // NOTE(review): this inner loop's `i` shadows the outer loop's
            // `i`. Harmless as written (the outer `i` is only used above), but
            // a rename would make this less bug-prone.
            for (unsigned i = 0; i < iter->value.size(); ++i) {
                StackMaps::Record& record = iter->value[i];
            
                // FIXME: LLVM should tell us which registers are live.
                RegisterSet usedRegisters = RegisterSet::allRegisters();
                
                GPRReg result = record.locations[0].directGPR();
                GPRReg base = record.locations[1].directGPR();
                
                JITGetByIdGenerator gen(
                    codeBlock, getById.codeOrigin(), usedRegisters, JSValueRegs(base),
                    JSValueRegs(result), false);
                
                MacroAssembler::Label begin = slowPathJIT.label();
                
                MacroAssembler::Call call = callOperation(
                    state, usedRegisters, slowPathJIT, getById.codeOrigin(), &exceptionTarget,
                    operationGetByIdOptimize, result, gen.stubInfo(), base, getById.uid());
                
                gen.reportSlowPathCall(begin, call);
                
                getById.m_slowPathDone.append(slowPathJIT.jump());
                getById.m_generators.append(gen);
            }
        }
        
        for (unsigned i = state.putByIds.size(); i--;) {
            PutByIdDescriptor& putById = state.putByIds[i];
            
            if (verboseCompilationEnabled())
                dataLog("Handling PutById stackmap #", putById.stackmapID(), "\n");
            
            iter = recordMap.find(putById.stackmapID());
            if (iter == recordMap.end()) {
                // It was optimized out.
                continue;
            }
            
            // NOTE(review): inner `i` shadows the outer loop index here too.
            for (unsigned i = 0; i < iter->value.size(); ++i) {
                StackMaps::Record& record = iter->value[i];
                
                // FIXME: LLVM should tell us which registers are live.
                RegisterSet usedRegisters = RegisterSet::allRegisters();
                
                GPRReg base = record.locations[0].directGPR();
                GPRReg value = record.locations[1].directGPR();
                
                JITPutByIdGenerator gen(
                    codeBlock, putById.codeOrigin(), usedRegisters, JSValueRegs(base),
                    JSValueRegs(value), GPRInfo::patchpointScratchRegister, false,
                    putById.ecmaMode(), putById.putKind());
                
                MacroAssembler::Label begin = slowPathJIT.label();
                
                MacroAssembler::Call call = callOperation(
                    state, usedRegisters, slowPathJIT, putById.codeOrigin(), &exceptionTarget,
                    gen.slowPathFunction(), gen.stubInfo(), value, base, putById.uid());
                
                gen.reportSlowPathCall(begin, call);
                
                putById.m_slowPathDone.append(slowPathJIT.jump());
                putById.m_generators.append(gen);
            }
        }
        
        // Any operation that raised an exception funnels through one jump to
        // the shared exception handler.
        exceptionTarget.link(&slowPathJIT);
        MacroAssembler::Jump exceptionJump = slowPathJIT.jump();
        
        state.finalizer->sideCodeLinkBuffer = adoptPtr(
            new LinkBuffer(vm, &slowPathJIT, codeBlock, JITCompilationMustSucceed));
        state.finalizer->sideCodeLinkBuffer->link(
            exceptionJump, state.finalizer->handleExceptionsLinkBuffer->entrypoint());
        
        for (unsigned i = state.getByIds.size(); i--;) {
            generateICFastPath(
                state, codeBlock, generatedFunction, recordMap, state.getByIds[i],
                sizeOfGetById());
        }
        for (unsigned i = state.putByIds.size(); i--;) {
            generateICFastPath(
                state, codeBlock, generatedFunction, recordMap, state.putByIds[i],
                sizeOfPutById());
        }
    }
    
    // Handling JS calls is weird: we need to ensure that we sort them by the PC in LLVM
    // generated code. That implies first pruning the ones that LLVM didn't generate.
    Vector<JSCall> oldCalls = state.jsCalls;
    state.jsCalls.resize(0);
    for (unsigned i = 0; i < oldCalls.size(); ++i) {
        JSCall& call = oldCalls[i];
        
        StackMaps::RecordMap::iterator iter = recordMap.find(call.stackmapID());
        if (iter == recordMap.end())
            continue;

        // One stackmap ID can correspond to several machine-code sites (e.g.
        // if LLVM duplicated the block); emit a call record for each.
        for (unsigned j = 0; j < iter->value.size(); ++j) {
            JSCall copy = call;
            copy.m_instructionOffset = iter->value[j].instructionOffset;
            state.jsCalls.append(copy);
        }
    }
    
    std::sort(state.jsCalls.begin(), state.jsCalls.end());
    
    // Splice each call's fast path into its reserved slab, padding with nops,
    // and link it to a fresh CallLinkInfo.
    codeBlock->setNumberOfCallLinkInfos(state.jsCalls.size());
    for (unsigned i = state.jsCalls.size(); i--;) {
        JSCall& call = state.jsCalls[i];

        CCallHelpers fastPathJIT(&vm, codeBlock);
        call.emit(fastPathJIT);
        
        char* startOfIC = bitwise_cast<char*>(generatedFunction) + call.m_instructionOffset;
        
        LinkBuffer linkBuffer(vm, &fastPathJIT, startOfIC, sizeOfCall());
        RELEASE_ASSERT(linkBuffer.isValid());
        
        MacroAssembler::AssemblerType_T::fillNops(
            startOfIC + linkBuffer.size(), sizeOfCall() - linkBuffer.size());
        
        CallLinkInfo& info = codeBlock->callLinkInfo(i);
        call.link(vm, linkBuffer, info);
    }
    
    RepatchBuffer repatchBuffer(codeBlock);

    iter = recordMap.find(state.handleStackOverflowExceptionStackmapID);
    // It's sort of remotely possible that we won't have an in-band exception handling
    // path, for some kinds of functions.
    if (iter != recordMap.end()) {
        for (unsigned i = iter->value.size(); i--;) {
            StackMaps::Record& record = iter->value[i];
            
            CodeLocationLabel source = CodeLocationLabel(
                bitwise_cast<char*>(generatedFunction) + record.instructionOffset);

            RELEASE_ASSERT(stackOverflowException.isSet());

            repatchBuffer.replaceWithJump(source, state.finalizer->handleExceptionsLinkBuffer->locationOf(stackOverflowException));
        }
    }
    
    iter = recordMap.find(state.handleExceptionStackmapID);
    // It's sort of remotely possible that we won't have an in-band exception handling
    // path, for some kinds of functions.
    if (iter != recordMap.end()) {
        for (unsigned i = iter->value.size(); i--;) {
            StackMaps::Record& record = iter->value[i];
            
            CodeLocationLabel source = CodeLocationLabel(
                bitwise_cast<char*>(generatedFunction) + record.instructionOffset);
            
            repatchBuffer.replaceWithJump(source, state.finalizer->handleExceptionsLinkBuffer->entrypoint());
        }
    }
    
    // Patch each OSR exit site: invalidation points are recorded for lazy
    // patching; everything else gets an immediate jump to its thunk.
    for (unsigned exitIndex = 0; exitIndex < jitCode->osrExit.size(); ++exitIndex) {
        OSRExitCompilationInfo& info = state.finalizer->osrExit[exitIndex];
        OSRExit& exit = jitCode->osrExit[exitIndex];
        iter = recordMap.find(exit.m_stackmapID);
        
        Vector<const void*> codeAddresses;
        
        if (iter != recordMap.end()) {
            for (unsigned i = iter->value.size(); i--;) {
                StackMaps::Record& record = iter->value[i];
                
                CodeLocationLabel source = CodeLocationLabel(
                    bitwise_cast<char*>(generatedFunction) + record.instructionOffset);
                
                codeAddresses.append(bitwise_cast<char*>(generatedFunction) + record.instructionOffset + MacroAssembler::maxJumpReplacementSize());
                
                if (info.m_isInvalidationPoint)
                    jitCode->common.jumpReplacements.append(JumpReplacement(source, info.m_thunkAddress));
                else
                    repatchBuffer.replaceWithJump(source, info.m_thunkAddress);
            }
        }
        
        if (graph.compilation())
            graph.compilation()->addOSRExitSite(codeAddresses);
    }
}
479
// Drives the FTL backend: configures and creates an LLVM MCJIT engine backed
// by our memory-manager callbacks, runs the optimization pipeline, extracts
// the generated function, parses unwind info and stackmaps, and finally
// applies the stackmap-based fix-ups. On return, state.generatedFunction is
// set and state.module is relinquished to LLVM.
void compile(State& state)
{
    char* error = 0;
    
    {
        // LLVM compilation can take a while; park this graph at a safepoint so
        // the VM isn't blocked for the duration.
        GraphSafepoint safepoint(state.graph);
        
        LLVMMCJITCompilerOptions options;
        llvm->InitializeMCJITCompilerOptions(&options, sizeof(options));
        options.OptLevel = Options::llvmBackendOptimizationLevel();
        options.NoFramePointerElim = true;
        if (Options::useLLVMSmallCodeModel())
            options.CodeModel = LLVMCodeModelSmall;
        options.EnableFastISel = Options::enableLLVMFastISel();
        // Route all section allocation through our callbacks so JSC owns the
        // memory (see mmAllocateCodeSection/mmAllocateDataSection above).
        options.MCJMM = llvm->CreateSimpleMCJITMemoryManager(
            &state, mmAllocateCodeSection, mmAllocateDataSection, mmApplyPermissions, mmDestroy);
    
        LLVMExecutionEngineRef engine;
    
        if (llvm->CreateMCJITCompilerForModule(&engine, state.module, &options, sizeof(options), &error)) {
            dataLog("FATAL: Could not create LLVM execution engine: ", error, "\n");
            CRASH();
        }

        LLVMPassManagerRef functionPasses = 0;
        LLVMPassManagerRef modulePasses;
    
        if (Options::llvmSimpleOpt()) {
            // Hand-picked minimal pipeline: cheaper than the full pass-builder
            // setup below.
            modulePasses = llvm->CreatePassManager();
            llvm->AddTargetData(llvm->GetExecutionEngineTargetData(engine), modulePasses);
            llvm->AddPromoteMemoryToRegisterPass(modulePasses);
            llvm->AddConstantPropagationPass(modulePasses);
            llvm->AddInstructionCombiningPass(modulePasses);
            llvm->AddBasicAliasAnalysisPass(modulePasses);
            llvm->AddTypeBasedAliasAnalysisPass(modulePasses);
            llvm->AddGVNPass(modulePasses);
            llvm->AddCFGSimplificationPass(modulePasses);
            llvm->AddDeadStoreEliminationPass(modulePasses);
            llvm->RunPassManager(modulePasses, state.module);
        } else {
            // Full pipeline built from LLVM's standard PassManagerBuilder at
            // the configured opt/size levels.
            LLVMPassManagerBuilderRef passBuilder = llvm->PassManagerBuilderCreate();
            llvm->PassManagerBuilderSetOptLevel(passBuilder, Options::llvmOptimizationLevel());
            llvm->PassManagerBuilderSetSizeLevel(passBuilder, Options::llvmSizeLevel());
        
            functionPasses = llvm->CreateFunctionPassManagerForModule(state.module);
            modulePasses = llvm->CreatePassManager();
        
            llvm->AddTargetData(llvm->GetExecutionEngineTargetData(engine), modulePasses);
        
            llvm->PassManagerBuilderPopulateFunctionPassManager(passBuilder, functionPasses);
            llvm->PassManagerBuilderPopulateModulePassManager(passBuilder, modulePasses);
        
            llvm->PassManagerBuilderDispose(passBuilder);
        
            llvm->InitializeFunctionPassManager(functionPasses);
            for (LValue function = llvm->GetFirstFunction(state.module); function; function = llvm->GetNextFunction(function))
                llvm->RunFunctionPassManager(functionPasses, function);
            llvm->FinalizeFunctionPassManager(functionPasses);
        
            llvm->RunPassManager(modulePasses, state.module);
        }

        if (shouldShowDisassembly() || verboseCompilationEnabled())
            state.dumpState("after optimization");
    
        // FIXME: Need to add support for the case where JIT memory allocation failed.
        // https://bugs.webkit.org/show_bug.cgi?id=113620
        // This call triggers MCJIT codegen, which in turn invokes our
        // allocation callbacks.
        state.generatedFunction = reinterpret_cast<GeneratedFunction>(llvm->GetPointerToGlobal(engine, state.function));
        if (functionPasses)
            llvm->DisposePassManager(functionPasses);
        llvm->DisposePassManager(modulePasses);
        llvm->DisposeExecutionEngine(engine);
    }

    if (shouldShowDisassembly()) {
        for (unsigned i = 0; i < state.jitCode->handles().size(); ++i) {
            ExecutableMemoryHandle* handle = state.jitCode->handles()[i].get();
            dataLog(
                "Generated LLVM code for ",
                CodeBlockWithJITType(state.graph.m_codeBlock, JITCode::FTLJIT),
                " #", i, ", ", state.codeSectionNames[i], ":\n");
            disassemble(
                MacroAssemblerCodePtr(handle->start()), handle->sizeInBytes(),
                "    ", WTF::dataFile(), LLVMSubset);
        }
        
        for (unsigned i = 0; i < state.jitCode->dataSections().size(); ++i) {
            DataSection* section = state.jitCode->dataSections()[i].get();
            dataLog(
                "Generated LLVM data section for ",
                CodeBlockWithJITType(state.graph.m_codeBlock, JITCode::FTLJIT),
                " #", i, ", ", state.dataSectionNames[i], ":\n");
            dumpDataSection(section, "    ");
        }
    }
    
    // compactUnwind/compactUnwindSize were captured by the section-allocation
    // callbacks during codegen.
    state.jitCode->unwindInfo.parse(
        state.compactUnwind, state.compactUnwindSize, state.generatedFunction);
    if (shouldShowDisassembly())
        dataLog("Unwind info for ", CodeBlockWithJITType(state.graph.m_codeBlock, JITCode::FTLJIT), ":\n    ", state.jitCode->unwindInfo, "\n");
    
    if (state.stackmapsSection && state.stackmapsSection->size()) {
        if (shouldShowDisassembly()) {
            dataLog(
                "Generated LLVM stackmaps section for ",
                CodeBlockWithJITType(state.graph.m_codeBlock, JITCode::FTLJIT), ":\n");
            dataLog("    Raw data:\n");
            dumpDataSection(state.stackmapsSection.get(), "    ");
        }
        
        RefPtr<DataView> stackmapsData = DataView::create(
            ArrayBuffer::create(state.stackmapsSection->base(), state.stackmapsSection->size()));
        state.jitCode->stackmaps.parse(stackmapsData.get());
    
        if (shouldShowDisassembly()) {
            dataLog("    Structured data:\n");
            state.jitCode->stackmaps.dumpMultiline(WTF::dataFile(), "        ");
        }
        
        StackMaps::RecordMap recordMap = state.jitCode->stackmaps.computeRecordMap();
        fixFunctionBasedOnStackMaps(
            state, state.graph.m_codeBlock, state.jitCode.get(), state.generatedFunction,
            recordMap);
        
        if (shouldShowDisassembly()) {
            // Only the __text section(s) changed during fix-up, so only those
            // are re-disassembled.
            for (unsigned i = 0; i < state.jitCode->handles().size(); ++i) {
                if (state.codeSectionNames[i] != "__text")
                    continue;
                
                ExecutableMemoryHandle* handle = state.jitCode->handles()[i].get();
                dataLog(
                    "Generated LLVM code after stackmap-based fix-up for ",
                    CodeBlockWithJITType(state.graph.m_codeBlock, JITCode::FTLJIT),
                    " in ", state.graph.m_plan.mode, " #", i, ", ",
                    state.codeSectionNames[i], ":\n");
                disassemble(
                    MacroAssemblerCodePtr(handle->start()), handle->sizeInBytes(),
                    "    ", WTF::dataFile(), LLVMSubset);
            }
        }
    }
    
    state.module = 0; // We no longer own the module.
}
624
625 } } // namespace JSC::FTL
626
627 #endif // ENABLE(FTL_JIT)
628