/*
 * Copyright (C) 2008, 2011, 2013-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1.  Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 * 3.  Neither the name of Apple Inc. ("Apple") nor the names of
 *     its contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "VM.h"

#include "ArgList.h"
#include "ArityCheckFailReturnThunks.h"
#include "ArrayBufferNeuteringWatchpoint.h"
#include "BuiltinExecutables.h"
#include "CodeBlock.h"
#include "CodeCache.h"
#include "CommonIdentifiers.h"
#include "CommonSlowPaths.h"
#include "CustomGetterSetter.h"
#include "DFGLongLivedState.h"
#include "DFGWorklist.h"
#include "Disassembler.h"
#include "ErrorInstance.h"
#include "FTLThunks.h"
#include "FunctionConstructor.h"
#include "GCActivityCallback.h"
#include "GetterSetter.h"
#include "Heap.h"
#include "HeapIterationScope.h"
#include "HostCallReturnValue.h"
#include "Identifier.h"
#include "IncrementalSweeper.h"
#include "Interpreter.h"
#include "JITCode.h"
#include "JSAPIValueWrapper.h"
#include "JSArray.h"
#include "JSCInlines.h"
#include "JSFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSLexicalEnvironment.h"
#include "JSLock.h"
#include "JSNameScope.h"
#include "JSNotAnObject.h"
#include "JSPromiseDeferred.h"
#include "JSPromiseReaction.h"
#include "JSPropertyNameEnumerator.h"
#include "JSTemplateRegistryKey.h"
#include "JSWithScope.h"
#include "Lexer.h"
#include "Lookup.h"
#include "MapData.h"
#include "Nodes.h"
#include "Parser.h"
#include "ProfilerDatabase.h"
#include "PropertyMapHashTable.h"
#include "RegExpCache.h"
#include "RegExpObject.h"
#include "RuntimeType.h"
#include "SimpleTypedArrayController.h"
#include "SourceProviderCache.h"
#include "StackVisitor.h"
#include "StrictEvalActivation.h"
#include "StrongInlines.h"
#include "StructureInlines.h"
#include "TypeProfiler.h"
#include "TypeProfilerLog.h"
#include "UnlinkedCodeBlock.h"
#include "WeakGCMapInlines.h"
#include "WeakMapData.h"
#include <wtf/CurrentTime.h>
#include <wtf/ProcessID.h>
#include <wtf/RetainPtr.h>
#include <wtf/StringPrintStream.h>
#include <wtf/Threading.h>
#include <wtf/WTFThreadData.h>
#include <wtf/text/AtomicStringTable.h>
#include <wtf/text/SymbolRegistry.h>

#if ENABLE(DFG_JIT)
#include "ConservativeRoots.h"
#endif

#if ENABLE(REGEXP_TRACING)
#include "RegExp.h"
#endif

#if USE(CF)
#include <CoreFoundation/CoreFoundation.h>
#endif

using namespace WTF;

namespace JSC {

// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.

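// Decides, once at VM creation, whether generated code may be used at all.
// Returns false if both the JIT and the RegExp JIT are disabled by Options, or
// if executable memory could not be allocated. On CF/Unix platforms the
// JavaScriptCoreUseJIT environment variable can additionally veto the assembler.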
#if ENABLE(ASSEMBLER)
static bool enableAssembler(ExecutableAllocator& executableAllocator)
{
    if (!Options::useJIT() && !Options::useRegExpJIT())
        return false;

    if (!executableAllocator.isValid()) {
        if (Options::crashIfCantAllocateJITMemory())
            CRASH();
        return false;
    }

#if USE(CF) || OS(UNIX)
    char* canUseJITString = getenv("JavaScriptCoreUseJIT");
    return !canUseJITString || atoi(canUseJITString);
#else
    return true;
#endif
}
#endif // ENABLE(ASSEMBLER)

VM::VM(VMType vmType, HeapType heapType)
    : m_apiLock(adoptRef(new JSLock(this)))
#if ENABLE(ASSEMBLER)
    , executableAllocator(*this)
#endif
    , heap(this, heapType)
    , vmType(vmType)
    , clientData(0)
    , topVMEntryFrame(nullptr)
    , topCallFrame(CallFrame::noCaller())
    , m_atomicStringTable(vmType == Default ? wtfThreadData().atomicStringTable() : new AtomicStringTable)
    , propertyNames(nullptr)
    , emptyList(new MarkedArgumentBuffer)
    , stringCache(*this)
    , prototypeMap(*this)
    , keywords(std::make_unique<Keywords>(*this))
    , interpreter(0)
    , jsArrayClassInfo(JSArray::info())
    , jsFinalObjectClassInfo(JSFinalObject::info())
    , sizeOfLastScratchBuffer(0)
    , entryScope(0)
    , m_regExpCache(new RegExpCache(this))
#if ENABLE(REGEXP_TRACING)
    , m_rtTraceList(new RTTraceList())
#endif
    , m_newStringsSinceLastHashCons(0)
#if ENABLE(ASSEMBLER)
    , m_canUseAssembler(enableAssembler(executableAllocator))
#endif
#if ENABLE(JIT)
    , m_canUseJIT(m_canUseAssembler && Options::useJIT())
#endif
#if ENABLE(YARR_JIT)
    , m_canUseRegExpJIT(m_canUseAssembler && Options::useRegExpJIT())
#endif
#if ENABLE(GC_VALIDATION)
    , m_initializingObjectClass(0)
#endif
    , m_stackPointerAtVMEntry(0)
    , m_stackLimit(0)
#if !ENABLE(JIT)
    , m_jsStackLimit(0)
#endif
#if ENABLE(FTL_JIT)
    , m_ftlStackLimit(0)
    , m_largestFTLStackSize(0)
#endif
    , m_inDefineOwnProperty(false)
    , m_codeCache(std::make_unique<CodeCache>())
    , m_enabledProfiler(nullptr)
    , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
    , m_typeProfilerEnabledCount(0)
    , m_controlFlowProfilerEnabledCount(0)
{
    interpreter = new Interpreter(*this);
    StackBounds stack = wtfThreadData().stack();
    updateReservedZoneSize(Options::reservedZoneSize());
#if !ENABLE(JIT)
    interpreter->stack().setReservedZoneSize(Options::reservedZoneSize());
#endif
    setLastStackTop(stack.origin());

    // Need to be careful to keep everything consistent here: hold the API lock
    // and make this VM's atomic string table current before creating the common
    // identifiers and structures below.
    JSLockHolder lock(this);
    AtomicStringTable* existingEntryAtomicStringTable = wtfThreadData().setCurrentAtomicStringTable(m_atomicStringTable);
    propertyNames = new CommonIdentifiers(this);
    structureStructure.set(*this, Structure::createStructure(*this));
    structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
    terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
    stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
    notAnObjectStructure.set(*this, JSNotAnObject::createStructure(*this, 0, jsNull()));
    propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
    getterSetterStructure.set(*this, GetterSetter::createStructure(*this, 0, jsNull()));
    customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
    scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
    apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
    JSScopeStructure.set(*this, JSScope::createStructure(*this, 0, jsNull()));
    executableStructure.set(*this, ExecutableBase::createStructure(*this, 0, jsNull()));
    nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
    evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
    programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
    functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
    regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
    symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
    symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
    structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
    sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
    templateRegistryKeyStructure.set(*this, JSTemplateRegistryKey::createStructure(*this, 0, jsNull()));
    arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
    unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
    unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
    propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
    weakMapDataStructure.set(*this, WeakMapData::createStructure(*this, 0, jsNull()));
    inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
    functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
#if ENABLE(PROMISES)
    promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
    promiseReactionStructure.set(*this, JSPromiseReaction::createStructure(*this, 0, jsNull()));
#endif
    iterationTerminator.set(*this, JSFinalObject::create(*this, JSFinalObject::createStructure(*this, 0, jsNull(), 1)));
    smallStrings.initializeCommonStrings(*this);

    wtfThreadData().setCurrentAtomicStringTable(existingEntryAtomicStringTable);

#if ENABLE(JIT)
    jitStubs = std::make_unique<JITThunks>();
    arityCheckFailReturnThunks = std::make_unique<ArityCheckFailReturnThunks>();
#endif
    arityCheckData = std::make_unique<CommonSlowPaths::ArityCheckData>();

#if ENABLE(FTL_JIT)
    ftlThunks = std::make_unique<FTL::Thunks>();
#endif // ENABLE(FTL_JIT)

    interpreter->initialize(this->canUseJIT());

#if ENABLE(JIT)
    initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
#endif

    heap.notifyIsSafeToCollect();

    LLInt::Data::performAssertions(*this);

    if (Options::enableProfiler()) {
        m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);

        StringPrintStream pathOut;
        const char* profilerPath = getenv("JSC_PROFILER_PATH");
        if (profilerPath)
            pathOut.print(profilerPath, "/");
        pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
        m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
    }

#if ENABLE(DFG_JIT)
    if (canUseJIT())
        dfgState = std::make_unique<DFG::LongLivedState>();
#endif

    // Initialize this last, as a free way of asserting that VM initialization itself
    // won't use this.
    m_typedArrayController = adoptRef(new SimpleTypedArrayController());

    if (Options::enableTypeProfiler())
        enableTypeProfiler();
    if (Options::enableControlFlowProfiler())
        enableControlFlowProfiler();
}

VM::~VM()
{
    // Never GC, ever again.
    heap.incrementDeferralDepth();

#if ENABLE(DFG_JIT)
    // Make sure concurrent compilations are done, but don't install them, since
    // there is no point in doing so.
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::worklistForIndexOrNull(i)) {
            worklist->waitUntilAllPlansForVMAreReady(*this);
            worklist->removeAllReadyPlansForVM(*this);
        }
    }
#endif // ENABLE(DFG_JIT)

    waitForAsynchronousDisassembly();

    // Clear this first to ensure that nobody tries to remove themselves from it.
    m_perBytecodeProfiler = nullptr;

    ASSERT(m_apiLock->currentThreadIsHoldingLock());
    m_apiLock->willDestroyVM(this);
    heap.lastChanceToFinalize();

    delete interpreter;
#ifndef NDEBUG
    interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
#endif

    delete emptyList;

    delete propertyNames;
    if (vmType != Default)
        delete m_atomicStringTable;

    delete clientData;
    delete m_regExpCache;
#if ENABLE(REGEXP_TRACING)
    delete m_rtTraceList;
#endif

#if ENABLE(DFG_JIT)
    for (unsigned i = 0; i < scratchBuffers.size(); ++i)
        fastFree(scratchBuffers[i]);
#endif
}

PassRefPtr<VM> VM::createContextGroup(HeapType heapType)
{
    return adoptRef(new VM(APIContextGroup, heapType));
}

PassRefPtr<VM> VM::create(HeapType heapType)
{
    return adoptRef(new VM(Default, heapType));
}

PassRefPtr<VM> VM::createLeaked(HeapType heapType)
{
    return create(heapType);
}

bool VM::sharedInstanceExists()
{
    return sharedInstanceInternal();
}

VM& VM::sharedInstance()
{
    GlobalJSLock globalLock;
    VM*& instance = sharedInstanceInternal();
    if (!instance)
        instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
    return *instance;
}

VM*& VM::sharedInstanceInternal()
{
    static VM* sharedInstance;
    return sharedInstance;
}

#if ENABLE(JIT)
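// Maps a function intrinsic to its specialized thunk generator. Returning 0
// means no specialized thunk exists, in which case getHostFunction() lets the
// stub cache fall back to the generic native call thunk.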
static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
{
    switch (intrinsic) {
    case CharCodeAtIntrinsic:
        return charCodeAtThunkGenerator;
    case CharAtIntrinsic:
        return charAtThunkGenerator;
    case Clz32Intrinsic:
        return clz32ThunkGenerator;
    case FromCharCodeIntrinsic:
        return fromCharCodeThunkGenerator;
    case SqrtIntrinsic:
        return sqrtThunkGenerator;
    case PowIntrinsic:
        return powThunkGenerator;
    case AbsIntrinsic:
        return absThunkGenerator;
    case FloorIntrinsic:
        return floorThunkGenerator;
    case CeilIntrinsic:
        return ceilThunkGenerator;
    case RoundIntrinsic:
        return roundThunkGenerator;
    case ExpIntrinsic:
        return expThunkGenerator;
    case LogIntrinsic:
        return logThunkGenerator;
    case IMulIntrinsic:
        return imulThunkGenerator;
    default:
        return 0;
    }
}

NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor)
{
    return jitStubs->hostFunctionStub(this, function, constructor);
}

NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic)
{
    ASSERT(canUseJIT());
    return jitStubs->hostFunctionStub(this, function, intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0, intrinsic);
}

#else // !ENABLE(JIT)

NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor)
{
    return NativeExecutable::create(*this,
        adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_call_trampoline), JITCode::HostCallThunk)), function,
        adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_construct_trampoline), JITCode::HostCallThunk)), constructor,
        NoIntrinsic);
}

#endif // !ENABLE(JIT)

VM::ClientData::~ClientData()
{
}

void VM::resetDateCache()
{
    localTimeOffsetCache.reset();
    cachedDateString = String();
    cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
    dateInstanceCache.reset();
}

void VM::startSampling()
{
    interpreter->startSampling();
}

void VM::stopSampling()
{
    interpreter->stopSampling();
}

void VM::prepareToDiscardCode()
{
#if ENABLE(DFG_JIT)
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::worklistForIndexOrNull(i))
            worklist->completeAllPlansForVM(*this);
    }
#endif // ENABLE(DFG_JIT)
}

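// Throws away all compiled code in the VM: finishes in-flight DFG compilations
// first, then clears the eval/program code cache, RegExp generated code, and all
// compiled and unlinked function code in the heap, and finally tells the GC that
// a large object graph was abandoned.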
void VM::discardAllCode()
{
    prepareToDiscardCode();
    m_codeCache->clear();
    m_regExpCache->invalidateCode();
    heap.deleteAllCompiledCode();
    heap.deleteAllUnlinkedFunctionCode();
    heap.reportAbandonedObjectGraph();
}

void VM::dumpSampleData(ExecState* exec)
{
    interpreter->dumpSampleData(exec);
#if ENABLE(ASSEMBLER)
    ExecutableAllocator::dumpProfile();
#endif
}

SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
{
    auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
    if (addResult.isNewEntry)
        addResult.iterator->value = adoptRef(new SourceProviderCache);
    return addResult.iterator->value.get();
}

void VM::clearSourceProviderCaches()
{
    sourceProviderCacheMap.clear();
}

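// Heap functor that clears the compiled code of every FunctionExecutable in the
// heap except the ones found on the stack, which releaseExecutableMemory() below
// collects into currentlyExecutingFunctions first.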
struct StackPreservingRecompiler : public MarkedBlock::VoidFunctor {
    HashSet<FunctionExecutable*> currentlyExecutingFunctions;
    inline void visit(JSCell* cell)
    {
        if (!cell->inherits(FunctionExecutable::info()))
            return;
        FunctionExecutable* executable = jsCast<FunctionExecutable*>(cell);
        if (currentlyExecutingFunctions.contains(executable))
            return;
        executable->clearCodeIfNotCompiling();
    }
    IterationStatus operator()(JSCell* cell)
    {
        visit(cell);
        return IterationStatus::Continue;
    }
};

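// Frees as much executable memory as possible. If we are inside the VM (there is
// an entry scope), conservatively scan the register roots for executables that
// may currently be running, unlink their calls, and clear code for everything
// else; then drop RegExp code and force a full collection.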
void VM::releaseExecutableMemory()
{
    prepareToDiscardCode();

    if (entryScope) {
        StackPreservingRecompiler recompiler;
        HeapIterationScope iterationScope(heap);
        HashSet<JSCell*> roots;
        heap.getConservativeRegisterRoots(roots);
        HashSet<JSCell*>::iterator end = roots.end();
        for (HashSet<JSCell*>::iterator ptr = roots.begin(); ptr != end; ++ptr) {
            ScriptExecutable* executable = 0;
            JSCell* cell = *ptr;
            if (cell->inherits(ScriptExecutable::info()))
                executable = static_cast<ScriptExecutable*>(*ptr);
            else if (cell->inherits(JSFunction::info())) {
                JSFunction* function = jsCast<JSFunction*>(*ptr);
                if (function->isHostFunction())
                    continue;
                executable = function->jsExecutable();
            } else
                continue;
            ASSERT(executable->inherits(ScriptExecutable::info()));
            executable->unlinkCalls();
            if (executable->inherits(FunctionExecutable::info()))
                recompiler.currentlyExecutingFunctions.add(static_cast<FunctionExecutable*>(executable));
        }
        heap.objectSpace().forEachLiveCell<StackPreservingRecompiler>(iterationScope, recompiler);
    }
    m_regExpCache->invalidateCode();
    heap.collectAllGarbage();
}

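// Records the exception on the VM along with the stack trace captured at the
// throw point, and returns the thrown value so callers can propagate it.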
JSValue VM::throwException(ExecState* exec, JSValue error)
{
    if (Options::breakOnThrow()) {
        dataLog("In call frame ", RawPointer(exec), " for code block ", *exec->codeBlock(), "\n");
        CRASH();
    }

    ASSERT(exec == topCallFrame || exec == exec->lexicalGlobalObject()->globalExec() || exec == exec->vmEntryGlobalObject()->globalExec());

    Vector<StackFrame> stackTrace;
    interpreter->getStackTrace(stackTrace);
    m_exceptionStack = RefCountedArray<StackFrame>(stackTrace);
    m_exception = error;

    return error;
}

JSObject* VM::throwException(ExecState* exec, JSObject* error)
{
    return asObject(throwException(exec, JSValue(error)));
}

void VM::getExceptionInfo(JSValue& exception, RefCountedArray<StackFrame>& exceptionStack)
{
    exception = m_exception;
    exceptionStack = m_exceptionStack;
}

void VM::setExceptionInfo(JSValue& exception, RefCountedArray<StackFrame>& exceptionStack)
{
    m_exception = exception;
    m_exceptionStack = exceptionStack;
}

void VM::clearException()
{
    m_exception = JSValue();
}

void VM::clearExceptionStack()
{
    m_exceptionStack = RefCountedArray<StackFrame>();
}

void VM::setStackPointerAtVMEntry(void* sp)
{
    m_stackPointerAtVMEntry = sp;
    updateStackLimit();
}

size_t VM::updateReservedZoneSize(size_t reservedZoneSize)
{
    size_t oldReservedZoneSize = m_reservedZoneSize;
    m_reservedZoneSize = reservedZoneSize;

    updateStackLimit();

    return oldReservedZoneSize;
}

#if PLATFORM(WIN)
// On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
// where the guard page is a barrier between committed and uncommitted memory.
// When data from the guard page is read or written, the guard page is moved, and memory is committed.
// This is how the system grows the stack.
// When using the C stack on Windows we need to precommit the needed stack space.
// Otherwise we might crash later if we access uncommitted stack memory.
// This can happen if we allocate stack space larger than the page guard size (4K).
// The system does not get the chance to move the guard page, and commit more memory,
// and we crash if uncommitted memory is accessed.
// The MSVC compiler fixes this by inserting a call to the _chkstk() function,
// when needed, see http://support.microsoft.com/kb/100775.
// By touching every page up to the stack limit with a dummy operation,
// we force the system to move the guard page, and commit memory.

static void preCommitStackMemory(void* stackLimit)
{
    const int pageSize = 4096;
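    // &stackLimit is the address of a local, so it approximates the current stack
    // pointer; walking from there down to the limit one page at a time touches
    // each page so the system commits it.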
    for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
        char ch = *p;
        *p = ch;
    }
}
#endif

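// Recomputes the stack limits from either the stack pointer recorded at VM entry
// or the thread's stack bounds. With the FTL enabled, the normal limit leaves
// headroom for one largest-known FTL frame and m_ftlStackLimit leaves headroom
// for two. On Windows, any newly exposed stack range is pre-committed.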
inline void VM::updateStackLimit()
{
#if PLATFORM(WIN)
    void* lastStackLimit = m_stackLimit;
#endif

    if (m_stackPointerAtVMEntry) {
        ASSERT(wtfThreadData().stack().isGrowingDownward());
        char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
#if ENABLE(FTL_JIT)
        m_stackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_reservedZoneSize + m_largestFTLStackSize);
        m_ftlStackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_reservedZoneSize + 2 * m_largestFTLStackSize);
#else
        m_stackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_reservedZoneSize);
#endif
    } else {
#if ENABLE(FTL_JIT)
        m_stackLimit = wtfThreadData().stack().recursionLimit(m_reservedZoneSize + m_largestFTLStackSize);
        m_ftlStackLimit = wtfThreadData().stack().recursionLimit(m_reservedZoneSize + 2 * m_largestFTLStackSize);
#else
        m_stackLimit = wtfThreadData().stack().recursionLimit(m_reservedZoneSize);
#endif
    }

#if PLATFORM(WIN)
    if (lastStackLimit != m_stackLimit)
        preCommitStackMemory(m_stackLimit);
#endif
}

#if ENABLE(FTL_JIT)
void VM::updateFTLLargestStackSize(size_t stackSize)
{
    if (stackSize > m_largestFTLStackSize) {
        m_largestFTLStackSize = stackSize;
        updateStackLimit();
    }
}
#endif

void releaseExecutableMemory(VM& vm)
{
    vm.releaseExecutableMemory();
}

#if ENABLE(DFG_JIT)
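// Scratch buffers handed out to DFG code can contain JSValues, so the active
// portion of each one must be scanned conservatively during GC.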
void VM::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
{
    for (size_t i = 0; i < scratchBuffers.size(); i++) {
        ScratchBuffer* scratchBuffer = scratchBuffers[i];
        if (scratchBuffer->activeLength()) {
            void* bufferStart = scratchBuffer->dataBuffer();
            conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
        }
    }
}
#endif

void logSanitizeStack(VM* vm)
{
    if (Options::verboseSanitizeStack() && vm->topCallFrame) {
        int dummy;
        dataLog(
            "Sanitizing stack with top call frame at ", RawPointer(vm->topCallFrame),
            ", current stack pointer at ", RawPointer(&dummy), ", in ",
            pointerDump(vm->topCallFrame->codeBlock()), " and last code origin = ",
            vm->topCallFrame->codeOrigin(), "\n");
    }
}

#if ENABLE(REGEXP_TRACING)
void VM::addRegExpToTrace(RegExp* regExp)
{
    gcProtect(regExp);
    m_rtTraceList->add(regExp);
}

void VM::dumpRegExpTrace()
{
    // The first RegExp object is ignored. It is created by the RegExpPrototype
    // constructor and not used.
    RTTraceList::iterator iter = ++m_rtTraceList->begin();

    if (iter != m_rtTraceList->end()) {
        dataLogF("\nRegExp Tracing\n");
        dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
        dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
        dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");

        unsigned reCount = 0;

        for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
            (*iter)->printTraceData();
            gcUnprotect(*iter);
        }

        dataLogF("%d Regular Expressions\n", reCount);
    }

    m_rtTraceList->clear();
}
#else
void VM::dumpRegExpTrace()
{
}
#endif

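// Watchpoint sets keyed by property name, used by optimized code that assumes a
// named property has no impure (interception-style) behavior. addImpureProperty()
// takes the set out of the map and fires it, invalidating everything registered.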
void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
{
    auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
    if (result.isNewEntry)
        result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
    result.iterator->value->add(watchpoint);
}

void VM::addImpureProperty(const String& propertyName)
{
    if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
        watchpointSet->fireAll("Impure property added");
}

class SetEnabledProfilerFunctor {
public:
    bool operator()(CodeBlock* codeBlock)
    {
        if (JITCode::isOptimizingJIT(codeBlock->jitType()))
            codeBlock->jettison(Profiler::JettisonDueToLegacyProfiler);
        return false;
    }
};

void VM::setEnabledProfiler(LegacyProfiler* profiler)
{
    m_enabledProfiler = profiler;
    if (m_enabledProfiler) {
        prepareToDiscardCode();
        SetEnabledProfilerFunctor functor;
        heap.forEachCodeBlock(functor);
    }
}

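// Type and control flow profiling are reference counted: the profiler is created
// on the 0 -> 1 transition and destroyed on the 1 -> 0 transition. The return
// value tells the caller whether existing code must be recompiled to pick up
// (or drop) the instrumentation.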
static bool enableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doEnableWork)
{
    bool needsToRecompile = false;
    if (!counter) {
        doEnableWork();
        needsToRecompile = true;
    }
    counter++;

    return needsToRecompile;
}

static bool disableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doDisableWork)
{
    RELEASE_ASSERT(counter > 0);
    bool needsToRecompile = false;
    counter--;
    if (!counter) {
        doDisableWork();
        needsToRecompile = true;
    }

    return needsToRecompile;
}

bool VM::enableTypeProfiler()
{
    auto enableTypeProfiler = [this] () {
        this->m_typeProfiler = std::make_unique<TypeProfiler>();
        this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>();
    };

    return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
}

bool VM::disableTypeProfiler()
{
    auto disableTypeProfiler = [this] () {
        this->m_typeProfiler.reset(nullptr);
        this->m_typeProfilerLog.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
}

bool VM::enableControlFlowProfiler()
{
    auto enableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
    };

    return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
}

bool VM::disableControlFlowProfiler()
{
    auto disableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
}

void VM::dumpTypeProfilerData()
{
    if (!typeProfiler())
        return;

    typeProfilerLog()->processLogEntries(ASCIILiteral("VM Dump Types"));
    typeProfiler()->dumpTypeProfilerData(*this);
}

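// Zeroes the unused part of the stack so that stale values left behind by deep
// calls cannot be mistaken for live pointers by the conservative GC.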
void sanitizeStackForVM(VM* vm)
{
    logSanitizeStack(vm);
#if !ENABLE(JIT)
    vm->interpreter->stack().sanitizeStack();
#else
    sanitizeStackForVMImpl(vm);
#endif
}

} // namespace JSC