[JSC] Get rid of operationInitGlobalConst(), it is useless
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2016 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArrayConstructor.h"
32 #include "CommonSlowPaths.h"
33 #include "DFGCompilationMode.h"
34 #include "DFGDriver.h"
35 #include "DFGOSREntry.h"
36 #include "DFGThunks.h"
37 #include "DFGWorklist.h"
38 #include "Debugger.h"
39 #include "DirectArguments.h"
40 #include "Error.h"
41 #include "ErrorHandlingScope.h"
42 #include "ExceptionFuzz.h"
43 #include "GetterSetter.h"
44 #include "HostCallReturnValue.h"
45 #include "JIT.h"
46 #include "JITExceptions.h"
47 #include "JITToDFGDeferredCompilationCallback.h"
48 #include "JSCInlines.h"
49 #include "JSGeneratorFunction.h"
50 #include "JSGlobalObjectFunctions.h"
51 #include "JSLexicalEnvironment.h"
52 #include "JSPropertyNameEnumerator.h"
53 #include "JSStackInlines.h"
54 #include "JSWithScope.h"
55 #include "LegacyProfiler.h"
56 #include "ObjectConstructor.h"
57 #include "PropertyName.h"
58 #include "Repatch.h"
59 #include "ScopedArguments.h"
60 #include "SuperSampler.h"
61 #include "TestRunnerUtils.h"
62 #include "TypeProfilerLog.h"
63 #include "VMInlines.h"
64 #include <wtf/InlineASM.h>
65
66 namespace JSC {
67
68 extern "C" {
69
70 #if COMPILER(MSVC)
71 void * _ReturnAddress(void);
72 #pragma intrinsic(_ReturnAddress)
73
74 #define OUR_RETURN_ADDRESS _ReturnAddress()
75 #else
76 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
77 #endif
78
79 #if ENABLE(OPCODE_SAMPLING)
80 #define CTI_SAMPLER vm->interpreter->sampler()
81 #else
82 #define CTI_SAMPLER 0
83 #endif
84
85
86 void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
87 {
88     // We pass in our own code block, because the callframe hasn't been populated.
89     VM* vm = codeBlock->vm();
90
91     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
92     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
93     if (!callerFrame)
94         callerFrame = exec;
95
96     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
97     throwStackOverflowError(callerFrame);
98 }
99
100 #if ENABLE(WEBASSEMBLY)
101 void JIT_OPERATION operationThrowDivideError(ExecState* exec)
102 {
103     VM* vm = &exec->vm();
104     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
105     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
106
107     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
108     ErrorHandlingScope errorScope(*vm);
109     vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
110 }
111
112 void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
113 {
114     VM* vm = &exec->vm();
115     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
116     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
117
118     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
119     ErrorHandlingScope errorScope(*vm);
120     vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
121 }
122 #endif
123
124 int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
125 {
126     VM* vm = &exec->vm();
127     JSStack& stack = vm->interpreter->stack();
128
129     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
130     if (missingArgCount < 0) {
131         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
132         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
133         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
134         throwStackOverflowError(callerFrame);
135     }
136
137     return missingArgCount;
138 }
139
140 int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
141 {
142     VM* vm = &exec->vm();
143     JSStack& stack = vm->interpreter->stack();
144
145     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
146     if (missingArgCount < 0) {
147         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
148         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
149         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
150         throwStackOverflowError(callerFrame);
151     }
152
153     return missingArgCount;
154 }
155
156 EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
157 {
158     VM* vm = &exec->vm();
159     NativeCallFrameTracer tracer(vm, exec);
160     
161     stubInfo->tookSlowPath = true;
162     
163     JSValue baseValue = JSValue::decode(base);
164     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
165     Identifier ident = Identifier::fromUid(vm, uid);
166     return JSValue::encode(baseValue.get(exec, ident, slot));
167 }
168
169 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
170 {
171     VM* vm = &exec->vm();
172     NativeCallFrameTracer tracer(vm, exec);
173     
174     JSValue baseValue = JSValue::decode(base);
175     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
176     Identifier ident = Identifier::fromUid(vm, uid);
177     return JSValue::encode(baseValue.get(exec, ident, slot));
178 }
179
180 EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
181 {
182     VM* vm = &exec->vm();
183     NativeCallFrameTracer tracer(vm, exec);
184     Identifier ident = Identifier::fromUid(vm, uid);
185
186     JSValue baseValue = JSValue::decode(base);
187     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
188     
189     bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
190     if (stubInfo->considerCaching())
191         repatchGetByID(exec, baseValue, ident, slot, *stubInfo);
192     
193     return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
194 }
195
196 EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
197 {
198     VM* vm = &exec->vm();
199     NativeCallFrameTracer tracer(vm, exec);
200     
201     if (!base->isObject()) {
202         vm->throwException(exec, createInvalidInParameterError(exec, base));
203         return JSValue::encode(jsUndefined());
204     }
205     
206     AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
207
208     Identifier ident = Identifier::fromUid(vm, key);
209     PropertySlot slot(base, PropertySlot::InternalMethodType::HasProperty);
210     bool result = asObject(base)->getPropertySlot(exec, ident, slot);
211     
212     RELEASE_ASSERT(accessType == stubInfo->accessType);
213     
214     if (stubInfo->considerCaching())
215         repatchIn(exec, base, ident, result, slot, *stubInfo);
216     
217     return JSValue::encode(jsBoolean(result));
218 }
219
220 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
221 {
222     VM* vm = &exec->vm();
223     NativeCallFrameTracer tracer(vm, exec);
224     
225     stubInfo->tookSlowPath = true;
226
227     if (!base->isObject()) {
228         vm->throwException(exec, createInvalidInParameterError(exec, base));
229         return JSValue::encode(jsUndefined());
230     }
231
232     Identifier ident = Identifier::fromUid(vm, key);
233     return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
234 }
235
236 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
237 {
238     VM* vm = &exec->vm();
239     NativeCallFrameTracer tracer(vm, exec);
240
241     return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
242 }
243
244 void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
245 {
246     VM* vm = &exec->vm();
247     NativeCallFrameTracer tracer(vm, exec);
248     
249     stubInfo->tookSlowPath = true;
250     
251     Identifier ident = Identifier::fromUid(vm, uid);
252     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
253     JSValue::decode(encodedBase).putInline(exec, ident, JSValue::decode(encodedValue), slot);
254 }
255
256 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
257 {
258     VM* vm = &exec->vm();
259     NativeCallFrameTracer tracer(vm, exec);
260     
261     stubInfo->tookSlowPath = true;
262     
263     Identifier ident = Identifier::fromUid(vm, uid);
264     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
265     JSValue::decode(encodedBase).putInline(exec, ident, JSValue::decode(encodedValue), slot);
266 }
267
268 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
269 {
270     VM* vm = &exec->vm();
271     NativeCallFrameTracer tracer(vm, exec);
272     
273     stubInfo->tookSlowPath = true;
274     
275     Identifier ident = Identifier::fromUid(vm, uid);
276     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
277     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
278 }
279
280 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
281 {
282     VM* vm = &exec->vm();
283     NativeCallFrameTracer tracer(vm, exec);
284     
285     stubInfo->tookSlowPath = true;
286     
287     Identifier ident = Identifier::fromUid(vm, uid);
288     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
289     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
290 }
291
// Optimizing strict-mode put_by_id slow path: performs the put, then attempts
// to (re)patch the inline cache so subsequent puts take the fast path.
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type; the put below may run arbitrary code
    // that resets or repurposes the stub, and we must not patch stale state.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put so repatching sees the
    // pre-transition structure.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    // Stub changed under us: bail out of patching.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
313
// Optimizing sloppy-mode put_by_id slow path; identical to the strict variant
// except the PutPropertySlot is created non-strict.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type; the put below may run arbitrary code
    // that resets or repurposes the stub, and we must not patch stale state.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put so repatching sees the
    // pre-transition structure.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    // Stub changed under us: bail out of patching.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
335
// Optimizing strict-mode direct put_by_id slow path: stores directly on the
// object (no prototype chain), then attempts to (re)patch the inline cache.
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type; the put below may run arbitrary code
    // that resets or repurposes the stub, and we must not patch stale state.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put so repatching sees the
    // pre-transition structure.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    // Stub changed under us: bail out of patching.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
357
// Optimizing sloppy-mode direct put_by_id slow path; identical to the strict
// variant except the PutPropertySlot is created non-strict.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type; the put below may run arbitrary code
    // that resets or repurposes the stub, and we must not patch stale state.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put so repatching sees the
    // pre-transition structure.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    // Stub changed under us: bail out of patching.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching())
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
379
// Completes a property put that requires growing out-of-line storage: the JIT
// fast path bailed out here because the new Structure needs more out-of-line
// capacity than the allocator's fast path can provide. Transitions the object
// to `structure` (reallocating butterfly storage as needed) and stores the
// value at the already-computed offset.
void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Sanity: the target structure must actually need more out-of-line storage,
    // and the JIT's inline allocation fast path must have been unable to supply it.
    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}
390
391 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
392 {
393     return value.isString() || value.isSymbol();
394 }
395
396 static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
397 {
398     VM& vm = callFrame->vm();
399     if (LIKELY(subscript.isUInt32())) {
400         byValInfo->tookSlowPath = true;
401         uint32_t i = subscript.asUInt32();
402         if (baseValue.isObject()) {
403             JSObject* object = asObject(baseValue);
404             if (object->canSetIndexQuickly(i))
405                 object->setIndexQuickly(callFrame->vm(), i, value);
406             else {
407                 // FIXME: This will make us think that in-bounds typed array accesses are actually
408                 // out-of-bounds.
409                 // https://bugs.webkit.org/show_bug.cgi?id=149886
410                 byValInfo->arrayProfile->setOutOfBounds();
411                 object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
412             }
413         } else
414             baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
415         return;
416     }
417
418     auto property = subscript.toPropertyKey(callFrame);
419     // Don't put to an object if toString threw an exception.
420     if (callFrame->vm().exception())
421         return;
422
423     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
424         byValInfo->tookSlowPath = true;
425
426     PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
427     baseValue.putInline(callFrame, property, value, slot);
428 }
429
430 static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
431 {
432     bool isStrictMode = callFrame->codeBlock()->isStrictMode();
433     if (LIKELY(subscript.isUInt32())) {
434         // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
435         byValInfo->tookSlowPath = true;
436         uint32_t index = subscript.asUInt32();
437         ASSERT(isIndex(index));
438         if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
439             baseObject->setIndexQuickly(callFrame->vm(), index, value);
440             return;
441         }
442
443         // FIXME: This will make us think that in-bounds typed array accesses are actually
444         // out-of-bounds.
445         // https://bugs.webkit.org/show_bug.cgi?id=149886
446         byValInfo->arrayProfile->setOutOfBounds();
447         baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
448         return;
449     }
450
451     if (subscript.isDouble()) {
452         double subscriptAsDouble = subscript.asDouble();
453         uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
454         if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
455             byValInfo->tookSlowPath = true;
456             baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
457             return;
458         }
459     }
460
461     // Don't put to an object if toString threw an exception.
462     auto property = subscript.toPropertyKey(callFrame);
463     if (callFrame->vm().exception())
464         return;
465
466     if (Optional<uint32_t> index = parseIndex(property)) {
467         byValInfo->tookSlowPath = true;
468         baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
469         return;
470     }
471
472     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
473         byValInfo->tookSlowPath = true;
474
475     PutPropertySlot slot(baseObject, isStrictMode);
476     baseObject->putDirect(callFrame->vm(), property, value, slot);
477 }
478
// Outcome of a by-val inline-cache optimization attempt.
enum class OptimizationResult {
    NotOptimized, // No applicable caching strategy found this time.
    SeenOnce,     // First sighting of a cacheable identifier; remembered for next time.
    Optimized,    // A specialized stub was compiled and patched in.
    GiveUp,       // Site looks generic/polymorphic; stop trying to optimize it.
};
485
// Decides whether (and how) to specialize a put_by_val site: either compiles
// an array-shape-specialized stub for int32 subscripts, or a cached-identifier
// stub for repeated string/symbol subscripts. Returns what happened so the
// caller can decide whether to give up on this site entirely.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        // Indexed put on an object: try compiling a stub specialized on the
        // object's current array storage shape.
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                // Locked: the array profile is shared with the concurrent JIT.
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        // Identifier-like subscript (string/symbol that is not an array index):
        // cache the name on first sight, compile a cached-id stub on the second
        // sight of the same name, and give up if the name varies.
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
551
552 void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
553 {
554     VM& vm = exec->vm();
555     NativeCallFrameTracer tracer(&vm, exec);
556
557     JSValue baseValue = JSValue::decode(encodedBaseValue);
558     JSValue subscript = JSValue::decode(encodedSubscript);
559     JSValue value = JSValue::decode(encodedValue);
560     if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
561         // Don't ever try to optimize.
562         byValInfo->tookSlowPath = true;
563         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
564     }
565     putByVal(exec, baseValue, subscript, value, byValInfo);
566 }
567
// Direct-put counterpart of tryPutByValOptimize: decides whether to compile an
// array-shape-specialized stub (int32 subscripts) or a cached-identifier stub
// (repeated string/symbol subscripts) for a put_by_val_direct site.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        // Indexed direct put: try compiling a stub specialized on the object's
        // current array storage shape.
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                // Locked: the array profile is shared with the concurrent JIT.
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        // Identifier-like subscript (string/symbol that is not an array index):
        // cache the name on first sight, compile a cached-id stub on the second
        // sight of the same name, and give up if the name varies.
        const Identifier propertyName = subscript.toPropertyKey(exec);
        Optional<uint32_t> index = parseIndex(propertyName);

        if (!subscript.isString() || !index) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
631
632 void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
633 {
634     VM& vm = exec->vm();
635     NativeCallFrameTracer tracer(&vm, exec);
636
637     JSValue baseValue = JSValue::decode(encodedBaseValue);
638     JSValue subscript = JSValue::decode(encodedSubscript);
639     JSValue value = JSValue::decode(encodedValue);
640     RELEASE_ASSERT(baseValue.isObject());
641     JSObject* object = asObject(baseValue);
642     if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
643         // Don't ever try to optimize.
644         byValInfo->tookSlowPath = true;
645         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
646     }
647
648     directPutByVal(exec, object, subscript, value, byValInfo);
649 }
650
651 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
652 {
653     VM& vm = exec->vm();
654     NativeCallFrameTracer tracer(&vm, exec);
655     
656     JSValue baseValue = JSValue::decode(encodedBaseValue);
657     JSValue subscript = JSValue::decode(encodedSubscript);
658     JSValue value = JSValue::decode(encodedValue);
659
660     putByVal(exec, baseValue, subscript, value, byValInfo);
661 }
662
663
664 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
665 {
666     VM& vm = exec->vm();
667     NativeCallFrameTracer tracer(&vm, exec);
668     
669     JSValue baseValue = JSValue::decode(encodedBaseValue);
670     JSValue subscript = JSValue::decode(encodedSubscript);
671     JSValue value = JSValue::decode(encodedValue);
672     RELEASE_ASSERT(baseValue.isObject());
673     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
674 }
675
676 EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
677 {
678     UNUSED_PARAM(exec);
679
680     execCallee->setCodeBlock(0);
681
682     if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
683         return JSValue::encode(JSValue());
684
685     VM* vm = &execCallee->vm();
686     JSValue result = eval(execCallee);
687     if (vm->exception())
688         return EncodedJSValue();
689     
690     return JSValue::encode(result);
691 }
692
// Handles a call or construct whose target is not a JS-code-backed JSFunction:
// either a host (native) callable or something that is not callable at all.
// Returns a (machine code pointer, frame action) pair for the JIT to jump to:
// the host-call return trampoline on success, or the exception-throwing stub
// when the callee is invalid or the native call raised an exception.
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    // No CodeBlock: the callee frame will not execute JS code.
    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);

        // A JS-callable target would have been linked directly, never routed here.
        ASSERT(callType != CallType::JS);

        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception()) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // Tail calls may reuse the caller's frame; everything else keeps it.
            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }

        // Not callable at all: throw a TypeError in the caller's frame.
        ASSERT(callType == CallType::None);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);

    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);

    ASSERT(constructType != ConstructType::JS);

    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception()) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }

    // Not constructible: throw a TypeError in the caller's frame.
    ASSERT(constructType == ConstructType::None);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
754
// Slow path for an unlinked call site. Resolves the callee to machine code,
// links the call site to it (after the site has been seen once), and returns
// the (entry point, frame action) pair the JIT should jump to. Host functions
// and non-function callees are delegated to handleHostCall().
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        // Host functions have no arity fast path; always check arity.
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // Constructing something that cannot construct (e.g. an arrow function)
        // is a TypeError raised in the caller's frame.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Compile (or otherwise materialize) the code for this specialization;
        // a non-null return is an error object to throw.
        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    // Only link after the call site has fired twice, to avoid linking
    // one-shot call sites.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);

    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
824
// Shared implementation of the virtual (unlinked, polymorphic) call slow path.
// Resolves the callee to an entry point without linking the call site, and
// reports the callee cell back through calleeAsFunctionCell so callers (e.g.
// operationLinkPolymorphicCall) can use it for polymorphic linking. Always
// returns the MustCheckArity entry point.
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);

    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    // Only prepare/compile when there is no JIT code for this specialization yet.
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            // Constructing a non-constructible function is a TypeError in the
            // caller's frame.
            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // A non-null return from prepareForExecution is an error to throw.
            JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
            if (error) {
                exec->vm().throwException(exec, error);
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
881
882 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
883 {
884     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
885     JSCell* calleeAsFunctionCell;
886     SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
887
888     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
889     
890     return result;
891 }
892
893 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
894 {
895     JSCell* calleeAsFunctionCellIgnored;
896     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
897 }
898
899 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
900 {
901     VM* vm = &exec->vm();
902     NativeCallFrameTracer tracer(vm, exec);
903     
904     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
905 }
906
907 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
908 {
909     VM* vm = &exec->vm();
910     NativeCallFrameTracer tracer(vm, exec);
911
912     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
913 }
914
915 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
916 {
917     VM* vm = &exec->vm();
918     NativeCallFrameTracer tracer(vm, exec);
919
920     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
921 }
922
923 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
924 {
925     VM* vm = &exec->vm();
926     NativeCallFrameTracer tracer(vm, exec);
927
928     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
929 }
930
931 size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
932 {
933     VM* vm = &exec->vm();
934     NativeCallFrameTracer tracer(vm, exec);
935     
936     return JSValue::decode(encodedOp).toBoolean(exec);
937 }
938
939 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
940 {
941     VM* vm = &exec->vm();
942     NativeCallFrameTracer tracer(vm, exec);
943
944     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
945 }
946
947 #if USE(JSVALUE64)
948 EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
949 #else
950 size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
951 #endif
952 {
953     VM* vm = &exec->vm();
954     NativeCallFrameTracer tracer(vm, exec);
955
956     bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
957 #if USE(JSVALUE64)
958     return JSValue::encode(jsBoolean(result));
959 #else
960     return result;
961 #endif
962 }
963
964 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
965 {
966     VM* vm = &exec->vm();
967     NativeCallFrameTracer tracer(vm, exec);
968     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
969 }
970
971 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
972 {
973     VM* vm = &exec->vm();
974     NativeCallFrameTracer tracer(vm, exec);
975     return JSValue::encode(constructArray(exec, profile, values, size));
976 }
977
978 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
979 {
980     VM* vm = &exec->vm();
981     NativeCallFrameTracer tracer(vm, exec);
982     JSValue sizeValue = JSValue::decode(size);
983     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
984 }
985
986 }
987
988 template<typename FunctionType>
989 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
990 {
991     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
992     VM& vm = exec->vm();
993     NativeCallFrameTracer tracer(&vm, exec);
994     if (isInvalidated)
995         return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
996     return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
997 }
998
999 extern "C" {
1000
// Creates a JSFunction; the allocation-site watchpoint is still intact.
EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
}
1005
// Creates a JSFunction after the allocation-site watchpoint has fired.
EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
}
1010
// Creates a JSGeneratorFunction; the allocation-site watchpoint is still intact.
EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
}
1015
// Creates a JSGeneratorFunction after the allocation-site watchpoint has fired.
EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
}
1020
1021 void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
1022 {
1023     JSFunction* func = jsCast<JSFunction*>(funcCell);
1024     JSValue name = JSValue::decode(encodedName);
1025     func->setFunctionName(exec, name);
1026 }
1027
1028 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1029 {
1030     VM* vm = &exec->vm();
1031     NativeCallFrameTracer tracer(vm, exec);
1032
1033     return constructEmptyObject(exec, structure);
1034 }
1035
1036 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1037 {
1038     SuperSamplerScope superSamplerScope(false);
1039     VM& vm = exec->vm();
1040     NativeCallFrameTracer tracer(&vm, exec);
1041     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1042     if (!regexp->isValid()) {
1043         vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
1044         return JSValue::encode(jsUndefined());
1045     }
1046
1047     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1048 }
1049
1050 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1051 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1052 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1053 // future, we can switch to using that then.
1054 UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
1055 {
1056     VM& vm = exec->vm();
1057     NativeCallFrameTracer tracer(&vm, exec);
1058
1059     if (UNLIKELY(vm.shouldTriggerTermination(exec)))
1060         vm.throwException(exec, createTerminatedExecutionException(&vm));
1061
1062     return nullptr;
1063 }
1064
1065 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1066 {
1067     VM& vm = exec->vm();
1068     NativeCallFrameTracer tracer(&vm, exec);
1069     JSValue errorMessageValue = JSValue::decode(encodedValue);
1070     RELEASE_ASSERT(errorMessageValue.isString());
1071     String errorMessage = asString(errorMessageValue)->value(exec);
1072     if (referenceErrorFlag)
1073         vm.throwException(exec, createReferenceError(exec, errorMessage));
1074     else
1075         vm.throwException(exec, createTypeError(exec, errorMessage));
1076 }
1077
1078 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
1079 {
1080     VM& vm = exec->vm();
1081     NativeCallFrameTracer tracer(&vm, exec);
1082
1083     vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
1084 }
1085
1086 #if ENABLE(DFG_JIT)
// Refreshes the code block's value predictions and reschedules tier-up to
// fire again after the normal warm-up period. Used by operationOptimize when
// it declines to optimize right now.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1092
// Baseline->DFG tier-up entry point. Decides whether to (a) do nothing yet,
// (b) kick off or complete a DFG compilation of this code block, (c) trigger
// reoptimization of a poorly-behaving replacement, or (d) OSR-enter into the
// optimized replacement right now. A return of encodeResult(0, 0) means
// "no OSR: keep running baseline code"; a non-zero first word is the machine
// code address to jump to, with the OSR scratch buffer in the second word.
// bytecodeIndex is non-zero when entered from a loop trigger (OSR from a
// loop) and zero when entered from the function prologue.
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);

    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }

    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }

    // With a profiler attached we stay in baseline; just reschedule tier-up.
    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    // While the debugger is stepping (or has requests pending against the
    // baseline code), don't switch code versions under it.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.

        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.

        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Capture the current local values so the compiler can speculate on
        // them at the OSR entry point. For prologue entry (bytecodeIndex == 0)
        // only the parameters matter.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            // Skip locals reserved for callee-save registers; they hold raw
            // register contents, not JSValues.
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());

        if (result != CompilationSuccessful)
            return encodeResult(0, 0);
    }

    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));

    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();

    return encodeResult(0, 0);
}
1326 #endif
1327
1328 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1329 {
1330     VM& vm = exec->vm();
1331     NativeCallFrameTracer tracer(&vm, exec);
1332
1333     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1334     ASSERT(isJSArray(arrayValue));
1335     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1336 }
1337
// Discriminates which half of an accessor pair putAccessorByVal installs.
enum class AccessorType {
    Getter,
    Setter
};
1342
1343 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1344 {
1345     auto propertyKey = subscript.toPropertyKey(exec);
1346     if (exec->hadException())
1347         return;
1348
1349     if (accessorType == AccessorType::Getter)
1350         base->putGetter(exec, propertyKey, accessor, attribute);
1351     else
1352         base->putSetter(exec, propertyKey, accessor, attribute);
1353 }
1354
1355 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1356 {
1357     VM& vm = exec->vm();
1358     NativeCallFrameTracer tracer(&vm, exec);
1359
1360     ASSERT(object && object->isObject());
1361     JSObject* baseObj = object->getObject();
1362
1363     ASSERT(getter->isObject());
1364     baseObj->putGetter(exec, uid, getter, options);
1365 }
1366
1367 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1368 {
1369     VM& vm = exec->vm();
1370     NativeCallFrameTracer tracer(&vm, exec);
1371
1372     ASSERT(object && object->isObject());
1373     JSObject* baseObj = object->getObject();
1374
1375     ASSERT(setter->isObject());
1376     baseObj->putSetter(exec, uid, setter, options);
1377 }
1378
1379 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1380 {
1381     VM& vm = exec->vm();
1382     NativeCallFrameTracer tracer(&vm, exec);
1383
1384     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1385 }
1386
1387 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1388 {
1389     VM& vm = exec->vm();
1390     NativeCallFrameTracer tracer(&vm, exec);
1391
1392     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1393 }
1394
1395 #if USE(JSVALUE64)
1396 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1397 {
1398     VM& vm = exec->vm();
1399     NativeCallFrameTracer tracer(&vm, exec);
1400
1401     ASSERT(object && object->isObject());
1402     JSObject* baseObj = asObject(object);
1403
1404     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1405
1406     JSValue getter = JSValue::decode(encodedGetterValue);
1407     JSValue setter = JSValue::decode(encodedSetterValue);
1408     ASSERT(getter.isObject() || getter.isUndefined());
1409     ASSERT(setter.isObject() || setter.isUndefined());
1410     ASSERT(getter.isObject() || setter.isObject());
1411
1412     if (!getter.isUndefined())
1413         accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1414     if (!setter.isUndefined())
1415         accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1416     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1417 }
1418
1419 #else
1420 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
1421 {
1422     VM& vm = exec->vm();
1423     NativeCallFrameTracer tracer(&vm, exec);
1424
1425     ASSERT(object && object->isObject());
1426     JSObject* baseObj = asObject(object);
1427
1428     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1429
1430     ASSERT(!getter || getter->isObject());
1431     ASSERT(!setter || setter->isObject());
1432     ASSERT(getter || setter);
1433
1434     if (getter)
1435         accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1436     if (setter)
1437         accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1438     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1439 }
1440 #endif
1441
1442 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1443 {
1444     VM& vm = exec->vm();
1445     NativeCallFrameTracer tracer(&vm, exec);
1446
1447     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1448     exec->uncheckedR(scopeReg) = scope->next();
1449 }
1450
1451 void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
1452 {
1453     VM& vm = exec->vm();
1454     NativeCallFrameTracer tracer(&vm, exec);
1455
1456     if (LegacyProfiler* profiler = vm.enabledProfiler())
1457         profiler->didExecute(exec, JSValue::decode(encodedValue));
1458 }
1459
1460 void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
1461 {
1462     VM& vm = exec->vm();
1463     NativeCallFrameTracer tracer(&vm, exec);
1464
1465     if (LegacyProfiler* profiler = vm.enabledProfiler())
1466         profiler->willExecute(exec, JSValue::decode(encodedValue));
1467 }
1468
1469 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1470 {
1471     VM& vm = exec->vm();
1472     NativeCallFrameTracer tracer(&vm, exec);
1473
1474     JSValue value = JSValue::decode(encodedValue);
1475     JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1476
1477     ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure()->typeInfo().implementsDefaultHasInstance());
1478
1479     if (constructor->hasInstance(exec, value, hasInstanceValue))
1480         return 1;
1481     return 0;
1482 }
1483
1484 }
1485
1486 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1487 {
1488     switch (object.structure()->typeInfo().type()) {
1489     case DirectArgumentsType: {
1490         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1491         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1492             return true;
1493         break;
1494     }
1495     case ScopedArgumentsType: {
1496         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1497         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1498             return true;
1499         break;
1500     }
1501     default:
1502         break;
1503     }
1504     return false;
1505 }
1506
// Generic get_by_val slow path shared by the ByVal operations below. Tries, in
// order: a fast own-property lookup for string subscripts, an indexed access for
// uint32 subscripts (repatching string-index sites to operationGetByValString),
// and finally a fully generic property-key lookup. Updates |byValInfo| profiling
// state (tookSlowPath, out-of-bounds) as it goes.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // Fast case: string subscript on a cell whose structure permits the
    // lock-free own-property peek.
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            // Only an already-atomized subscript can hit; atomizing here would be wasted work.
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A stub specialized on a different identifier missed; record that.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // Repatch this call site to the string-specialized operation.
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            if (!canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        return baseValue.get(exec, i);
    }

    // Fully generic path: coerce to a property key and do a regular get.
    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();

    ASSERT(exec->bytecodeOffset());
    // Mark the slow path unless this access matches the identifier a stub was built for.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}
1564
// Decides whether (and how) to specialize a get_by_val site: compiles an
// array-mode stub for int32 subscripts, or an identifier-cached stub for
// repeated string/symbol subscripts. Returns GiveUp once the site looks too
// polymorphic to be worth patching.
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // Index-like string subscripts are left to the array paths above.
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same identifier twice in a row: worth a specialized stub.
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First observation: remember the identifier and wait for a repeat.
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }

        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1633
1634 extern "C" {
1635
1636 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1637 {
1638     VM& vm = exec->vm();
1639     NativeCallFrameTracer tracer(&vm, exec);
1640     JSValue baseValue = JSValue::decode(encodedBase);
1641     JSValue subscript = JSValue::decode(encodedSubscript);
1642
1643     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1644     return JSValue::encode(result);
1645 }
1646
1647 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1648 {
1649     VM& vm = exec->vm();
1650     NativeCallFrameTracer tracer(&vm, exec);
1651
1652     JSValue baseValue = JSValue::decode(encodedBase);
1653     JSValue subscript = JSValue::decode(encodedSubscript);
1654     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1655     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1656         // Don't ever try to optimize.
1657         byValInfo->tookSlowPath = true;
1658         ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
1659     }
1660
1661     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1662 }
1663
// Slow path for has_indexed_property (for-in fast path). First tries to compile
// an array-mode-specialized stub for this site; after 10 misses (or an
// index-intercepting object) it repatches the site to the generic operation.
// Then answers the actual "has property at index" question.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);
    
    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }
    
    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    // Fast answer when the index is directly readable on the object.
    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
}
1714     
1715 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1716 {
1717     VM& vm = exec->vm();
1718     NativeCallFrameTracer tracer(&vm, exec);
1719     JSValue baseValue = JSValue::decode(encodedBase);
1720     JSValue subscript = JSValue::decode(encodedSubscript);
1721     
1722     ASSERT(baseValue.isObject());
1723     ASSERT(subscript.isUInt32());
1724
1725     JSObject* object = asObject(baseValue);
1726     uint32_t index = subscript.asUInt32();
1727     if (object->canGetIndexQuickly(index))
1728         return JSValue::encode(JSValue(JSValue::JSTrue));
1729
1730     if (!canAccessArgumentIndexQuickly(*object, index)) {
1731         // FIXME: This will make us think that in-bounds typed array accesses are actually
1732         // out-of-bounds.
1733         // https://bugs.webkit.org/show_bug.cgi?id=149886
1734         byValInfo->arrayProfile->setOutOfBounds();
1735     }
1736     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript.asUInt32(), PropertySlot::InternalMethodType::GetOwnProperty)));
1737 }
1738     
// get_by_val slow path specialized for string bases with uint32 subscripts
// (installed by getByVal() above). If the base stops being a string, it
// repatches the site back to the optimize/generic operation.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                ASSERT(exec->bytecodeOffset());
                // Base is no longer a string; undo the specialization. Fall back to the
                // generic path if a stub exists, otherwise let the site try optimizing again.
                ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        // Non-uint32 subscript: perform the fully generic property-key lookup.
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
1770
1771 EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
1772 {
1773     VM& vm = exec->vm();
1774     NativeCallFrameTracer tracer(&vm, exec);
1775
1776     JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
1777     if (!baseObj)
1778         JSValue::encode(JSValue());
1779     bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
1780     JSValue result = jsBoolean(couldDelete);
1781     if (!couldDelete && exec->codeBlock()->isStrictMode())
1782         vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
1783     return JSValue::encode(result);
1784 }
1785
1786 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1787 {
1788     VM& vm = exec->vm();
1789     NativeCallFrameTracer tracer(&vm, exec);
1790     JSValue value = JSValue::decode(encodedValue);
1791     JSValue proto = JSValue::decode(encodedProto);
1792     
1793     bool result = JSObject::defaultHasInstance(exec, value, proto);
1794     return JSValue::encode(jsBoolean(result));
1795 }
1796
1797 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1798 {
1799     VM& vm = exec->vm();
1800     NativeCallFrameTracer tracer(&vm, exec);
1801     JSStack* stack = &exec->interpreter()->stack();
1802     JSValue arguments = JSValue::decode(encodedArguments);
1803     return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
1804 }
1805
1806 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1807 {
1808     VM& vm = exec->vm();
1809     NativeCallFrameTracer tracer(&vm, exec);
1810     JSValue arguments = JSValue::decode(encodedArguments);
1811     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1812     return newCallFrame;
1813 }
1814
1815 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1816 {
1817     VM& vm = exec->vm();
1818     NativeCallFrameTracer tracer(&vm, exec);
1819     JSObject* obj = JSValue::decode(value).toObject(exec);
1820     if (!obj)
1821         return JSValue::encode(JSValue());
1822     return JSValue::encode(obj);
1823 }
1824
1825 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1826 {
1827     VM& vm = exec->vm();
1828     NativeCallFrameTracer tracer(&vm, exec);
1829     JSValue key = JSValue::decode(encodedKey);
1830     CodeBlock* codeBlock = exec->codeBlock();
1831
1832     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1833     void* result = jumpTable.ctiDefault.executableAddress();
1834
1835     if (key.isString()) {
1836         StringImpl* value = asString(key)->value(exec).impl();
1837         if (value->length() == 1)
1838             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1839     }
1840
1841     return reinterpret_cast<char*>(result);
1842 }
1843
1844 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1845 {
1846     VM& vm = exec->vm();
1847     NativeCallFrameTracer tracer(&vm, exec);
1848     JSValue key = JSValue::decode(encodedKey);
1849     CodeBlock* codeBlock = exec->codeBlock();
1850
1851     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1852     void* result;
1853     if (key.isInt32())
1854         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1855     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1856         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
1857     else
1858         result = jumpTable.ctiDefault.executableAddress();
1859     return reinterpret_cast<char*>(result);
1860 }
1861
1862 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1863 {
1864     VM& vm = exec->vm();
1865     NativeCallFrameTracer tracer(&vm, exec);
1866     JSValue key = JSValue::decode(encodedKey);
1867     CodeBlock* codeBlock = exec->codeBlock();
1868
1869     void* result;
1870     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1871
1872     if (key.isString()) {
1873         StringImpl* value = asString(key)->value(exec).impl();
1874         result = jumpTable.ctiForValue(value).executableAddress();
1875     } else
1876         result = jumpTable.ctiDefault.executableAddress();
1877
1878     return reinterpret_cast<char*>(result);
1879 }
1880
// Slow path for get_from_scope. Decodes the operands out of the bytecode
// instruction, looks the identifier up on the resolved scope object, performs
// the TDZ check for global lexical bindings, and tries to cache the access for
// future fast-path hits.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    // Operand layout: pc[2] = scope register, pc[3] = identifier index, pc[4] = GetPutInfo bits.
    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        if (getPutInfo.resolveMode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    JSValue result = JSValue();
    if (scope->isGlobalLexicalEnvironment()) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        result = slot.getValue(exec, ident);
        if (result == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return JSValue::encode(jsUndefined());
        }
    }

    CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

    // |result| is only non-empty if the TDZ branch above already read it.
    if (!result)
        result = slot.getValue(exec, ident);
    return JSValue::encode(result);
}
1918
// Slow path for put_to_scope. Handles the LocalClosureVar fast case (direct
// environment store + watchpoint fire), TDZ checking for global lexical
// bindings, ThrowIfNotFound semantics, the actual put, and finally caching.
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    // Operand layout: pc[1] = scope register, pc[2] = identifier index,
    // pc[3] = value register, pc[4] = GetPutInfo bits, pc[5] = watchpoint set,
    // pc[6] = scope offset (LocalClosureVar only).
    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        // Invalidate any code specialized on this variable's previous value.
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch("Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && scope->isGlobalLexicalEnvironment()
        && getPutInfo.initializationMode() != Initialization) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, getPutInfo.initializationMode() == Initialization);
    scope->methodTable()->put(scope, exec, ident, value, slot);
    
    // Don't cache if the put itself threw.
    if (exec->vm().exception())
        return;

    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
1968
1969 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
1970 {
1971     VM* vm = &exec->vm();
1972     NativeCallFrameTracer tracer(vm, exec);
1973
1974     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
1975     vm->throwException(exec, exceptionValue);
1976
1977     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
1978     genericUnwind(vm, exec);
1979 }
1980
1981 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
1982 {
1983     VM* vm = &exec->vm();
1984     NativeCallFrameTracer tracer(vm, exec);
1985     vm->heap.flushWriteBarrierBuffer(cell);
1986 }
1987
1988 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
1989 {
1990     VM* vm = &exec->vm();
1991     NativeCallFrameTracer tracer(vm, exec);
1992     vm->heap.writeBarrier(cell);
1993 }
1994
1995 // NB: We don't include the value as part of the barrier because the write barrier elision
1996 // phase in the DFG only tracks whether the object being stored to has been barriered. It 
1997 // would be much more complicated to try to model the value being stored as well.
1998 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
1999 {
2000     VM* vm = &exec->vm();
2001     NativeCallFrameTracer tracer(vm, exec);
2002     vm->heap.writeBarrier(cell);
2003 }
2004
// Unwinds from the current frame to the nearest exception handler; the handler
// PC is delivered out-of-band via vm->targetMachinePCForThrow.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2011
// Like lookupExceptionHandler(), but starts the unwind search at the caller's
// frame instead of the current one.
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}
2018
2019 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2020 {
2021     VM* vm = &exec->vm();
2022     NativeCallFrameTracer tracer(vm, exec);
2023     genericUnwind(vm, exec);
2024 }
2025
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    // __builtin_return_address is GCC/Clang-only, hence the compiler guard;
    // on other compilers this function is a no-op.
#if COMPILER(GCC_OR_CLANG)
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}
2040
2041 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2042 {
2043     VM& vm = exec->vm();
2044     NativeCallFrameTracer tracer(&vm, exec);
2045     JSValue baseValue = JSValue::decode(encodedBaseValue);
2046     if (baseValue.isUndefinedOrNull())
2047         return JSValue::encode(jsBoolean(false));
2048
2049     JSObject* base = baseValue.toObject(exec);
2050     if (!base)
2051         return JSValue::encode(JSValue());
2052     return JSValue::encode(jsBoolean(base->hasPropertyGeneric(exec, asString(propertyName)->toIdentifier(exec), PropertySlot::InternalMethodType::GetOwnProperty)));
2053 }
2054
2055 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2056 {
2057     VM& vm = exec->vm();
2058     NativeCallFrameTracer tracer(&vm, exec);
2059     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2060     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript, PropertySlot::InternalMethodType::GetOwnProperty)));
2061 }
2062     
2063 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2064 {
2065     VM& vm = exec->vm();
2066     NativeCallFrameTracer tracer(&vm, exec);
2067
2068     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2069
2070     return propertyNameEnumerator(exec, base);
2071 }
2072
2073 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2074 {
2075     VM& vm = exec->vm();
2076     NativeCallFrameTracer tracer(&vm, exec);
2077     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2078     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2079     return JSValue::encode(propertyName ? propertyName : jsNull());
2080 }
2081
2082 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2083 {
2084     VM& vm = exec->vm();
2085     NativeCallFrameTracer tracer(&vm, exec);
2086     return jsString(exec, Identifier::from(exec, index).string());
2087 }
2088
2089 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2090 {
2091     exec->vm().typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2092 }
2093
2094 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2095 {
2096     VM& vm = exec->vm();
2097     NativeCallFrameTracer tracer(&vm, exec);
2098     RELEASE_ASSERT(!!vm.exception());
2099
2100     if (LegacyProfiler* profiler = vm.enabledProfiler())
2101         profiler->exceptionUnwind(exec);
2102
2103     if (isTerminatedExecutionException(vm.exception())) {
2104         genericUnwind(&vm, exec);
2105         return 1;
2106     } else
2107         return 0;
2108 }
2109
2110 } // extern "C"
2111
2112 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2113 // definition of getHostCallReturnValue() below because the Windows build
2114 // requires it.
2115 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2116 {
2117     if (!exec)
2118         return JSValue::encode(JSValue());
2119     return JSValue::encode(exec->vm().hostCallReturnValue);
2120 }
2121
// Each platform branch below emits a small assembly trampoline named
// getHostCallReturnValue. It computes a pointer a fixed distance below the
// current stack pointer, passes it as the first (ExecState*) argument, and
// transfers control to getHostCallReturnValueWithExecState() above.
// NOTE(review): the pointer below sp is presumably the caller's call frame,
// reinterpreted as an ExecState* -- confirm against the JS calling convention.
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
// x86_64: sp - 8 goes in %rdi (first integer argument register in the
// System V AMD64 ABI), then a tail-jump to the C++ helper.
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "lea -8(%rsp), %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
// x86 (32-bit): arguments are passed on the stack, so no simple tail-jump is
// possible. This variant pushes the computed pointer as the sole argument,
// makes a real call, then tears down its own frame and returns the helper's
// result.
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "mov %esp, %eax\n"
    "leal -4(%esp), %esp\n"
    "push %eax\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
// ARM Thumb-2: sp - 8 in r0 (first argument register), then a tail branch.
// The .thumb/.thumb_func directives mark the symbol as Thumb code.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
// Traditional (non-Thumb) ARM: same shape as the Thumb-2 variant above.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
// ARM64: sp - 16 in x0; the 16-byte offset matches AArch64's 16-byte stack
// alignment requirement.
// NOTE(review): unlike the other GCC/Clang branches, this one is not guarded
// by COMPILER(GCC_OR_CLANG) -- confirm that is intentional.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
     "sub x0, sp, #16" "\n"
     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

#if WTF_MIPS_PIC
// PIC builds: set up $gp via .cpload and load the callee's address into $t9,
// which the MIPS o32 PIC ABI requires to hold the function address on entry.
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
// Non-PIC builds need no $t9 setup.
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

// MIPS: sp - 8 in $a0 (first argument register), then a branch to the helper.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "addi $a0, $sp, -8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

// Scratch register used to hold the branch target address on SH4.
#define SH4_SCRATCH_REGISTER "r11"

// SH4: compute sp - 8 into r4 (first argument register), load the helper's
// PC-relative displacement from the inline literal pool at label 2, and use
// braf (PC-relative branch) to reach it; the nop fills the delay slot.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r15, r4" "\n"
    "add -8, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
// MSVC x86: a naked function; store esp - 4 into the argument slot at
// [esp + 4] and tail-jump to the helper, which reads it as its ExecState*.
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm lea eax, [esp - 4]
        __asm mov [esp + 4], eax;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
2232
2233 } // namespace JSC
2234
2235 #endif // ENABLE(JIT)