Unreviewed, rolling back in r190450
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArrayConstructor.h"
32 #include "DFGCompilationMode.h"
33 #include "DFGDriver.h"
34 #include "DFGOSREntry.h"
35 #include "DFGThunks.h"
36 #include "DFGWorklist.h"
37 #include "Debugger.h"
38 #include "DirectArguments.h"
39 #include "Error.h"
40 #include "ErrorHandlingScope.h"
41 #include "ExceptionFuzz.h"
42 #include "GetterSetter.h"
43 #include "HostCallReturnValue.h"
44 #include "JIT.h"
45 #include "JITToDFGDeferredCompilationCallback.h"
46 #include "JSArrowFunction.h"
47 #include "JSCInlines.h"
48 #include "JSGlobalObjectFunctions.h"
49 #include "JSLexicalEnvironment.h"
50 #include "JSPropertyNameEnumerator.h"
51 #include "JSStackInlines.h"
52 #include "JSWithScope.h"
53 #include "LegacyProfiler.h"
54 #include "ObjectConstructor.h"
55 #include "PropertyName.h"
56 #include "Repatch.h"
57 #include "ScopedArguments.h"
58 #include "TestRunnerUtils.h"
59 #include "TypeProfilerLog.h"
60 #include "VMInlines.h"
61 #include <wtf/InlineASM.h>
62
63 namespace JSC {
64
65 extern "C" {
66
67 #if COMPILER(MSVC)
68 void * _ReturnAddress(void);
69 #pragma intrinsic(_ReturnAddress)
70
71 #define OUR_RETURN_ADDRESS _ReturnAddress()
72 #else
73 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
74 #endif
75
76 #if ENABLE(OPCODE_SAMPLING)
77 #define CTI_SAMPLER vm->interpreter->sampler()
78 #else
79 #define CTI_SAMPLER 0
80 #endif
81
82
83 void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
84 {
85     // We pass in our own code block, because the callframe hasn't been populated.
86     VM* vm = codeBlock->vm();
87
88     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
89     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
90     if (!callerFrame)
91         callerFrame = exec;
92
93     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
94     throwStackOverflowError(callerFrame);
95 }
96
97 #if ENABLE(WEBASSEMBLY)
98 void JIT_OPERATION operationThrowDivideError(ExecState* exec)
99 {
100     VM* vm = &exec->vm();
101     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
102     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
103
104     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
105     ErrorHandlingScope errorScope(*vm);
106     vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
107 }
108
109 void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
110 {
111     VM* vm = &exec->vm();
112     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
113     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
114
115     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
116     ErrorHandlingScope errorScope(*vm);
117     vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
118 }
119 #endif
120
121 int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
122 {
123     VM* vm = &exec->vm();
124     JSStack& stack = vm->interpreter->stack();
125
126     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
127     if (missingArgCount < 0) {
128         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
129         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
130         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
131         throwStackOverflowError(callerFrame);
132     }
133
134     return missingArgCount;
135 }
136
137 int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
138 {
139     VM* vm = &exec->vm();
140     JSStack& stack = vm->interpreter->stack();
141
142     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
143     if (missingArgCount < 0) {
144         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
145         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
146         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
147         throwStackOverflowError(callerFrame);
148     }
149
150     return missingArgCount;
151 }
152
153 EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
154 {
155     VM* vm = &exec->vm();
156     NativeCallFrameTracer tracer(vm, exec);
157     
158     stubInfo->tookSlowPath = true;
159     
160     JSValue baseValue = JSValue::decode(base);
161     PropertySlot slot(baseValue);
162     Identifier ident = Identifier::fromUid(vm, uid);
163     return JSValue::encode(baseValue.get(exec, ident, slot));
164 }
165
166 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
167 {
168     VM* vm = &exec->vm();
169     NativeCallFrameTracer tracer(vm, exec);
170     
171     JSValue baseValue = JSValue::decode(base);
172     PropertySlot slot(baseValue);
173     Identifier ident = Identifier::fromUid(vm, uid);
174     return JSValue::encode(baseValue.get(exec, ident, slot));
175 }
176
// Slow path for get_by_id that attempts to specialize the inline cache for
// this access site once it has been visited more than once.
EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue);
    
    bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
    // Patch only on the second visit; the first visit just marks the stub as seen.
    if (stubInfo->seen)
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo);
    else
        stubInfo->seen = true;
    
    // Missing properties yield undefined rather than reading a stale slot.
    return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
}
194
// Slow path for the "in" operator that attempts to specialize the inline
// cache for this site once it has been visited more than once.
EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    // The right-hand side of "in" must be an object; otherwise it's a TypeError.
    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }
    
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    PropertySlot slot(base);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);
    
    // The property lookup must not have changed the stub's access type.
    RELEASE_ASSERT(accessType == stubInfo->accessType);
    
    // Patch only on the second visit; the first visit just marks the stub as seen.
    if (stubInfo->seen)
        repatchIn(exec, base, ident, result, slot, *stubInfo);
    else
        stubInfo->seen = true;
    
    return JSValue::encode(jsBoolean(result));
}
220
221 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
222 {
223     VM* vm = &exec->vm();
224     NativeCallFrameTracer tracer(vm, exec);
225     
226     stubInfo->tookSlowPath = true;
227
228     if (!base->isObject()) {
229         vm->throwException(exec, createInvalidInParameterError(exec, base));
230         return JSValue::encode(jsUndefined());
231     }
232
233     Identifier ident = Identifier::fromUid(vm, key);
234     return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
235 }
236
237 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
238 {
239     VM* vm = &exec->vm();
240     NativeCallFrameTracer tracer(vm, exec);
241
242     return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
243 }
244
245 void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
246 {
247     VM* vm = &exec->vm();
248     NativeCallFrameTracer tracer(vm, exec);
249     
250     stubInfo->tookSlowPath = true;
251     
252     Identifier ident = Identifier::fromUid(vm, uid);
253     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
254     JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
255 }
256
257 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
258 {
259     VM* vm = &exec->vm();
260     NativeCallFrameTracer tracer(vm, exec);
261     
262     stubInfo->tookSlowPath = true;
263     
264     Identifier ident = Identifier::fromUid(vm, uid);
265     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
266     JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
267 }
268
269 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
270 {
271     VM* vm = &exec->vm();
272     NativeCallFrameTracer tracer(vm, exec);
273     
274     stubInfo->tookSlowPath = true;
275     
276     Identifier ident = Identifier::fromUid(vm, uid);
277     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
278     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
279 }
280
281 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
282 {
283     VM* vm = &exec->vm();
284     NativeCallFrameTracer tracer(vm, exec);
285     
286     stubInfo->tookSlowPath = true;
287     
288     Identifier ident = Identifier::fromUid(vm, uid);
289     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
290     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
291 }
292
// Slow path for strict-mode put_by_id that attempts to specialize the inline
// cache for this access site once it has been visited more than once.
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put, since the put may transition it.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.put(exec, ident, value, slot);
    
    // If the put changed the stub's access type (e.g. via a reentrant setter —
    // TODO confirm), don't patch based on stale information.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    // Patch only on the second visit; the first visit just marks the stub as seen.
    if (stubInfo->seen)
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
    else
        stubInfo->seen = true;
}
316
// Slow path for sloppy-mode put_by_id that attempts to specialize the inline
// cache for this access site once it has been visited more than once.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put, since the put may transition it.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;    
    baseValue.put(exec, ident, value, slot);
    
    // If the put changed the stub's access type, don't patch based on stale information.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    // Patch only on the second visit; the first visit just marks the stub as seen.
    if (stubInfo->seen)
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
    else
        stubInfo->seen = true;
}
340
// Slow path for strict-mode direct put_by_id (defines on the object itself)
// that attempts to specialize the inline cache after the first visit.
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());
    
    // Capture the structure before the put, since putDirect may transition it.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);
    
    // If the put changed the stub's access type, don't patch based on stale information.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    // Patch only on the second visit; the first visit just marks the stub as seen.
    if (stubInfo->seen)
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
    else
        stubInfo->seen = true;
}
364
// Slow path for sloppy-mode direct put_by_id (defines on the object itself)
// that attempts to specialize the inline cache after the first visit.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());
    
    // Capture the structure before the put, since putDirect may transition it.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);
    
    // If the put changed the stub's access type, don't patch based on stale information.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    // Patch only on the second visit; the first visit just marks the stub as seen.
    if (stubInfo->seen)
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
    else
        stubInfo->seen = true;
}
388
// Out-of-line completion of a transitioning put: grows the object's property
// storage for the new structure, then stores the value at the given offset.
void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // The caller guarantees the new structure really needs more out-of-line storage...
    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    // ...and that the JIT's inline allocation fast path could not have handled it.
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}
399
400 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
401 {
402     return value.isString() || value.isSymbol();
403 }
404
// Shared slow-path implementation of put_by_val. Handles integer subscripts on
// the fast path, then falls back to a generic keyed put.
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    if (LIKELY(subscript.isUInt32())) {
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i))
                object->setIndexQuickly(callFrame->vm(), i, value);
            else {
                // Record the out-of-bounds store so the profile stops assuming in-bounds.
                byValInfo->arrayProfile->setOutOfBounds();
                object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            }
        } else
            baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    if (callFrame->vm().exception())
        return;

    // Only count this as a slow path if no cached-id stub could have handled it.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.put(callFrame, property, value, slot);
}
435
// Shared slow-path implementation of direct put_by_val (defines the property
// on the object itself). Handles index-like subscripts before the generic put.
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));
        if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
            baseObject->setIndexQuickly(callFrame->vm(), index, value);
            return;
        }

        // Record the out-of-bounds store so the profile stops assuming in-bounds.
        byValInfo->arrayProfile->setOutOfBounds();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // A double subscript that round-trips exactly through uint32_t is still an index.
    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    if (callFrame->vm().exception())
        return;

    // A string key that parses as an array index goes through the indexed path.
    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // Only count this as a slow path if no cached-id stub could have handled it.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseObject, isStrictMode);
    baseObject->putDirect(callFrame->vm(), property, value, slot);
}
481
// Outcome of an attempt to specialize a by-val access site.
enum class OptimizationResult {
    NotOptimized, // No optimization opportunity was found this time.
    SeenOnce,     // First sighting of a cacheable id; wait for a repeat before compiling.
    Optimized,    // A specialized stub was compiled for this site.
    GiveUp,       // The site looks unprofitable; callers patch it to the generic path.
};
488
// Decides whether this put_by_val site is worth specializing and, if so,
// compiles a by-val stub (for int32 subscripts) or a cached-id stub (for
// string/symbol subscripts). Returns the outcome so the caller can decide
// whether to patch the site over to the generic operation.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                // The profile update races with compilation threads; take the code block's lock.
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // Index-like string keys are not cacheable by id.
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
554
// Slow path for put_by_val that tries to specialize the call site, then
// performs the put itself either way.
void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize: patch the call site to the generic operation.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
    }
    // Whatever the optimization outcome, the current put still has to happen.
    putByVal(exec, baseValue, subscript, value, byValInfo);
}
570
// Direct-put twin of tryPutByValOptimize: decides whether this put site is
// worth specializing and compiles a by-val or cached-id stub if so.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                // The profile update races with compilation threads; take the code block's lock.
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        Optional<uint32_t> index = parseIndex(propertyName);

        // Index-like string keys are not cacheable by id.
        if (!subscript.isString() || !index) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
634
// Slow path for direct put_by_val that tries to specialize the call site,
// then performs the put itself either way.
void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    // Direct puts are only emitted for object bases.
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize: patch the call site to the generic operation.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
    }

    // Whatever the optimization outcome, the current put still has to happen.
    directPutByVal(exec, object, subscript, value, byValInfo);
}
653
654 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
655 {
656     VM& vm = exec->vm();
657     NativeCallFrameTracer tracer(&vm, exec);
658     
659     JSValue baseValue = JSValue::decode(encodedBaseValue);
660     JSValue subscript = JSValue::decode(encodedSubscript);
661     JSValue value = JSValue::decode(encodedValue);
662
663     putByVal(exec, baseValue, subscript, value, byValInfo);
664 }
665
666
667 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
668 {
669     VM& vm = exec->vm();
670     NativeCallFrameTracer tracer(&vm, exec);
671     
672     JSValue baseValue = JSValue::decode(encodedBaseValue);
673     JSValue subscript = JSValue::decode(encodedSubscript);
674     JSValue value = JSValue::decode(encodedValue);
675     RELEASE_ASSERT(baseValue.isObject());
676     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
677 }
678
// Slow path for call_eval. Runs eval semantics only when the callee is the
// genuine global eval; otherwise returns an empty JSValue so the caller
// (presumably) falls back to an ordinary call — the empty value marks that case.
EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
{
    UNUSED_PARAM(exec);

    execCallee->setCodeBlock(0);

    if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
        return JSValue::encode(JSValue());

    VM* vm = &execCallee->vm();
    JSValue result = eval(execCallee);
    // An exception during eval is signalled with an empty encoded value.
    if (vm->exception())
        return EncodedJSValue();
    
    return JSValue::encode(result);
}
695
// Handles a call/construct whose callee is not a JS function: dispatches to a
// host (native) function, or throws if the callee is not callable/constructible.
// Returns an encoded (continuation address, frame-reuse flag) pair for the JIT.
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);
    
        // JS callees are handled by the caller of this function, never here.
        ASSERT(callType != CallTypeJS);
    
        if (callType == CallTypeHost) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            // If the host function threw, continue in the exception-throwing thunk.
            if (vm->exception()) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // Tail calls may reuse the caller's frame; everything else keeps it.
            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
    
        // Not callable at all: throw a TypeError and continue in the throwing thunk.
        ASSERT(callType == CallTypeNone);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);
    
    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);
    
    // JS constructors are handled by the caller of this function, never here.
    ASSERT(constructType != ConstructTypeJS);
    
    if (constructType == ConstructTypeHost) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        // If the host constructor threw, continue in the exception-throwing thunk.
        if (vm->exception()) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }
    
    // Not constructible: throw a TypeError and continue in the throwing thunk.
    ASSERT(constructType == ConstructTypeNone);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
757
// Slow path for an unlinked (or not-yet-linked) call site. Resolves the
// callee, ensures machine code exists for the required specialization
// (call vs. construct), links the call site once the site has been executed
// more than once, and returns the entrypoint plus a frame-reuse flag.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        // Host functions have no CodeBlock; always take the arity-checking entry.
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // 'new' on a function that cannot construct is an error.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Make sure the callee has machine code for this specialization.
        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        ArityCheckMode arity;
        // Varargs sites cannot prove the argument count statically, so they
        // must always arity-check.
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    // Only link on the second execution of the call site; one-shot sites
    // aren't worth linking.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);
    
    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
827
// Common implementation for the virtual-call slow paths below. Resolves the
// callee, makes sure its machine code exists, and returns the arity-checking
// entrypoint (virtual sites never prove arity) plus a frame-reuse flag. The
// resolved callee cell is reported back through |calleeAsFunctionCell| so the
// caller can link a polymorphic stub if it wants to.
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            // 'new' on a function that cannot construct is an error.
            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // Compile (or otherwise obtain) the callee's machine code.
            JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
            if (error) {
                exec->vm().throwException(exec, error);
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            // WebAssembly executables can be called but never constructed.
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
884
885 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
886 {
887     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
888     JSCell* calleeAsFunctionCell;
889     SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
890
891     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
892     
893     return result;
894 }
895
896 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
897 {
898     JSCell* calleeAsFunctionCellIgnored;
899     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
900 }
901
902 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
903 {
904     VM* vm = &exec->vm();
905     NativeCallFrameTracer tracer(vm, exec);
906     
907     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
908 }
909
910 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
911 {
912     VM* vm = &exec->vm();
913     NativeCallFrameTracer tracer(vm, exec);
914
915     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
916 }
917
918 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
919 {
920     VM* vm = &exec->vm();
921     NativeCallFrameTracer tracer(vm, exec);
922
923     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
924 }
925
926 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
927 {
928     VM* vm = &exec->vm();
929     NativeCallFrameTracer tracer(vm, exec);
930
931     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
932 }
933
934 size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
935 {
936     VM* vm = &exec->vm();
937     NativeCallFrameTracer tracer(vm, exec);
938     
939     return JSValue::decode(encodedOp).toBoolean(exec);
940 }
941
942 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
943 {
944     VM* vm = &exec->vm();
945     NativeCallFrameTracer tracer(vm, exec);
946
947     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
948 }
949
950 #if USE(JSVALUE64)
951 EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
952 #else
953 size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
954 #endif
955 {
956     VM* vm = &exec->vm();
957     NativeCallFrameTracer tracer(vm, exec);
958
959     bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
960 #if USE(JSVALUE64)
961     return JSValue::encode(jsBoolean(result));
962 #else
963     return result;
964 #endif
965 }
966
967 size_t JIT_OPERATION operationHasProperty(ExecState* exec, JSObject* base, JSString* property)
968 {
969     int result = base->hasProperty(exec, property->toIdentifier(exec));
970     return result;
971 }
972     
973
974 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
975 {
976     VM* vm = &exec->vm();
977     NativeCallFrameTracer tracer(vm, exec);
978     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
979 }
980
981 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
982 {
983     VM* vm = &exec->vm();
984     NativeCallFrameTracer tracer(vm, exec);
985     return JSValue::encode(constructArray(exec, profile, values, size));
986 }
987
988 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
989 {
990     VM* vm = &exec->vm();
991     NativeCallFrameTracer tracer(vm, exec);
992     JSValue sizeValue = JSValue::decode(size);
993     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
994 }
995
996 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
997 {
998     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
999     VM& vm = exec->vm();
1000     NativeCallFrameTracer tracer(&vm, exec);
1001     return JSValue::encode(JSFunction::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1002 }
1003
1004 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1005 {
1006     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1007     VM& vm = exec->vm();
1008     NativeCallFrameTracer tracer(&vm, exec);
1009     return JSValue::encode(JSFunction::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1010 }
1011
1012 EncodedJSValue static operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue, bool isInvalidated)
1013 {
1014     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1015     FunctionExecutable* executable = static_cast<FunctionExecutable*>(functionExecutable);
1016     VM& vm = exec->vm();
1017     NativeCallFrameTracer tracer(&vm, exec);
1018         
1019     JSArrowFunction* arrowFunction  = isInvalidated
1020         ? JSArrowFunction::createWithInvalidatedReallocationWatchpoint(vm, executable, scope, JSValue::decode(thisValue))
1021         : JSArrowFunction::create(vm, executable, scope, JSValue::decode(thisValue));
1022     
1023     return JSValue::encode(arrowFunction);
1024 }
1025     
1026 EncodedJSValue JIT_OPERATION operationNewArrowFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
1027 {
1028     return operationNewFunctionCommon(exec, scope, functionExecutable, thisValue, true);
1029 }
1030     
1031 EncodedJSValue JIT_OPERATION operationNewArrowFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
1032 {
1033     return operationNewFunctionCommon(exec, scope, functionExecutable, thisValue, false);
1034 }
1035
1036 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1037 {
1038     VM* vm = &exec->vm();
1039     NativeCallFrameTracer tracer(vm, exec);
1040     
1041     return constructEmptyObject(exec, structure);
1042 }
1043
1044 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1045 {
1046     VM& vm = exec->vm();
1047     NativeCallFrameTracer tracer(&vm, exec);
1048     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1049     if (!regexp->isValid()) {
1050         vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
1051         return JSValue::encode(jsUndefined());
1052     }
1053
1054     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1055 }
1056
1057 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1058 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1059 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1060 // future, we can switch to using that then.
1061 UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
1062 {
1063     VM& vm = exec->vm();
1064     NativeCallFrameTracer tracer(&vm, exec);
1065
1066     if (UNLIKELY(vm.shouldTriggerTermination(exec)))
1067         vm.throwException(exec, createTerminatedExecutionException(&vm));
1068
1069     return nullptr;
1070 }
1071
1072 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1073 {
1074     VM& vm = exec->vm();
1075     NativeCallFrameTracer tracer(&vm, exec);
1076     JSValue errorMessageValue = JSValue::decode(encodedValue);
1077     RELEASE_ASSERT(errorMessageValue.isString());
1078     String errorMessage = asString(errorMessageValue)->value(exec);
1079     if (referenceErrorFlag)
1080         vm.throwException(exec, createReferenceError(exec, errorMessage));
1081     else
1082         vm.throwException(exec, createTypeError(exec, errorMessage));
1083 }
1084
1085 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
1086 {
1087     VM& vm = exec->vm();
1088     NativeCallFrameTracer tracer(&vm, exec);
1089
1090     vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
1091 }
1092
1093 #if ENABLE(DFG_JIT)
1094 static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
1095 {
1096     codeBlock->updateAllPredictions();
1097     codeBlock->optimizeAfterWarmUp();
1098 }
1099
// Baseline->DFG tier-up entry point, called when the baseline execution
// counter fires. |bytecodeIndex| == 0 means we were called from the function
// prologue; non-zero means a loop back-edge (and thus a potential OSR entry
// point). Returns (0, 0) to keep running baseline code, or
// (OSR entry thunk address, OSR data buffer) to transfer control into
// optimized code.
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);
    
    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }
    
    // Don't tier up while a profiler is attached; reset the counters and keep
    // running baseline code.
    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    // Likewise, stay in baseline code while the debugger is stepping or has
    // pending requests against this code block.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        // No replacement yet: decide whether to kick off a DFG compile now.
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Snapshot the values the compiled code must be able to handle at the
        // OSR entry point. Locals reserved for callee saves are skipped.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());
        
        if (result != CompilationSuccessful)
            return encodeResult(0, 0);
    }
    
    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    
    // Attempt OSR entry into the optimized code at this bytecode index.
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
    
    return encodeResult(0, 0);
}
1333 #endif
1334
1335 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1336 {
1337     VM& vm = exec->vm();
1338     NativeCallFrameTracer tracer(&vm, exec);
1339
1340     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1341     ASSERT(isJSArray(arrayValue));
1342     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1343 }
1344
// Selects which accessor flavor putAccessorByVal() installs.
enum class AccessorType {
    Getter,
    Setter
};
1349
1350 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1351 {
1352     auto propertyKey = subscript.toPropertyKey(exec);
1353     if (exec->hadException())
1354         return;
1355
1356     if (accessorType == AccessorType::Getter)
1357         base->putGetter(exec, propertyKey, accessor, attribute);
1358     else
1359         base->putSetter(exec, propertyKey, accessor, attribute);
1360 }
1361
1362 #if USE(JSVALUE64)
1363 void JIT_OPERATION operationPutGetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t options, EncodedJSValue encodedGetterValue)
1364 {
1365     VM& vm = exec->vm();
1366     NativeCallFrameTracer tracer(&vm, exec);
1367
1368     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1369     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1370
1371     JSValue getter = JSValue::decode(encodedGetterValue);
1372     ASSERT(getter.isObject());
1373     baseObj->putGetter(exec, *identifier, asObject(getter), options);
1374 }
1375
1376 void JIT_OPERATION operationPutSetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t options, EncodedJSValue encodedSetterValue)
1377 {
1378     VM& vm = exec->vm();
1379     NativeCallFrameTracer tracer(&vm, exec);
1380
1381     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1382     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1383
1384     JSValue setter = JSValue::decode(encodedSetterValue);
1385     ASSERT(setter.isObject());
1386     baseObj->putSetter(exec, *identifier, asObject(setter), options);
1387 }
1388
1389 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t attribute,
1390     EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1391 {
1392     VM& vm = exec->vm();
1393     NativeCallFrameTracer tracer(&vm, exec);
1394
1395     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1396     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1397
1398     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1399
1400     JSValue getter = JSValue::decode(encodedGetterValue);
1401     JSValue setter = JSValue::decode(encodedSetterValue);
1402     ASSERT(getter.isObject() || getter.isUndefined());
1403     ASSERT(setter.isObject() || setter.isUndefined());
1404     ASSERT(getter.isObject() || setter.isObject());
1405
1406     if (!getter.isUndefined())
1407         accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1408     if (!setter.isUndefined())
1409         accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1410     baseObj->putDirectAccessor(exec, *identifier, accessor, attribute);
1411 }
1412
1413 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, int32_t attribute, EncodedJSValue encodedGetter)
1414 {
1415     VM& vm = exec->vm();
1416     NativeCallFrameTracer tracer(&vm, exec);
1417     JSObject* base = asObject(JSValue::decode(encodedBase));
1418     JSValue subscript = JSValue::decode(encodedSubscript);
1419     JSObject* getter = asObject(JSValue::decode(encodedGetter));
1420     putAccessorByVal(exec, base, subscript, attribute, getter, AccessorType::Getter);
1421 }
1422
1423 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, int32_t attribute, EncodedJSValue encodedSetter)
1424 {
1425     VM& vm = exec->vm();
1426     NativeCallFrameTracer tracer(&vm, exec);
1427     JSObject* base = asObject(JSValue::decode(encodedBase));
1428     JSValue subscript = JSValue::decode(encodedSubscript);
1429     JSObject* setter = asObject(JSValue::decode(encodedSetter));
1430     putAccessorByVal(exec, base, subscript, attribute, setter, AccessorType::Setter);
1431 }
1432
1433 #else
1434 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, Identifier* identifier, int32_t options, JSCell* getter)
1435 {
1436     VM& vm = exec->vm();
1437     NativeCallFrameTracer tracer(&vm, exec);
1438
1439     ASSERT(object && object->isObject());
1440     JSObject* baseObj = object->getObject();
1441
1442     ASSERT(getter->isObject());
1443     baseObj->putGetter(exec, *identifier, getter, options);
1444 }
1445
1446 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, Identifier* identifier, int32_t options, JSCell* setter)
1447 {
1448     VM& vm = exec->vm();
1449     NativeCallFrameTracer tracer(&vm, exec);
1450
1451     ASSERT(object && object->isObject());
1452     JSObject* baseObj = object->getObject();
1453
1454     ASSERT(setter->isObject());
1455     baseObj->putSetter(exec, *identifier, setter, options);
1456 }
1457
1458 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, Identifier* identifier, int32_t attribute, JSCell* getter, JSCell* setter)
1459 {
1460     VM& vm = exec->vm();
1461     NativeCallFrameTracer tracer(&vm, exec);
1462
1463     ASSERT(object && object->isObject());
1464     JSObject* baseObj = object->getObject();
1465
1466     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1467
1468     ASSERT(!getter || getter->isObject());
1469     ASSERT(!setter || setter->isObject());
1470     ASSERT(getter || setter);
1471
1472     if (getter)
1473         accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1474     if (setter)
1475         accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1476     baseObj->putDirectAccessor(exec, *identifier, accessor, attribute);
1477 }
1478
1479 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1480 {
1481     VM& vm = exec->vm();
1482     NativeCallFrameTracer tracer(&vm, exec);
1483
1484     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1485 }
1486
1487 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1488 {
1489     VM& vm = exec->vm();
1490     NativeCallFrameTracer tracer(&vm, exec);
1491
1492     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1493 }
1494
1495 #endif
1496
1497 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1498 {
1499     VM& vm = exec->vm();
1500     NativeCallFrameTracer tracer(&vm, exec);
1501
1502     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1503     exec->uncheckedR(scopeReg) = scope->next();
1504 }
1505
1506 void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
1507 {
1508     VM& vm = exec->vm();
1509     NativeCallFrameTracer tracer(&vm, exec);
1510
1511     if (LegacyProfiler* profiler = vm.enabledProfiler())
1512         profiler->didExecute(exec, JSValue::decode(encodedValue));
1513 }
1514
1515 void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
1516 {
1517     VM& vm = exec->vm();
1518     NativeCallFrameTracer tracer(&vm, exec);
1519
1520     if (LegacyProfiler* profiler = vm.enabledProfiler())
1521         profiler->willExecute(exec, JSValue::decode(encodedValue));
1522 }
1523
// Slow path for op_check_has_instance: runs the base value's custom
// hasInstance hook (via the method table) when it has one; otherwise throws
// an "invalid instanceof parameter" error. Returns the boolean result, or an
// empty value after throwing.
EncodedJSValue JIT_OPERATION operationCheckHasInstance(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedBaseVal)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseVal = JSValue::decode(encodedBaseVal);

    if (baseVal.isObject()) {
        JSObject* baseObject = asObject(baseVal);
        // The default-hasInstance case is handled elsewhere; only custom hooks reach here.
        ASSERT(!baseObject->structure(vm)->typeInfo().implementsDefaultHasInstance());
        if (baseObject->structure(vm)->typeInfo().implementsHasInstance()) {
            bool result = baseObject->methodTable(vm)->customHasInstance(baseObject, exec, value);
            return JSValue::encode(jsBoolean(result));
        }
    }

    // Non-object base, or an object without any hasInstance support: TypeError.
    vm.throwException(exec, createInvalidInstanceofParameterError(exec, baseVal));
    return JSValue::encode(JSValue());
}
1544
1545 }
1546
1547 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1548 {
1549     switch (object.structure()->typeInfo().type()) {
1550     case DirectArgumentsType: {
1551         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1552         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1553             return true;
1554         break;
1555     }
1556     case ScopedArgumentsType: {
1557         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1558         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1559             return true;
1560         break;
1561     }
1562     default:
1563         break;
1564     }
1565     return false;
1566 }
1567
// Common slow-path implementation of get_by_val. Tries, in order: a fast own-
// property read for string subscripts, indexed access for uint32 subscripts
// (patching the call site toward operationGetByValString for string bases),
// and finally the fully generic property lookup. Updates byValInfo's
// profiling state (tookSlowPath, out-of-bounds) along the way.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // Fast case: cell base + string subscript whose atomic string already
    // exists — attempt a direct own-property read without a full lookup.
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A stub specialized on a different id cannot serve this access.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    // Indexed access.
    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // Route future calls at this site to the string-specialized entry.
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            if (!canAccessArgumentIndexQuickly(*object, i))
                byValInfo->arrayProfile->setOutOfBounds();
        }

        return baseValue.get(exec, i);
    }

    // Generic access: coerce to property key (either step may throw).
    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();

    ASSERT(exec->bytecodeOffset());
    // As above: a stub specialized on another id means this was a true slow path.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}
1621
// Decides whether (and how) to patch a get_by_val site based on the observed
// base/subscript pair. Returns Optimized when a specialized stub was compiled,
// SeenOnce when a string/symbol id was recorded for a possible future stub,
// GiveUp when the site should be permanently degraded to the generic path,
// and NotOptimized otherwise.
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    // Indexed access on an object: try to compile an array-mode stub.
    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    // Named access: compile an id-specialized stub once the same property
    // name has been observed twice in a row at this site.
    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }

        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1690
1691 extern "C" {
1692
1693 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1694 {
1695     VM& vm = exec->vm();
1696     NativeCallFrameTracer tracer(&vm, exec);
1697     JSValue baseValue = JSValue::decode(encodedBase);
1698     JSValue subscript = JSValue::decode(encodedSubscript);
1699
1700     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1701     return JSValue::encode(result);
1702 }
1703
1704 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1705 {
1706     VM& vm = exec->vm();
1707     NativeCallFrameTracer tracer(&vm, exec);
1708
1709     JSValue baseValue = JSValue::decode(encodedBase);
1710     JSValue subscript = JSValue::decode(encodedSubscript);
1711     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1712     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1713         // Don't ever try to optimize.
1714         byValInfo->tookSlowPath = true;
1715         ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
1716     }
1717
1718     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1719 }
1720
// Optimizing slow path for has_indexed_property: attempts to patch in an
// array-mode stub, degrades the call site to the generic entry when the site
// is clearly polymorphic or the object intercepts indexed access, and finally
// answers the hasProperty query (updating the array profile when the fast
// indexed path cannot be used).
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);

    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }

    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index))
        byValInfo->arrayProfile->setOutOfBounds();
    return JSValue::encode(jsBoolean(object->hasProperty(exec, index)));
}
1767     
1768 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1769 {
1770     VM& vm = exec->vm();
1771     NativeCallFrameTracer tracer(&vm, exec);
1772     JSValue baseValue = JSValue::decode(encodedBase);
1773     JSValue subscript = JSValue::decode(encodedSubscript);
1774     
1775     ASSERT(baseValue.isObject());
1776     ASSERT(subscript.isUInt32());
1777
1778     JSObject* object = asObject(baseValue);
1779     uint32_t index = subscript.asUInt32();
1780     if (object->canGetIndexQuickly(index))
1781         return JSValue::encode(JSValue(JSValue::JSTrue));
1782
1783     if (!canAccessArgumentIndexQuickly(*object, index))
1784         byValInfo->arrayProfile->setOutOfBounds();
1785     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript.asUInt32())));
1786 }
1787     
// get_by_val slow path installed once a site has been observed doing indexed
// reads out of a JSString. Re-patches the call site back toward the
// optimizing (or generic) entry if the base stops being a string; otherwise
// handles the indexed or generic lookup directly.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                // The base is no longer a string; undo this specialization.
                ASSERT(exec->bytecodeOffset());
                ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        // Non-uint32 subscript: fully generic lookup (either step may throw).
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
1819
// Slow path for op_del_by_id: deletes the named property from the base (after
// ToObject), throwing a TypeError when the delete fails in strict mode.
EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // NOTE(review): toObject can throw (e.g. null/undefined base) yet baseObj
    // is dereferenced unconditionally below — presumably callers guarantee a
    // coercible base or toObject returns a usable object here; confirm.
    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
    JSValue result = jsBoolean(couldDelete);
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
    return JSValue::encode(result);
}
1832
1833 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1834 {
1835     VM& vm = exec->vm();
1836     NativeCallFrameTracer tracer(&vm, exec);
1837     JSValue value = JSValue::decode(encodedValue);
1838     JSValue proto = JSValue::decode(encodedProto);
1839     
1840     ASSERT(!value.isObject() || !proto.isObject());
1841
1842     bool result = JSObject::defaultHasInstance(exec, value, proto);
1843     return JSValue::encode(jsBoolean(result));
1844 }
1845
1846 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1847 {
1848     VM& vm = exec->vm();
1849     NativeCallFrameTracer tracer(&vm, exec);
1850     JSStack* stack = &exec->interpreter()->stack();
1851     JSValue arguments = JSValue::decode(encodedArguments);
1852     return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
1853 }
1854
1855 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1856 {
1857     VM& vm = exec->vm();
1858     NativeCallFrameTracer tracer(&vm, exec);
1859     JSValue arguments = JSValue::decode(encodedArguments);
1860     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1861     return newCallFrame;
1862 }
1863
1864 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1865 {
1866     VM& vm = exec->vm();
1867     NativeCallFrameTracer tracer(&vm, exec);
1868     return JSValue::encode(JSValue::decode(value).toObject(exec));
1869 }
1870
1871 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1872 {
1873     VM& vm = exec->vm();
1874     NativeCallFrameTracer tracer(&vm, exec);
1875     JSValue key = JSValue::decode(encodedKey);
1876     CodeBlock* codeBlock = exec->codeBlock();
1877
1878     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1879     void* result = jumpTable.ctiDefault.executableAddress();
1880
1881     if (key.isString()) {
1882         StringImpl* value = asString(key)->value(exec).impl();
1883         if (value->length() == 1)
1884             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1885     }
1886
1887     return reinterpret_cast<char*>(result);
1888 }
1889
1890 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1891 {
1892     VM& vm = exec->vm();
1893     NativeCallFrameTracer tracer(&vm, exec);
1894     JSValue key = JSValue::decode(encodedKey);
1895     CodeBlock* codeBlock = exec->codeBlock();
1896
1897     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1898     void* result;
1899     if (key.isInt32())
1900         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1901     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1902         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
1903     else
1904         result = jumpTable.ctiDefault.executableAddress();
1905     return reinterpret_cast<char*>(result);
1906 }
1907
1908 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1909 {
1910     VM& vm = exec->vm();
1911     NativeCallFrameTracer tracer(&vm, exec);
1912     JSValue key = JSValue::decode(encodedKey);
1913     CodeBlock* codeBlock = exec->codeBlock();
1914
1915     void* result;
1916     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1917
1918     if (key.isString()) {
1919         StringImpl* value = asString(key)->value(exec).impl();
1920         result = jumpTable.ctiForValue(value).executableAddress();
1921     } else
1922         result = jumpTable.ctiDefault.executableAddress();
1923
1924     return reinterpret_cast<char*>(result);
1925 }
1926
// Slow path for get_from_scope: resolves the identifier against the scope
// object held in the instruction's scope register, throwing for unresolvable
// names under ThrowIfNotFound and performing the TDZ check for global lexical
// environment reads. Also gives the inline cache a chance to be (re)seeded.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    PropertySlot slot(scope);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        // Unresolvable: throw only under ThrowIfNotFound, else yield undefined.
        if (getPutInfo.resolveMode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    JSValue result = JSValue();
    if (jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        result = slot.getValue(exec, ident);
        if (result == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return JSValue::encode(jsUndefined());
        }
    }

    // Attempt to cache this lookup so future accesses skip the slow path.
    CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

    if (!result)
        result = slot.getValue(exec, ident);
    return JSValue::encode(result);
}
1964
// Slow path for put_to_scope: writes `value` to the identifier in the scope
// object held in the instruction's operands. Handles the LocalClosureVar fast
// store (with watchpoint invalidation), the TDZ check for global lexical
// environment writes, and ThrowIfNotFound for unresolvable names.
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        // Direct write to a closure variable slot; fire its watchpoint set if any.
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch("Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)
        && getPutInfo.initializationMode() != Initialization) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, getPutInfo.initializationMode() == Initialization);
    scope->methodTable()->put(scope, exec, ident, value, slot);

    // The put itself may have thrown; don't touch the cache in that case.
    if (exec->vm().exception())
        return;

    // Attempt to cache this store so future writes skip the slow path.
    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
2014
2015 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
2016 {
2017     VM* vm = &exec->vm();
2018     NativeCallFrameTracer tracer(vm, exec);
2019
2020     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
2021     vm->throwException(exec, exceptionValue);
2022
2023     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
2024     genericUnwind(vm, exec);
2025 }
2026
2027 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
2028 {
2029     VM* vm = &exec->vm();
2030     NativeCallFrameTracer tracer(vm, exec);
2031     vm->heap.flushWriteBarrierBuffer(cell);
2032 }
2033
2034 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2035 {
2036     VM* vm = &exec->vm();
2037     NativeCallFrameTracer tracer(vm, exec);
2038     vm->heap.writeBarrier(cell);
2039 }
2040
2041 // NB: We don't include the value as part of the barrier because the write barrier elision
2042 // phase in the DFG only tracks whether the object being stored to has been barriered. It 
2043 // would be much more complicated to try to model the value being stored as well.
2044 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
2045 {
2046     VM* vm = &exec->vm();
2047     NativeCallFrameTracer tracer(vm, exec);
2048     vm->heap.writeBarrier(cell);
2049 }
2050
2051 void JIT_OPERATION operationInitGlobalConst(ExecState* exec, Instruction* pc)
2052 {
2053     VM* vm = &exec->vm();
2054     NativeCallFrameTracer tracer(vm, exec);
2055
2056     JSValue value = exec->r(pc[2].u.operand).jsValue();
2057     pc[1].u.variablePointer->set(*vm, exec->codeBlock()->globalObject(), value);
2058 }
2059
// Unwinds to the nearest handler for the pending exception; the handler PC is
// left in vm->targetMachinePCForThrow for the JIT trampoline to jump to.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2066
// Like lookupExceptionHandler, but starts the unwind from the caller's frame
// (used when the current frame has already been torn down).
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}
2073
2074 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2075 {
2076     VM* vm = &exec->vm();
2077     NativeCallFrameTracer tracer(vm, exec);
2078     genericUnwind(vm, exec);
2079 }
2080
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    // Fuzzing requires the caller's return PC, so this is a no-op on
    // compilers without __builtin_return_address.
#if COMPILER(GCC_OR_CLANG)
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}
2095
2096 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2097 {
2098     VM& vm = exec->vm();
2099     NativeCallFrameTracer tracer(&vm, exec);
2100     JSValue baseValue = JSValue::decode(encodedBaseValue);
2101     if (baseValue.isUndefinedOrNull())
2102         return JSValue::encode(jsBoolean(false));
2103
2104     JSObject* base = baseValue.toObject(exec);
2105     return JSValue::encode(jsBoolean(base->hasProperty(exec, asString(propertyName)->toIdentifier(exec))));
2106 }
2107
2108 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2109 {
2110     VM& vm = exec->vm();
2111     NativeCallFrameTracer tracer(&vm, exec);
2112     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2113     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript)));
2114 }
2115     
2116 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2117 {
2118     VM& vm = exec->vm();
2119     NativeCallFrameTracer tracer(&vm, exec);
2120
2121     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2122
2123     return propertyNameEnumerator(exec, base);
2124 }
2125
2126 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2127 {
2128     VM& vm = exec->vm();
2129     NativeCallFrameTracer tracer(&vm, exec);
2130     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2131     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2132     return JSValue::encode(propertyName ? propertyName : jsNull());
2133 }
2134
2135 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2136 {
2137     VM& vm = exec->vm();
2138     NativeCallFrameTracer tracer(&vm, exec);
2139     return jsString(exec, Identifier::from(exec, index).string());
2140 }
2141
// Drains the type-profiler log when the baseline JIT finds it full.
// NOTE(review): unlike every other operation in this file, no
// NativeCallFrameTracer is installed before doing VM work here — confirm
// that processLogEntries can never GC or walk the stack, or that the
// caller establishes the top call frame some other way.
void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
{
    exec->vm().typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
}
2146
2147 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2148 {
2149     VM& vm = exec->vm();
2150     NativeCallFrameTracer tracer(&vm, exec);
2151     RELEASE_ASSERT(!!vm.exception());
2152
2153     if (LegacyProfiler* profiler = vm.enabledProfiler())
2154         profiler->exceptionUnwind(exec);
2155
2156     if (isTerminatedExecutionException(vm.exception())) {
2157         genericUnwind(&vm, exec);
2158         return 1;
2159     } else
2160         return 0;
2161 }
2162
2163 } // extern "C"
2164
2165 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2166 // definition of getHostCallReturnValue() below because the Windows build
2167 // requires it.
2168 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2169 {
2170     if (!exec)
2171         return JSValue::encode(JSValue());
2172     return JSValue::encode(exec->vm().hostCallReturnValue);
2173 }
2174
// Per-architecture trampolines for getHostCallReturnValue. Each one moves
// the current frame pointer into the first-argument register and tail-calls
// (or, on x86/MSVC, forwards to) getHostCallReturnValueWithExecState above,
// which treats the frame pointer as an ExecState*. The exact assembler text
// and register choices are the contract with the JIT — do not reformat.
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
// x86_64 System V: frame pointer in %rbp, first argument in %rdi.
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov %rbp, %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
// x86 cdecl passes arguments on the stack, so this cannot be a pure tail
// call: push %ebp as the argument, call, then clean up and return.
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "leal -4(%esp), %esp\n"
    "push %ebp\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
// Thumb-2: frame pointer in r7, first argument in r0.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r0, r7" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
// ARM (traditional): frame pointer in r11, first argument in r0.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r0, r11" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
// AArch64: frame pointer in x29, first argument in x0.
// NOTE(review): unlike the other inline-asm branches this one is not
// guarded by COMPILER(GCC_OR_CLANG) — confirm that is intended for every
// ARM64 toolchain this file builds with.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
     "mov x0, x29" "\n"
     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

// MIPS PIC calling convention requires the callee's address in $t9 so it
// can compute its own $gp; load it before branching. Non-PIC builds need
// nothing.
#if WTF_MIPS_PIC
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

// MIPS: frame pointer in $fp, first argument in $a0.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "move $a0, $fp" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

#define SH4_SCRATCH_REGISTER "r11"

// SH4: frame pointer in r14, first argument in r4. SH4 has no long direct
// branch, so the target address is loaded PC-relative from an inline
// literal pool (label 2) and reached via braf; the nop fills the branch
// delay slot.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r14, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
// MSVC/x86: naked function overwrites its own return-address slot's
// argument area with ebp, then jumps — the callee returns directly to our
// caller.
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm mov [esp + 4], ebp;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
2282
2283 } // namespace JSC
2284
2285 #endif // ENABLE(JIT)