Implement linear memory instructions in WebAssembly
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArrayConstructor.h"
32 #include "DFGCompilationMode.h"
33 #include "DFGDriver.h"
34 #include "DFGOSREntry.h"
35 #include "DFGThunks.h"
36 #include "DFGWorklist.h"
37 #include "Debugger.h"
38 #include "DirectArguments.h"
39 #include "Error.h"
40 #include "ErrorHandlingScope.h"
41 #include "ExceptionFuzz.h"
42 #include "GetterSetter.h"
43 #include "HostCallReturnValue.h"
44 #include "JIT.h"
45 #include "JITToDFGDeferredCompilationCallback.h"
46 #include "JSArrowFunction.h"
47 #include "JSCInlines.h"
48 #include "JSGlobalObjectFunctions.h"
49 #include "JSLexicalEnvironment.h"
50 #include "JSPropertyNameEnumerator.h"
51 #include "JSStackInlines.h"
52 #include "JSWithScope.h"
53 #include "LegacyProfiler.h"
54 #include "ObjectConstructor.h"
55 #include "PropertyName.h"
56 #include "Repatch.h"
57 #include "ScopedArguments.h"
58 #include "TestRunnerUtils.h"
59 #include "TypeProfilerLog.h"
60 #include "VMInlines.h"
61 #include <wtf/InlineASM.h>
62
63 namespace JSC {
64
65 extern "C" {
66
67 #if COMPILER(MSVC)
68 void * _ReturnAddress(void);
69 #pragma intrinsic(_ReturnAddress)
70
71 #define OUR_RETURN_ADDRESS _ReturnAddress()
72 #else
73 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
74 #endif
75
76 #if ENABLE(OPCODE_SAMPLING)
77 #define CTI_SAMPLER vm->interpreter->sampler()
78 #else
79 #define CTI_SAMPLER 0
80 #endif
81
82
// Throws a stack-overflow RangeError on behalf of JIT code whose frame could
// not be fully set up. Because the current callframe is unpopulated, the code
// block is passed explicitly and the error is raised against the caller frame.
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    // If there is no caller (we overflowed at the very entry), report the
    // error against the current frame instead.
    if (!callerFrame)
        callerFrame = exec;

    // The tracer must be live before throwing so the VM sees a consistent
    // top callframe while unwinding; it restores state on destruction.
    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    throwStackOverflowError(callerFrame);
}
96
97 #if ENABLE(WEBASSEMBLY)
98 void JIT_OPERATION operationThrowDivideError(ExecState* exec)
99 {
100     VM* vm = &exec->vm();
101     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
102     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
103
104     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
105     ErrorHandlingScope errorScope(*vm);
106     vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
107 }
108
109 void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
110 {
111     VM* vm = &exec->vm();
112     VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
113     CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
114
115     NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
116     ErrorHandlingScope errorScope(*vm);
117     vm->throwException(callerFrame, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
118 }
119 #endif
120
121 int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
122 {
123     VM* vm = &exec->vm();
124     JSStack& stack = vm->interpreter->stack();
125
126     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
127     if (missingArgCount < 0) {
128         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
129         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
130         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
131         throwStackOverflowError(callerFrame);
132     }
133
134     return missingArgCount;
135 }
136
137 int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
138 {
139     VM* vm = &exec->vm();
140     JSStack& stack = vm->interpreter->stack();
141
142     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
143     if (missingArgCount < 0) {
144         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
145         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
146         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
147         throwStackOverflowError(callerFrame);
148     }
149
150     return missingArgCount;
151 }
152
153 EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
154 {
155     VM* vm = &exec->vm();
156     NativeCallFrameTracer tracer(vm, exec);
157     
158     stubInfo->tookSlowPath = true;
159     
160     JSValue baseValue = JSValue::decode(base);
161     PropertySlot slot(baseValue);
162     Identifier ident = Identifier::fromUid(vm, uid);
163     return JSValue::encode(baseValue.get(exec, ident, slot));
164 }
165
166 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
167 {
168     VM* vm = &exec->vm();
169     NativeCallFrameTracer tracer(vm, exec);
170     
171     JSValue baseValue = JSValue::decode(base);
172     PropertySlot slot(baseValue);
173     Identifier ident = Identifier::fromUid(vm, uid);
174     return JSValue::encode(baseValue.get(exec, ident, slot));
175 }
176
// get_by_id slow path that also tries to optimize: performs the load and, on
// the second miss at this site, repatches the inline cache with a fast path.
EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue);
    
    bool hasResult = baseValue.getPropertySlot(exec, ident, slot);
    // First miss only marks the site as seen, so one-shot accesses don't pay
    // for stub compilation; subsequent misses attempt to repatch.
    if (stubInfo->seen)
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo);
    else
        stubInfo->seen = true;
    
    return JSValue::encode(hasResult? slot.getValue(exec, ident) : jsUndefined());
}
194
// Slow path for the `in` operator with inline-cache optimization: evaluates
// the membership test and, on the second visit, repatches the IC.
EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    // Per spec, the right operand of `in` must be an object.
    if (!base->isObject()) {
        vm->throwException(exec, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }
    
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    PropertySlot slot(base);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);
    
    // The property lookup must not have changed this stub's access type.
    RELEASE_ASSERT(accessType == stubInfo->accessType);
    
    // First miss only marks the site as seen; later misses repatch.
    if (stubInfo->seen)
        repatchIn(exec, base, ident, result, slot, *stubInfo);
    else
        stubInfo->seen = true;
    
    return JSValue::encode(jsBoolean(result));
}
220
221 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
222 {
223     VM* vm = &exec->vm();
224     NativeCallFrameTracer tracer(vm, exec);
225     
226     stubInfo->tookSlowPath = true;
227
228     if (!base->isObject()) {
229         vm->throwException(exec, createInvalidInParameterError(exec, base));
230         return JSValue::encode(jsUndefined());
231     }
232
233     Identifier ident = Identifier::fromUid(vm, key);
234     return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
235 }
236
237 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
238 {
239     VM* vm = &exec->vm();
240     NativeCallFrameTracer tracer(vm, exec);
241
242     return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
243 }
244
245 void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
246 {
247     VM* vm = &exec->vm();
248     NativeCallFrameTracer tracer(vm, exec);
249     
250     stubInfo->tookSlowPath = true;
251     
252     Identifier ident = Identifier::fromUid(vm, uid);
253     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
254     JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
255 }
256
257 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
258 {
259     VM* vm = &exec->vm();
260     NativeCallFrameTracer tracer(vm, exec);
261     
262     stubInfo->tookSlowPath = true;
263     
264     Identifier ident = Identifier::fromUid(vm, uid);
265     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
266     JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
267 }
268
269 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
270 {
271     VM* vm = &exec->vm();
272     NativeCallFrameTracer tracer(vm, exec);
273     
274     stubInfo->tookSlowPath = true;
275     
276     Identifier ident = Identifier::fromUid(vm, uid);
277     PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
278     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
279 }
280
281 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
282 {
283     VM* vm = &exec->vm();
284     NativeCallFrameTracer tracer(vm, exec);
285     
286     stubInfo->tookSlowPath = true;
287     
288     Identifier ident = Identifier::fromUid(vm, uid);
289     PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
290     asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
291 }
292
// Strict-mode put_by_id slow path that also tries to repatch the inline cache
// on its second miss.
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put: the put may transition it, and the
    // repatch below needs the pre-put structure to cache the transition.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.put(exec, ident, value, slot);
    
    // If the put itself mutated the stub's access type (e.g. via reentrancy),
    // the cached state is stale, so don't repatch.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    // First miss only marks the site as seen; later misses repatch.
    if (stubInfo->seen)
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
    else
        stubInfo->seen = true;
}
316
// Sloppy-mode put_by_id slow path that also tries to repatch the inline cache
// on its second miss.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    // Capture the pre-put structure; the put may transition it and repatching
    // needs the original to cache the transition.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;    
    baseValue.put(exec, ident, value, slot);
    
    // Bail out of repatching if the put mutated the stub's access type.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    // First miss only marks the site as seen; later misses repatch.
    if (stubInfo->seen)
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
    else
        stubInfo->seen = true;
}
340
// Strict-mode put-direct-by-id slow path (own-property definition) that also
// tries to repatch the inline cache on its second miss.
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());
    
    // Capture the pre-put structure; the putDirect may transition it and
    // repatching needs the original to cache the transition.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);
    
    // Bail out of repatching if the put mutated the stub's access type.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    // First miss only marks the site as seen; later misses repatch.
    if (stubInfo->seen)
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
    else
        stubInfo->seen = true;
}
364
// Sloppy-mode put-direct-by-id slow path (own-property definition) that also
// tries to repatch the inline cache on its second miss.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());
    
    // Capture the pre-put structure; the putDirect may transition it and
    // repatching needs the original to cache the transition.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);
    
    // Bail out of repatching if the put mutated the stub's access type.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    // First miss only marks the site as seen; later misses repatch.
    if (stubInfo->seen)
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
    else
        stubInfo->seen = true;
}
388
// Slow-path tail of an inline-cached put that requires growing the object's
// out-of-line property storage: reallocates storage, installs the new
// structure, then stores the value at the given offset.
void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // This path is only taken when storage genuinely must grow and the fast
    // (JIT-inlined) allocator could not satisfy the request.
    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}
399
400 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
401 {
402     return value.isString() || value.isSymbol();
403 }
404
405 static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
406 {
407     VM& vm = callFrame->vm();
408     if (LIKELY(subscript.isUInt32())) {
409         byValInfo->tookSlowPath = true;
410         uint32_t i = subscript.asUInt32();
411         if (baseValue.isObject()) {
412             JSObject* object = asObject(baseValue);
413             if (object->canSetIndexQuickly(i))
414                 object->setIndexQuickly(callFrame->vm(), i, value);
415             else {
416                 byValInfo->arrayProfile->setOutOfBounds();
417                 object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
418             }
419         } else
420             baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
421         return;
422     }
423
424     auto property = subscript.toPropertyKey(callFrame);
425     // Don't put to an object if toString threw an exception.
426     if (callFrame->vm().exception())
427         return;
428
429     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
430         byValInfo->tookSlowPath = true;
431
432     PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
433     baseValue.put(callFrame, property, value, slot);
434 }
435
436 static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
437 {
438     bool isStrictMode = callFrame->codeBlock()->isStrictMode();
439     if (LIKELY(subscript.isUInt32())) {
440         // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
441         byValInfo->tookSlowPath = true;
442         uint32_t index = subscript.asUInt32();
443         ASSERT(isIndex(index));
444         if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
445             baseObject->setIndexQuickly(callFrame->vm(), index, value);
446             return;
447         }
448
449         byValInfo->arrayProfile->setOutOfBounds();
450         baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
451         return;
452     }
453
454     if (subscript.isDouble()) {
455         double subscriptAsDouble = subscript.asDouble();
456         uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
457         if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
458             byValInfo->tookSlowPath = true;
459             baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
460             return;
461         }
462     }
463
464     // Don't put to an object if toString threw an exception.
465     auto property = subscript.toPropertyKey(callFrame);
466     if (callFrame->vm().exception())
467         return;
468
469     if (Optional<uint32_t> index = parseIndex(property)) {
470         byValInfo->tookSlowPath = true;
471         baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
472         return;
473     }
474
475     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
476         byValInfo->tookSlowPath = true;
477
478     PutPropertySlot slot(baseObject, isStrictMode);
479     baseObject->putDirect(callFrame->vm(), property, value, slot);
480 }
481
// Outcome of an attempt to optimize a by-val inline-cache site.
enum class OptimizationResult {
    NotOptimized, // Nothing was patched this time; the site may try again.
    SeenOnce,     // First sighting of a cacheable id; remembered for next time.
    Optimized,    // A specialized stub was compiled and patched in.
    GiveUp,       // Site looks polymorphic/hostile; fall back to generic code.
};
488
// Decides whether (and how) to specialize a put_by_val site: compiles an
// array-mode stub for int32 subscripts on optimizable indexing types, or a
// cached-id stub for repeated string/symbol keys. Returns what happened so
// the caller can deoptimize the site on GiveUp.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            // Only recompile if the mode supports puts and actually differs
            // from what this site was already specialized for.
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                // The concurrent JIT may be reading the profile; lock around the update.
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // Index-like strings ("42") are array accesses, not id accesses; skip them.
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same id twice in a row: specialize for it.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
554
// put_by_val slow path that also attempts to specialize the call site.
// OUR_RETURN_ADDRESS identifies the patchable call instruction in JIT code,
// so this function must not be tail-called or inlined through a wrapper.
void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
    }
    // Whatever the optimization outcome, the current put still has to happen.
    putByVal(exec, baseValue, subscript, value, byValInfo);
}
570
// Direct-put counterpart of tryPutByValOptimize: decides whether to compile an
// array-mode or cached-id stub for a put-direct-by-val site. Returns what
// happened so the caller can deoptimize the site on GiveUp.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            // Only recompile if the mode supports puts and actually differs
            // from what this site was already specialized for.
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                // The concurrent JIT may be reading the profile; lock around the update.
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        Optional<uint32_t> index = parseIndex(propertyName);

        // Index-like strings ("42") are array accesses, not id accesses; skip them.
        if (!subscript.isString() || !index) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same id twice in a row: specialize for it.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
634
// put-direct-by-val slow path that also attempts to specialize the call site.
// OUR_RETURN_ADDRESS identifies the patchable call instruction in JIT code.
void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    // Direct puts are only emitted for known-object bases (e.g. object literals).
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
    }

    // Whatever the optimization outcome, the current put still has to happen.
    directPutByVal(exec, object, subscript, value, byValInfo);
}
653
654 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
655 {
656     VM& vm = exec->vm();
657     NativeCallFrameTracer tracer(&vm, exec);
658     
659     JSValue baseValue = JSValue::decode(encodedBaseValue);
660     JSValue subscript = JSValue::decode(encodedSubscript);
661     JSValue value = JSValue::decode(encodedValue);
662
663     putByVal(exec, baseValue, subscript, value, byValInfo);
664 }
665
666
667 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
668 {
669     VM& vm = exec->vm();
670     NativeCallFrameTracer tracer(&vm, exec);
671     
672     JSValue baseValue = JSValue::decode(encodedBaseValue);
673     JSValue subscript = JSValue::decode(encodedSubscript);
674     JSValue value = JSValue::decode(encodedValue);
675     RELEASE_ASSERT(baseValue.isObject());
676     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
677 }
678
// Slow path for op_call_eval. Returns the encoded empty JSValue when the
// callee is not the real built-in eval, signalling the JIT to fall back to an
// ordinary call; otherwise runs eval and returns its result (or the encoded
// empty value if it threw).
EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
{
    UNUSED_PARAM(exec);

    execCallee->setCodeBlock(0);

    // Only the genuine global eval gets eval semantics; a shadowing binding
    // named "eval" is just a normal call, handled by the caller.
    if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
        return JSValue::encode(JSValue());

    VM* vm = &execCallee->vm();
    JSValue result = eval(execCallee);
    if (vm->exception())
        return EncodedJSValue();
    
    return JSValue::encode(result);
}
695
// Invokes a non-JS (host/native) callee on behalf of the call link slow path.
// Returns an encoded pair: the machine address to (tail-)call next, and a
// flag saying whether the current frame should be kept or reused. Throws
// TypeError if the callee is not callable/constructible at all.
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);
    
        // A JS callee would have been linked directly; only host or
        // non-callable values reach this helper.
        ASSERT(callType != CallTypeJS);
    
        if (callType == CallTypeHost) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            // The native result is stashed in the VM; getHostCallReturnValue
            // (returned below) is a thunk that fetches it for JIT code.
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception()) {
                // Route JIT execution into the exception-throw thunk.
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // Tail calls may reuse the caller's frame; regular calls keep it.
            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
    
        ASSERT(callType == CallTypeNone);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    // Same protocol for `new` expressions, using the construct entry point.
    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);
    
    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);
    
    ASSERT(constructType != ConstructTypeJS);
    
    if (constructType == ConstructTypeHost) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception()) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Constructs are never tail calls; always keep the frame.
        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }
    
    ASSERT(constructType == ConstructTypeNone);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
757
// Slow path for an unlinked call site. Resolves the callee, compiles/selects
// the right entrypoint (host, WebAssembly, or JS function executable), links
// the call site once it has been seen before, and returns the machine code
// address to jump to plus the frame-reuse flag.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        // Host functions have no CodeBlock; always go through arity checking.
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        webAssemblyExecutable->prepareForExecution(execCallee);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // Constructing a non-constructible function (e.g. an arrow function)
        // throws before any compilation happens.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, scope, kind);
        if (error) {
            exec->vm().throwException(exec, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = functionExecutable->codeBlockFor(kind);
        ArityCheckMode arity;
        // Varargs call sites can't prove the argument count statically.
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    // Only link after the second time the site is reached; the first hit just
    // records that the site has been seen.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);
    
    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
827
// Shared implementation for virtual (unlinked, polymorphic) calls. Resolves
// the callee, makes sure its code is compiled, and returns the MustCheckArity
// entrypoint. The resolved callee cell is passed back through
// calleeAsFunctionCell so operationLinkPolymorphicCall can link against it.
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        // Need to compile first. WebAssembly executables prepare differently
        // from JS function executables, so distinguish the two below.
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            JSObject* error = functionExecutable->prepareForExecution(execCallee, function, scope, kind);
            if (error) {
                exec->vm().throwException(exec, error);
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            // WebAssembly functions can be called but never constructed.
            if (!isCall(kind)) {
                exec->vm().throwException(exec, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
            webAssemblyExecutable->prepareForExecution(execCallee);
#endif
        }
    }
    // Virtual calls can't prove argument counts, so always check arity.
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
884
885 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
886 {
887     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
888     JSCell* calleeAsFunctionCell;
889     SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
890
891     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
892     
893     return result;
894 }
895
896 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
897 {
898     JSCell* calleeAsFunctionCellIgnored;
899     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
900 }
901
902 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
903 {
904     VM* vm = &exec->vm();
905     NativeCallFrameTracer tracer(vm, exec);
906     
907     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
908 }
909
910 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
911 {
912     VM* vm = &exec->vm();
913     NativeCallFrameTracer tracer(vm, exec);
914
915     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
916 }
917
918 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
919 {
920     VM* vm = &exec->vm();
921     NativeCallFrameTracer tracer(vm, exec);
922
923     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
924 }
925
926 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
927 {
928     VM* vm = &exec->vm();
929     NativeCallFrameTracer tracer(vm, exec);
930
931     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
932 }
933
934 size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
935 {
936     VM* vm = &exec->vm();
937     NativeCallFrameTracer tracer(vm, exec);
938     
939     return JSValue::decode(encodedOp).toBoolean(exec);
940 }
941
942 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
943 {
944     VM* vm = &exec->vm();
945     NativeCallFrameTracer tracer(vm, exec);
946
947     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
948 }
949
950 #if USE(JSVALUE64)
951 EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
952 #else
953 size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
954 #endif
955 {
956     VM* vm = &exec->vm();
957     NativeCallFrameTracer tracer(vm, exec);
958
959     bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
960 #if USE(JSVALUE64)
961     return JSValue::encode(jsBoolean(result));
962 #else
963     return result;
964 #endif
965 }
966
967 size_t JIT_OPERATION operationHasProperty(ExecState* exec, JSObject* base, JSString* property)
968 {
969     int result = base->hasProperty(exec, property->toIdentifier(exec));
970     return result;
971 }
972     
973
974 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
975 {
976     VM* vm = &exec->vm();
977     NativeCallFrameTracer tracer(vm, exec);
978     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
979 }
980
981 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
982 {
983     VM* vm = &exec->vm();
984     NativeCallFrameTracer tracer(vm, exec);
985     return JSValue::encode(constructArray(exec, profile, values, size));
986 }
987
988 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
989 {
990     VM* vm = &exec->vm();
991     NativeCallFrameTracer tracer(vm, exec);
992     JSValue sizeValue = JSValue::decode(size);
993     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
994 }
995
996 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
997 {
998     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
999     VM& vm = exec->vm();
1000     NativeCallFrameTracer tracer(&vm, exec);
1001     return JSValue::encode(JSFunction::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1002 }
1003
1004 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1005 {
1006     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1007     VM& vm = exec->vm();
1008     NativeCallFrameTracer tracer(&vm, exec);
1009     return JSValue::encode(JSFunction::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1010 }
1011
1012 EncodedJSValue static operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue, bool isInvalidated)
1013 {
1014     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1015     FunctionExecutable* executable = static_cast<FunctionExecutable*>(functionExecutable);
1016     VM& vm = exec->vm();
1017     NativeCallFrameTracer tracer(&vm, exec);
1018         
1019     JSArrowFunction* arrowFunction  = isInvalidated
1020         ? JSArrowFunction::createWithInvalidatedReallocationWatchpoint(vm, executable, scope, JSValue::decode(thisValue))
1021         : JSArrowFunction::create(vm, executable, scope, JSValue::decode(thisValue));
1022     
1023     return JSValue::encode(arrowFunction);
1024 }
1025     
1026 EncodedJSValue JIT_OPERATION operationNewArrowFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
1027 {
1028     return operationNewFunctionCommon(exec, scope, functionExecutable, thisValue, true);
1029 }
1030     
1031 EncodedJSValue JIT_OPERATION operationNewArrowFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable, EncodedJSValue thisValue)
1032 {
1033     return operationNewFunctionCommon(exec, scope, functionExecutable, thisValue, false);
1034 }
1035
1036 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1037 {
1038     VM* vm = &exec->vm();
1039     NativeCallFrameTracer tracer(vm, exec);
1040     
1041     return constructEmptyObject(exec, structure);
1042 }
1043
1044 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1045 {
1046     VM& vm = exec->vm();
1047     NativeCallFrameTracer tracer(&vm, exec);
1048     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1049     if (!regexp->isValid()) {
1050         vm.throwException(exec, createSyntaxError(exec, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
1051         return JSValue::encode(jsUndefined());
1052     }
1053
1054     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1055 }
1056
1057 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1058 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1059 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1060 // future, we can switch to using that then.
1061 UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
1062 {
1063     VM& vm = exec->vm();
1064     NativeCallFrameTracer tracer(&vm, exec);
1065
1066     if (UNLIKELY(vm.shouldTriggerTermination(exec)))
1067         vm.throwException(exec, createTerminatedExecutionException(&vm));
1068
1069     return nullptr;
1070 }
1071
1072 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
1073 {
1074     VM& vm = exec->vm();
1075     NativeCallFrameTracer tracer(&vm, exec);
1076     JSValue errorMessageValue = JSValue::decode(encodedValue);
1077     RELEASE_ASSERT(errorMessageValue.isString());
1078     String errorMessage = asString(errorMessageValue)->value(exec);
1079     if (referenceErrorFlag)
1080         vm.throwException(exec, createReferenceError(exec, errorMessage));
1081     else
1082         vm.throwException(exec, createTypeError(exec, errorMessage));
1083 }
1084
1085 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
1086 {
1087     VM& vm = exec->vm();
1088     NativeCallFrameTracer tracer(&vm, exec);
1089
1090     vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
1091 }
1092
1093 #if ENABLE(DFG_JIT)
// Helper used by operationOptimize when tier-up is declined for now: refresh
// the code block's value predictions and reschedule optimization after the
// normal warm-up delay.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1099
// Baseline->DFG tier-up slow path. Called from the Baseline JIT when an
// execution-count trigger fires (bytecodeIndex == 0 for the prologue trigger,
// nonzero for a loop trigger). Returns (0, 0) to keep running baseline code,
// or (OSR entry thunk address, OSR data buffer) to jump into optimized code.
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);
    
    CodeBlock* codeBlock = exec->codeBlock();
    if (codeBlock->jitType() != JITCode::BaselineJIT) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    // Bail out early in the cases where optimizing is known to be the wrong
    // choice right now: threshold not reached, a profiler is active, a debugger
    // is stepping, or inlining is preferred over standalone compilation.
    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }
    
    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Snapshot the live locals (skipping the callee-save slots) so the DFG
        // compile has concrete values to specialize the OSR entry against.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        RefPtr<CodeBlock> replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock.get(), 0, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());
        
        if (result != CompilationSuccessful) {
            ASSERT(result == CompilationDeferred || replacementCodeBlock->hasOneRef());
            return encodeResult(0, 0);
        }
    }
    
    // At this point an optimized replacement exists; try to enter it via OSR.
    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
    
    return encodeResult(0, 0);
}
1335 #endif
1336
1337 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1338 {
1339     VM& vm = exec->vm();
1340     NativeCallFrameTracer tracer(&vm, exec);
1341
1342     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1343     ASSERT(isJSArray(arrayValue));
1344     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1345 }
1346
// Selects which accessor slot putAccessorByVal installs.
enum class AccessorType {
    Getter,
    Setter
};
1351
1352 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1353 {
1354     auto propertyKey = subscript.toPropertyKey(exec);
1355     if (exec->hadException())
1356         return;
1357
1358     if (accessorType == AccessorType::Getter)
1359         base->putGetter(exec, propertyKey, accessor, attribute);
1360     else
1361         base->putSetter(exec, propertyKey, accessor, attribute);
1362 }
1363
1364 #if USE(JSVALUE64)
1365 void JIT_OPERATION operationPutGetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t options, EncodedJSValue encodedGetterValue)
1366 {
1367     VM& vm = exec->vm();
1368     NativeCallFrameTracer tracer(&vm, exec);
1369
1370     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1371     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1372
1373     JSValue getter = JSValue::decode(encodedGetterValue);
1374     ASSERT(getter.isObject());
1375     baseObj->putGetter(exec, *identifier, asObject(getter), options);
1376 }
1377
1378 void JIT_OPERATION operationPutSetterById(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t options, EncodedJSValue encodedSetterValue)
1379 {
1380     VM& vm = exec->vm();
1381     NativeCallFrameTracer tracer(&vm, exec);
1382
1383     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1384     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1385
1386     JSValue setter = JSValue::decode(encodedSetterValue);
1387     ASSERT(setter.isObject());
1388     baseObj->putSetter(exec, *identifier, asObject(setter), options);
1389 }
1390
1391 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, int32_t attribute,
1392     EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1393 {
1394     VM& vm = exec->vm();
1395     NativeCallFrameTracer tracer(&vm, exec);
1396
1397     ASSERT(JSValue::decode(encodedObjectValue).isObject());
1398     JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));
1399
1400     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1401
1402     JSValue getter = JSValue::decode(encodedGetterValue);
1403     JSValue setter = JSValue::decode(encodedSetterValue);
1404     ASSERT(getter.isObject() || getter.isUndefined());
1405     ASSERT(setter.isObject() || setter.isUndefined());
1406     ASSERT(getter.isObject() || setter.isObject());
1407
1408     if (!getter.isUndefined())
1409         accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1410     if (!setter.isUndefined())
1411         accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1412     baseObj->putDirectAccessor(exec, *identifier, accessor, attribute);
1413 }
1414
1415 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, int32_t attribute, EncodedJSValue encodedGetter)
1416 {
1417     VM& vm = exec->vm();
1418     NativeCallFrameTracer tracer(&vm, exec);
1419     JSObject* base = asObject(JSValue::decode(encodedBase));
1420     JSValue subscript = JSValue::decode(encodedSubscript);
1421     JSObject* getter = asObject(JSValue::decode(encodedGetter));
1422     putAccessorByVal(exec, base, subscript, attribute, getter, AccessorType::Getter);
1423 }
1424
1425 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, int32_t attribute, EncodedJSValue encodedSetter)
1426 {
1427     VM& vm = exec->vm();
1428     NativeCallFrameTracer tracer(&vm, exec);
1429     JSObject* base = asObject(JSValue::decode(encodedBase));
1430     JSValue subscript = JSValue::decode(encodedSubscript);
1431     JSObject* setter = asObject(JSValue::decode(encodedSetter));
1432     putAccessorByVal(exec, base, subscript, attribute, setter, AccessorType::Setter);
1433 }
1434
1435 #else
1436 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, Identifier* identifier, int32_t options, JSCell* getter)
1437 {
1438     VM& vm = exec->vm();
1439     NativeCallFrameTracer tracer(&vm, exec);
1440
1441     ASSERT(object && object->isObject());
1442     JSObject* baseObj = object->getObject();
1443
1444     ASSERT(getter->isObject());
1445     baseObj->putGetter(exec, *identifier, getter, options);
1446 }
1447
1448 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, Identifier* identifier, int32_t options, JSCell* setter)
1449 {
1450     VM& vm = exec->vm();
1451     NativeCallFrameTracer tracer(&vm, exec);
1452
1453     ASSERT(object && object->isObject());
1454     JSObject* baseObj = object->getObject();
1455
1456     ASSERT(setter->isObject());
1457     baseObj->putSetter(exec, *identifier, setter, options);
1458 }
1459
1460 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, Identifier* identifier, int32_t attribute, JSCell* getter, JSCell* setter)
1461 {
1462     VM& vm = exec->vm();
1463     NativeCallFrameTracer tracer(&vm, exec);
1464
1465     ASSERT(object && object->isObject());
1466     JSObject* baseObj = object->getObject();
1467
1468     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1469
1470     ASSERT(!getter || getter->isObject());
1471     ASSERT(!setter || setter->isObject());
1472     ASSERT(getter || setter);
1473
1474     if (getter)
1475         accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1476     if (setter)
1477         accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1478     baseObj->putDirectAccessor(exec, *identifier, accessor, attribute);
1479 }
1480
1481 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1482 {
1483     VM& vm = exec->vm();
1484     NativeCallFrameTracer tracer(&vm, exec);
1485
1486     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1487 }
1488
1489 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1490 {
1491     VM& vm = exec->vm();
1492     NativeCallFrameTracer tracer(&vm, exec);
1493
1494     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1495 }
1496
1497 #endif
1498
1499 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1500 {
1501     VM& vm = exec->vm();
1502     NativeCallFrameTracer tracer(&vm, exec);
1503
1504     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1505     exec->uncheckedR(scopeReg) = scope->next();
1506 }
1507
1508 void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
1509 {
1510     VM& vm = exec->vm();
1511     NativeCallFrameTracer tracer(&vm, exec);
1512
1513     if (LegacyProfiler* profiler = vm.enabledProfiler())
1514         profiler->didExecute(exec, JSValue::decode(encodedValue));
1515 }
1516
1517 void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
1518 {
1519     VM& vm = exec->vm();
1520     NativeCallFrameTracer tracer(&vm, exec);
1521
1522     if (LegacyProfiler* profiler = vm.enabledProfiler())
1523         profiler->willExecute(exec, JSValue::decode(encodedValue));
1524 }
1525
// Slow path for instanceof when the right-hand side may implement custom
// HasInstance behavior. The fast path already excluded bases with default
// HasInstance (hence the ASSERT); a base without custom behavior is an
// invalid instanceof target and throws a TypeError.
EncodedJSValue JIT_OPERATION operationCheckHasInstance(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedBaseVal)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseVal = JSValue::decode(encodedBaseVal);

    if (baseVal.isObject()) {
        JSObject* baseObject = asObject(baseVal);
        ASSERT(!baseObject->structure(vm)->typeInfo().implementsDefaultHasInstance());
        if (baseObject->structure(vm)->typeInfo().implementsHasInstance()) {
            bool result = baseObject->methodTable(vm)->customHasInstance(baseObject, exec, value);
            return JSValue::encode(jsBoolean(result));
        }
    }

    // An encoded empty JSValue is returned with the exception left pending.
    vm.throwException(exec, createInvalidInstanceofParameterError(exec, baseVal));
    return JSValue::encode(JSValue());
}
1546
1547 }
1548
1549 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1550 {
1551     switch (object.structure()->typeInfo().type()) {
1552     case DirectArgumentsType: {
1553         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1554         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1555             return true;
1556         break;
1557     }
1558     case ScopedArgumentsType: {
1559         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1560         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1561             return true;
1562         break;
1563     }
1564     default:
1565         break;
1566     }
1567     return false;
1568 }
1569
// Shared slow-path implementation of get_by_val. May repatch the calling site
// (via returnAddress) to a more specialized thunk and records profiling
// information in byValInfo along the way.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // Fast case: an already-atomized string key on a cell whose structure
    // permits the fast own-property lookup.
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A cached-id stub for a different identifier cannot serve this access.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // Specialize the call site for string character access next time.
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            // Arguments objects may still be fast; anything else records an
            // out-of-bounds access so the DFG stops speculating in-bounds.
            if (!canAccessArgumentIndexQuickly(*object, i))
                byValInfo->arrayProfile->setOutOfBounds();
        }

        return baseValue.get(exec, i);
    }

    // Generic path: RequireObjectCoercible, then ToPropertyKey, each of which
    // can throw.
    baseValue.requireObjectCoercible(exec);
    if (exec->hadException())
        return jsUndefined();
    auto property = subscript.toPropertyKey(exec);
    if (exec->hadException())
        return jsUndefined();

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}
1623
// Decides whether and how to specialize this get_by_val site. Returns:
//   Optimized    - a stub was compiled and the site repatched;
//   SeenOnce     - first sighting of a string/symbol key; cached for next time;
//   GiveUp       - the site looks polymorphic or unprofitable, stop trying;
//   NotOptimized - otherwise (counts toward the ten-strike give-up limit).
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    // Indexed access on an object: try an array-shape-specific stub.
    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    // Named access with a string/symbol key: try a cached-id stub, but only
    // once the same identifier has been seen twice in a row.
    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // String keys that parse as array indices belong to the indexed path.
        if (!subscript.isString() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                optimizationResult = OptimizationResult::SeenOnce;
            }

        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1692
1693 extern "C" {
1694
1695 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1696 {
1697     VM& vm = exec->vm();
1698     NativeCallFrameTracer tracer(&vm, exec);
1699     JSValue baseValue = JSValue::decode(encodedBase);
1700     JSValue subscript = JSValue::decode(encodedSubscript);
1701
1702     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1703     return JSValue::encode(result);
1704 }
1705
// get_by_val slow path that additionally tries to specialize the call site
// (indexed or cached-id stub). If optimization is judged hopeless, the site
// is repatched to the never-optimizing generic entry point.
EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
    if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
    }

    // Whatever happened to the call site, this invocation still must produce
    // the access result.
    return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
}
1722
// Slow path for has_indexed_property (for-in's fast "in" check). Tries to
// compile an array-shape-specific stub for the site before answering; gives
// up permanently after repeated failures by repatching to the generic entry.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    // The JIT only emits this call for object bases with uint32 subscripts.
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);
    
    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }
    
    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    // Fast path: storage indexable without consulting the method table.
    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    // Record out-of-bounds accesses (except fast arguments) so the DFG stops
    // speculating in-bounds for this site.
    if (!canAccessArgumentIndexQuickly(*object, index))
        byValInfo->arrayProfile->setOutOfBounds();
    return JSValue::encode(jsBoolean(object->hasProperty(exec, index)));
}
1769     
1770 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1771 {
1772     VM& vm = exec->vm();
1773     NativeCallFrameTracer tracer(&vm, exec);
1774     JSValue baseValue = JSValue::decode(encodedBase);
1775     JSValue subscript = JSValue::decode(encodedSubscript);
1776     
1777     ASSERT(baseValue.isObject());
1778     ASSERT(subscript.isUInt32());
1779
1780     JSObject* object = asObject(baseValue);
1781     uint32_t index = subscript.asUInt32();
1782     if (object->canGetIndexQuickly(index))
1783         return JSValue::encode(JSValue(JSValue::JSTrue));
1784
1785     if (!canAccessArgumentIndexQuickly(*object, index))
1786         byValInfo->arrayProfile->setOutOfBounds();
1787     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript.asUInt32())));
1788 }
1789     
// Specialized get_by_val entry for string bases indexed by integers. If the
// base stops being a string, the call site is repatched back to a
// non-string entry so the specialization does not linger.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                ASSERT(exec->bytecodeOffset());
                // The base is no longer a string; undo the specialization.
                ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        // Non-integer subscript: RequireObjectCoercible then ToPropertyKey,
        // each of which can throw.
        baseValue.requireObjectCoercible(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        auto property = subscript.toPropertyKey(exec);
        if (exec->hadException())
            return JSValue::encode(jsUndefined());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
1821
1822 EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
1823 {
1824     VM& vm = exec->vm();
1825     NativeCallFrameTracer tracer(&vm, exec);
1826
1827     JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
1828     bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
1829     JSValue result = jsBoolean(couldDelete);
1830     if (!couldDelete && exec->codeBlock()->isStrictMode())
1831         vm.throwException(exec, createTypeError(exec, ASCIILiteral("Unable to delete property.")));
1832     return JSValue::encode(result);
1833 }
1834
1835 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1836 {
1837     VM& vm = exec->vm();
1838     NativeCallFrameTracer tracer(&vm, exec);
1839     JSValue value = JSValue::decode(encodedValue);
1840     JSValue proto = JSValue::decode(encodedProto);
1841     
1842     ASSERT(!value.isObject() || !proto.isObject());
1843
1844     bool result = JSObject::defaultHasInstance(exec, value, proto);
1845     return JSValue::encode(jsBoolean(result));
1846 }
1847
1848 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1849 {
1850     VM& vm = exec->vm();
1851     NativeCallFrameTracer tracer(&vm, exec);
1852     JSStack* stack = &exec->interpreter()->stack();
1853     JSValue arguments = JSValue::decode(encodedArguments);
1854     return sizeFrameForVarargs(exec, stack, arguments, numUsedStackSlots, firstVarArgOffset);
1855 }
1856
1857 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
1858 {
1859     VM& vm = exec->vm();
1860     NativeCallFrameTracer tracer(&vm, exec);
1861     JSValue arguments = JSValue::decode(encodedArguments);
1862     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
1863     return newCallFrame;
1864 }
1865
1866 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1867 {
1868     VM& vm = exec->vm();
1869     NativeCallFrameTracer tracer(&vm, exec);
1870     return JSValue::encode(JSValue::decode(value).toObject(exec));
1871 }
1872
1873 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1874 {
1875     VM& vm = exec->vm();
1876     NativeCallFrameTracer tracer(&vm, exec);
1877     JSValue key = JSValue::decode(encodedKey);
1878     CodeBlock* codeBlock = exec->codeBlock();
1879
1880     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1881     void* result = jumpTable.ctiDefault.executableAddress();
1882
1883     if (key.isString()) {
1884         StringImpl* value = asString(key)->value(exec).impl();
1885         if (value->length() == 1)
1886             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1887     }
1888
1889     return reinterpret_cast<char*>(result);
1890 }
1891
1892 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1893 {
1894     VM& vm = exec->vm();
1895     NativeCallFrameTracer tracer(&vm, exec);
1896     JSValue key = JSValue::decode(encodedKey);
1897     CodeBlock* codeBlock = exec->codeBlock();
1898
1899     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1900     void* result;
1901     if (key.isInt32())
1902         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1903     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1904         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
1905     else
1906         result = jumpTable.ctiDefault.executableAddress();
1907     return reinterpret_cast<char*>(result);
1908 }
1909
1910 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1911 {
1912     VM& vm = exec->vm();
1913     NativeCallFrameTracer tracer(&vm, exec);
1914     JSValue key = JSValue::decode(encodedKey);
1915     CodeBlock* codeBlock = exec->codeBlock();
1916
1917     void* result;
1918     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1919
1920     if (key.isString()) {
1921         StringImpl* value = asString(key)->value(exec).impl();
1922         result = jumpTable.ctiForValue(value).executableAddress();
1923     } else
1924         result = jumpTable.ctiDefault.executableAddress();
1925
1926     return reinterpret_cast<char*>(result);
1927 }
1928
// Slow path for get_from_scope: reads ident from the resolved scope object,
// throwing for missing variables under ThrowIfNotFound and for reads of
// not-yet-initialized (TDZ) global lexical variables.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    // Operand layout: pc[2] = scope register, pc[3] = identifier index,
    // pc[4] = GetPutInfo bits.
    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    PropertySlot slot(scope);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        if (getPutInfo.resolveMode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    JSValue result = JSValue();
    if (jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        result = slot.getValue(exec, ident);
        if (result == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return JSValue::encode(jsUndefined());
        }
    }

    // Give the inline cache a chance to specialize future global reads.
    CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

    if (!result)
        result = slot.getValue(exec, ident);
    return JSValue::encode(result);
}
1966
// Slow path for put_to_scope: stores value into the resolved scope, with
// special handling for local closure variables, TDZ checks on global lexical
// environments, and ThrowIfNotFound semantics.
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    // Operand layout: pc[1] = scope register, pc[2] = identifier index,
    // pc[3] = value register, pc[4] = GetPutInfo bits, pc[5] = watchpoint set,
    // pc[6] = scope offset (LocalClosureVar only).
    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        // Direct store into the lexical environment slot; fire the variable's
        // watchpoint so dependent compiled code deoptimizes.
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch("Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && jsDynamicCast<JSGlobalLexicalEnvironment*>(scope)
        && getPutInfo.initializationMode() != Initialization) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            exec->vm().throwException(exec, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, getPutInfo.initializationMode() == Initialization);
    scope->methodTable()->put(scope, exec, ident, value, slot);
    
    if (exec->vm().exception())
        return;

    // Give the inline cache a chance to specialize future global writes.
    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
2016
2017 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
2018 {
2019     VM* vm = &exec->vm();
2020     NativeCallFrameTracer tracer(vm, exec);
2021
2022     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
2023     vm->throwException(exec, exceptionValue);
2024
2025     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
2026     genericUnwind(vm, exec);
2027 }
2028
2029 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
2030 {
2031     VM* vm = &exec->vm();
2032     NativeCallFrameTracer tracer(vm, exec);
2033     vm->heap.flushWriteBarrierBuffer(cell);
2034 }
2035
2036 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2037 {
2038     VM* vm = &exec->vm();
2039     NativeCallFrameTracer tracer(vm, exec);
2040     vm->heap.writeBarrier(cell);
2041 }
2042
2043 // NB: We don't include the value as part of the barrier because the write barrier elision
2044 // phase in the DFG only tracks whether the object being stored to has been barriered. It 
2045 // would be much more complicated to try to model the value being stored as well.
2046 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
2047 {
2048     VM* vm = &exec->vm();
2049     NativeCallFrameTracer tracer(vm, exec);
2050     vm->heap.writeBarrier(cell);
2051 }
2052
2053 void JIT_OPERATION operationInitGlobalConst(ExecState* exec, Instruction* pc)
2054 {
2055     VM* vm = &exec->vm();
2056     NativeCallFrameTracer tracer(vm, exec);
2057
2058     JSValue value = exec->r(pc[2].u.operand).jsValue();
2059     pc[1].u.variablePointer->set(*vm, exec->codeBlock()->globalObject(), value);
2060 }
2061
// Finds the handler for the current exception starting at this frame.
// genericUnwind() publishes its results out-of-band in
// vm->targetMachinePCForThrow (asserted below) and vm->callFrameForCatch.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2068
// Like lookupExceptionHandler(), but starts unwinding from the caller's
// frame (UnwindFromCallerFrame) rather than from this one.
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}
2075
2076 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2077 {
2078     VM* vm = &exec->vm();
2079     NativeCallFrameTracer tracer(vm, exec);
2080     genericUnwind(vm, exec);
2081 }
2082
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
#if COMPILER(GCC_OR_CLANG)
    // Attribute the fuzzed exception check to the machine return address of
    // this call site.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}
2097
2098 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2099 {
2100     VM& vm = exec->vm();
2101     NativeCallFrameTracer tracer(&vm, exec);
2102     JSValue baseValue = JSValue::decode(encodedBaseValue);
2103     if (baseValue.isUndefinedOrNull())
2104         return JSValue::encode(jsBoolean(false));
2105
2106     JSObject* base = baseValue.toObject(exec);
2107     return JSValue::encode(jsBoolean(base->hasProperty(exec, asString(propertyName)->toIdentifier(exec))));
2108 }
2109
2110 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2111 {
2112     VM& vm = exec->vm();
2113     NativeCallFrameTracer tracer(&vm, exec);
2114     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2115     return JSValue::encode(jsBoolean(object->hasProperty(exec, subscript)));
2116 }
2117     
2118 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2119 {
2120     VM& vm = exec->vm();
2121     NativeCallFrameTracer tracer(&vm, exec);
2122
2123     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2124
2125     return propertyNameEnumerator(exec, base);
2126 }
2127
2128 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2129 {
2130     VM& vm = exec->vm();
2131     NativeCallFrameTracer tracer(&vm, exec);
2132     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2133     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2134     return JSValue::encode(propertyName ? propertyName : jsNull());
2135 }
2136
2137 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2138 {
2139     VM& vm = exec->vm();
2140     NativeCallFrameTracer tracer(&vm, exec);
2141     return jsString(exec, Identifier::from(exec, index).string());
2142 }
2143
2144 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2145 {
2146     exec->vm().typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2147 }
2148
2149 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2150 {
2151     VM& vm = exec->vm();
2152     NativeCallFrameTracer tracer(&vm, exec);
2153     RELEASE_ASSERT(!!vm.exception());
2154
2155     if (LegacyProfiler* profiler = vm.enabledProfiler())
2156         profiler->exceptionUnwind(exec);
2157
2158     if (isTerminatedExecutionException(vm.exception())) {
2159         genericUnwind(&vm, exec);
2160         return 1;
2161     } else
2162         return 0;
2163 }
2164
2165 } // extern "C"
2166
2167 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2168 // definition of getHostCallReturnValue() below because the Windows build
2169 // requires it.
2170 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2171 {
2172     if (!exec)
2173         return JSValue::encode(JSValue());
2174     return JSValue::encode(exec->vm().hostCallReturnValue);
2175 }
2176
// Each platform-specific thunk below implements getHostCallReturnValue by
// moving the caller's frame pointer into the first argument register (it is
// passed on as the ExecState*) and transferring control to
// getHostCallReturnValueWithExecState() above.
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
// x86-64 System V: first argument in %rdi; frame pointer is %rbp. Tail-jump.
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov %rbp, %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
// x86-32 passes arguments on the stack, so a plain tail-jump is not enough:
// build a small aligned frame, push %ebp as the argument, call, then tear
// the frame back down before returning.
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "leal -4(%esp), %esp\n"
    "push %ebp\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
// ARM Thumb-2: first argument in r0; r7 is used as the frame pointer here.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r0, r7" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
// Traditional ARM: first argument in r0; r11 is used as the frame pointer.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r0, r11" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
// AArch64: first argument in x0; x29 is the frame pointer.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
     "mov x0, x29" "\n"
     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

// In MIPS PIC code the callee expects $t9 to hold its own address, so load
// it before branching; in non-PIC builds the macro expands to nothing.
#if WTF_MIPS_PIC
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

// MIPS: first argument in $a0; $fp is the frame pointer.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "move $a0, $fp" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

#define SH4_SCRATCH_REGISTER "r11"

// SH4: first argument in r4; r14 is the frame pointer. The branch target is
// loaded PC-relative from the literal pool at label 2 and reached via braf
// (note the delay slot nop after the branch).
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r14, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
// MSVC x86: a naked function that overwrites its own stack argument slot
// with ebp, then jumps — the callee sees ebp as its first (stack) argument.
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm mov [esp + 4], ebp;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
2284
2285 } // namespace JSC
2286
2287 #endif // ENABLE(JIT)