744e905374618447399b05bea225918f782e77a7
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2016 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArithProfile.h"
32 #include "ArrayConstructor.h"
33 #include "CommonSlowPaths.h"
34 #include "DFGCompilationMode.h"
35 #include "DFGDriver.h"
36 #include "DFGOSREntry.h"
37 #include "DFGThunks.h"
38 #include "DFGWorklist.h"
39 #include "Debugger.h"
40 #include "DirectArguments.h"
41 #include "Error.h"
42 #include "ErrorHandlingScope.h"
43 #include "ExceptionFuzz.h"
44 #include "GetterSetter.h"
45 #include "HostCallReturnValue.h"
46 #include "ICStats.h"
47 #include "Interpreter.h"
48 #include "JIT.h"
49 #include "JITExceptions.h"
50 #include "JITToDFGDeferredCompilationCallback.h"
51 #include "JSAsyncFunction.h"
52 #include "JSCInlines.h"
53 #include "JSGeneratorFunction.h"
54 #include "JSGlobalObjectFunctions.h"
55 #include "JSLexicalEnvironment.h"
56 #include "JSPropertyNameEnumerator.h"
57 #include "ObjectConstructor.h"
58 #include "PolymorphicAccess.h"
59 #include "PropertyName.h"
60 #include "RegExpObject.h"
61 #include "Repatch.h"
62 #include "ScopedArguments.h"
63 #include "ShadowChicken.h"
64 #include "StructureStubInfo.h"
65 #include "SuperSampler.h"
66 #include "TestRunnerUtils.h"
67 #include "TypeProfilerLog.h"
68 #include "VMInlines.h"
69 #include <wtf/InlineASM.h>
70
71 namespace JSC {
72
73 extern "C" {
74
75 #if COMPILER(MSVC)
76 void * _ReturnAddress(void);
77 #pragma intrinsic(_ReturnAddress)
78
79 #define OUR_RETURN_ADDRESS _ReturnAddress()
80 #else
81 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
82 #endif
83
84 #if ENABLE(OPCODE_SAMPLING)
85 #define CTI_SAMPLER vm->interpreter->sampler()
86 #else
87 #define CTI_SAMPLER 0
88 #endif
89
90
// Called from JIT code when stack exhaustion is detected during frame setup.
// The current call frame has not been fully populated yet, so the CodeBlock
// is passed in explicitly rather than being read off the frame.
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    if (!callerFrame) {
        // No caller frame: throw against the current frame instead.
        callerFrame = exec;
        vmEntryFrame = vm->topVMEntryFrame;
    }

    // Make the caller's frame the VM's notion of the top frame for the
    // duration of the throw, restoring afterwards.
    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    throwStackOverflowError(callerFrame, scope);
}
107
108 #if ENABLE(WEBASSEMBLY)
// WebAssembly trap handler: throws an Error for integer division by zero or
// division overflow. Attributes the exception to the calling frame.
void JIT_OPERATION operationThrowDivideError(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    // Enter error-handling mode for the VM while the error object is built
    // and thrown.
    ErrorHandlingScope errorScope(*vm);
    throwException(callerFrame, scope, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
}
121
// WebAssembly trap handler: throws an Error for an out-of-bounds memory
// access. Attributes the exception to the calling frame.
void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    // Enter error-handling mode for the VM while the error object is built
    // and thrown.
    ErrorHandlingScope errorScope(*vm);
    throwException(callerFrame, scope, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
}
134 #endif
135
// Slow path for op_call arity checking. Returns the missing-argument count
// computed by CommonSlowPaths::arityCheckFor; a negative value signals that
// the frame could not be adjusted and a stack-overflow error has been thrown.
int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForCall);
    if (missingArgCount < 0) {
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        // Attribute the error to the caller; the current frame is unusable.
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame, scope);
    }

    return missingArgCount;
}
151
152 int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
153 {
154     VM* vm = &exec->vm();
155     auto scope = DECLARE_THROW_SCOPE(*vm);
156
157     int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForConstruct);
158     if (missingArgCount < 0) {
159         VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
160         CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
161         NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
162         throwStackOverflowError(callerFrame, scope);
163     }
164
165     return missingArgCount;
166 }
167
168 EncodedJSValue JIT_OPERATION operationTryGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
169 {
170     VM* vm = &exec->vm();
171     NativeCallFrameTracer tracer(vm, exec);
172     Identifier ident = Identifier::fromUid(vm, uid);
173     stubInfo->tookSlowPath = true;
174
175     JSValue baseValue = JSValue::decode(base);
176     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
177     baseValue.getPropertySlot(exec, ident, slot);
178
179     return JSValue::encode(slot.getPureResult());
180 }
181
182
183 EncodedJSValue JIT_OPERATION operationTryGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
184 {
185     VM* vm = &exec->vm();
186     NativeCallFrameTracer tracer(vm, exec);
187     Identifier ident = Identifier::fromUid(vm, uid);
188
189     JSValue baseValue = JSValue::decode(base);
190     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
191     baseValue.getPropertySlot(exec, ident, slot);
192
193     return JSValue::encode(slot.getPureResult());
194 }
195
// Optimizing slow path for try_get_by_id: performs the lookup and, when the
// result is safely cacheable, repatches the stub as a Pure access.
EncodedJSValue JIT_OPERATION operationTryGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);

    baseValue.getPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    // Only cache lookups that did not touch an opaque object and resolved to
    // a plain value, a cacheable getter, or a definite miss.
    if (stubInfo->considerCaching(baseValue.structureOrNull()) && !slot.isTaintedByOpaqueObject() && (slot.isCacheableValue() || slot.isCacheableGetter() || slot.isUnset()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Pure);

    return JSValue::encode(slot.getPureResult());
}
214
// Generic slow path for get_by_id after the IC has given up: performs a full
// Get and records that the stub took the slow path.
EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // Record that this IC site went generic so repatching can account for it.
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);

    LOG_IC((ICEvent::OperationGetById, baseValue.classInfoOrNull(), ident));
    return JSValue::encode(baseValue.get(exec, ident, slot));
}
231
232 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
233 {
234     SuperSamplerScope superSamplerScope(false);
235     
236     VM* vm = &exec->vm();
237     NativeCallFrameTracer tracer(vm, exec);
238     
239     JSValue baseValue = JSValue::decode(base);
240     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
241     Identifier ident = Identifier::fromUid(vm, uid);
242     LOG_IC((ICEvent::OperationGetByIdGeneric, baseValue.classInfoOrNull(), ident));
243     return JSValue::encode(baseValue.get(exec, ident, slot));
244 }
245
// Optimizing slow path for get_by_id: performs the lookup and attempts to
// repatch the inline cache as a Normal access from within the lookup callback.
EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    LOG_IC((ICEvent::OperationGetByIdOptimize, baseValue.classInfoOrNull(), ident));

    // The callback runs after the slot is resolved but before the value is
    // produced, so the cache is patched against the observed slot state.
    return JSValue::encode(baseValue.getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (stubInfo->considerCaching(baseValue.structureOrNull()))
            repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Normal);
        return found ? slot.getValue(exec, ident) : jsUndefined();
    }));
}
263
// Optimizing slow path for the 'in' operator IC: throws a TypeError for
// non-object bases, performs the HasProperty lookup, and attempts to repatch
// the stub with the observed result.
EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    if (!base->isObject()) {
        // 'in' requires an object right-hand side.
        throwException(exec, scope, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    // Snapshot the stub's access type; the lookup below can run arbitrary JS.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    LOG_IC((ICEvent::OperationInOptimize, base->classInfo(), ident));
    PropertySlot slot(base, PropertySlot::InternalMethodType::HasProperty);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    // The lookup must not have changed the stub's access type out from
    // under us.
    RELEASE_ASSERT(accessType == stubInfo->accessType);

    if (stubInfo->considerCaching(asObject(base)->structure()))
        repatchIn(exec, base, ident, result, slot, *stubInfo);

    return JSValue::encode(jsBoolean(result));
}
292
// Generic slow path for the 'in' operator after the IC has given up: throws
// a TypeError for non-object bases, otherwise returns hasProperty().
EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    // Record that this IC site went generic so repatching can account for it.
    stubInfo->tookSlowPath = true;

    if (!base->isObject()) {
        throwException(exec, scope, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    Identifier ident = Identifier::fromUid(vm, key);
    LOG_IC((ICEvent::OperationIn, base->classInfo(), ident));
    return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
}
312
313 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
314 {
315     SuperSamplerScope superSamplerScope(false);
316     
317     VM* vm = &exec->vm();
318     NativeCallFrameTracer tracer(vm, exec);
319
320     return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
321 }
322
// Generic slow path for strict-mode put_by_id after the IC has given up:
// performs a full put with the "throw on failure" flag set.
void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // Record that this IC site went generic so repatching can account for it.
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdStrict, baseValue.classInfoOrNull(), ident));

    // true => strict mode: failed puts throw.
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}
339
// Generic slow path for sloppy-mode put_by_id after the IC has given up:
// performs a full put; failed puts are silently ignored.
void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // Record that this IC site went generic so repatching can account for it.
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdNonStrict, baseValue.classInfoOrNull(), ident));
    // false => non-strict mode.
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}
355
// Generic slow path for strict-mode direct put_by_id (own-property define,
// bypassing the prototype chain and setters) after the IC has given up.
void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // Record that this IC site went generic so repatching can account for it.
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    // Direct puts require the base to be an object.
    asObject(baseValue)->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}
371
// Generic slow path for sloppy-mode direct put_by_id (own-property define,
// bypassing the prototype chain and setters) after the IC has given up.
void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // Record that this IC site went generic so repatching can account for it.
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
    // Direct puts require the base to be an object.
    asObject(baseValue)->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}
387
// Optimizing slow path for strict-mode put_by_id: performs the put, then
// attempts to repatch the inline cache against the pre-put structure.
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type; the put below can run arbitrary JS.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdStrictOptimize, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put, since the put may transition it;
    // the cache must be keyed on the old structure.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);
    RETURN_IF_EXCEPTION(scope, void());

    // If the put changed the stub's access type, the stub state is stale;
    // don't repatch.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
414
// Optimizing slow path for sloppy-mode put_by_id: performs the put, then
// attempts to repatch the inline cache against the pre-put structure.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type; the put below can run arbitrary JS.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdNonStrictOptimize, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put, since the put may transition it;
    // the cache must be keyed on the old structure.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);
    RETURN_IF_EXCEPTION(scope, void());

    // If the put changed the stub's access type, the stub state is stale;
    // don't repatch.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
441
// Optimizing slow path for strict-mode direct put_by_id: performs the direct
// put, then attempts to repatch the inline cache against the pre-put
// structure.
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type; the put below can mutate the stub.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectStrictOptimize, baseObject->classInfo(), ident));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put, since the put may transition it.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    // If the put changed the stub's access type, the stub state is stale;
    // don't repatch.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
466
// Optimizing slow path for sloppy-mode direct put_by_id: performs the direct
// put, then attempts to repatch the inline cache against the pre-put
// structure.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type; the put below can mutate the stub.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrictOptimize, baseObject->classInfo(), ident));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());

    // Capture the structure before the put, since the put may transition it.
    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    // If the put changed the stub's access type, the stub state is stale;
    // don't repatch.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
491
492 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
493 {
494     return value.isString() || value.isSymbol();
495 }
496
// Shared slow-path implementation of put-by-val. Integer subscripts try fast
// indexed storage first; everything else is converted to a property key and
// routed through a full property put.
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    if (LIKELY(subscript.isUInt32())) {
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i))
                object->setIndexQuickly(callFrame->vm(), i, value);
            else {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
                object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            }
        } else
            baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    RETURN_IF_EXCEPTION(scope, void());

    // If a stub exists but was cached for a different identifier (or for a
    // non-name subscript), this site is effectively generic.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    scope.release();
    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.putInline(callFrame, property, value, slot);
}
531
// Shared slow-path implementation of direct (own-property) put-by-val, used
// for object-literal/define-style stores. Index-like subscripts use
// putDirectIndex; named subscripts use putDirect.
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));
        if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
            baseObject->setIndexQuickly(callFrame->vm(), index, value);
            return;
        }

        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (subscript.isDouble()) {
        // A double subscript that round-trips through uint32_t exactly is
        // still an array index.
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    RETURN_IF_EXCEPTION(scope, void());

    // A string key that parses as an array index still goes through the
    // indexed path.
    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // If a stub exists but was cached for a different identifier (or for a
    // non-name subscript), this site is effectively generic.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseObject, isStrictMode);
    baseObject->putDirect(callFrame->vm(), property, value, slot);
}
581
// Outcome of a by-val inline-cache optimization attempt.
enum class OptimizationResult {
    NotOptimized, // Nothing cacheable observed at this site (yet).
    SeenOnce,     // First sighting of a cacheable identifier; recorded for next time.
    Optimized,    // A specialized stub was compiled for this site.
    GiveUp,       // Site is too polymorphic; never try to optimize it again.
};
588
// Decides whether, and how, to specialize a put_by_val inline cache: compiles
// an array-mode stub for int32 subscripts on objects with optimizable indexed
// storage, or a cached-identifier stub for repeated string/symbol keys.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            // Only recompile if this array mode both permits puts and differs
            // from what the stub was already compiled for.
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                // The array profile is shared with compiler threads; update it
                // under the code block's lock.
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // Index-like string keys go through the array path, not the cached-id
        // path; symbols are never indices.
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same identifier twice in a row: compile a put-by-id-style
                    // stub specialized on it.
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: remember the identifier and wait for a repeat.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
658
// Optimizing slow path for put_by_val: tries to specialize the by-val IC and,
// on GiveUp, repatches the call site to the generic operation so we never
// come back here. Always completes the put itself.
void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
    }
    putByVal(exec, baseValue, subscript, value, byValInfo);
}
674
// Direct-put counterpart of tryPutByValOptimize: decides whether to compile
// an array-mode stub or a cached-identifier stub for a put_by_val_direct
// site. The base is already known to be an object.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            // Only recompile if this array mode both permits puts and differs
            // from what the stub was already compiled for.
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                // The array profile is shared with compiler threads; update it
                // under the code block's lock.
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        // Index-like string keys go through the array path, not the cached-id
        // path; symbols are never indices.
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same identifier twice in a row: compile a stub
                    // specialized on it (Direct put semantics).
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: remember the identifier and wait for a repeat.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
740
741 void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
742 {
743     VM& vm = exec->vm();
744     NativeCallFrameTracer tracer(&vm, exec);
745
746     JSValue baseValue = JSValue::decode(encodedBaseValue);
747     JSValue subscript = JSValue::decode(encodedSubscript);
748     JSValue value = JSValue::decode(encodedValue);
749     RELEASE_ASSERT(baseValue.isObject());
750     JSObject* object = asObject(baseValue);
751     if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
752         // Don't ever try to optimize.
753         byValInfo->tookSlowPath = true;
754         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
755     }
756
757     directPutByVal(exec, object, subscript, value, byValInfo);
758 }
759
760 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
761 {
762     VM& vm = exec->vm();
763     NativeCallFrameTracer tracer(&vm, exec);
764     
765     JSValue baseValue = JSValue::decode(encodedBaseValue);
766     JSValue subscript = JSValue::decode(encodedSubscript);
767     JSValue value = JSValue::decode(encodedValue);
768
769     putByVal(exec, baseValue, subscript, value, byValInfo);
770 }
771
772
773 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
774 {
775     VM& vm = exec->vm();
776     NativeCallFrameTracer tracer(&vm, exec);
777     
778     JSValue baseValue = JSValue::decode(encodedBaseValue);
779     JSValue subscript = JSValue::decode(encodedSubscript);
780     JSValue value = JSValue::decode(encodedValue);
781     RELEASE_ASSERT(baseValue.isObject());
782     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
783 }
784
// Slow path for op_call_eval. If the callee is the real global eval function,
// performs the eval and returns its result; otherwise returns the empty
// JSValue to signal that the JIT should fall back to an ordinary call.
EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    execCallee->setCodeBlock(0);
    
    // Not the built-in eval: tell the caller to perform a regular call.
    if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
        return JSValue::encode(JSValue());

    JSValue result = eval(execCallee);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    return JSValue::encode(result);
}
800
// Invokes a non-JS (host/native) callee on behalf of a call slow path.
// Returns an encoded pair of (machine-code address to jump to, frame
// disposition flag). On exception, the returned address is the stub that
// unwinds from the call slow path.
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);
    
        // A JS callee would have been handled before reaching this helper.
        ASSERT(callType != CallType::JS);
    
        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (UNLIKELY(scope.exception())) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            // Tail calls may reuse the caller's frame; other calls keep it.
            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
    
        // Not callable at all: throw TypeError via the slow-path throw stub.
        ASSERT(callType == CallType::None);
        throwException(exec, scope, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);
    
    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);
    
    // A JS constructor would have been handled before reaching this helper.
    ASSERT(constructType != ConstructType::JS);
    
    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (UNLIKELY(scope.exception())) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }
    
    // Not constructible: throw TypeError via the slow-path throw stub.
    ASSERT(constructType == ConstructType::None);
    throwException(exec, scope, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
863
// Slow path taken when an unlinked call IC fires. Resolves the callee,
// compiles it if necessary, and — once the site has been seen at least once —
// links the IC directly to the callee's entrypoint. Returns the entrypoint
// address plus a frame-disposition flag to the JIT trampoline.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    // Direct call sites use operationLinkDirectCall instead.
    RELEASE_ASSERT(!callLinkInfo->isDirect());
    
    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        throwScope.release();
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        // Skip the arity check only when we know enough arguments were passed.
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // Constructing a non-constructible function is an error, not a
        // compile request.
        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            throwException(exec, throwScope, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, *codeBlockSlot);
        ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(error));
        if (error) {
            throwException(exec, throwScope, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = *codeBlockSlot;
        // Varargs sites can't prove the argument count statically, so they
        // always take the arity-checking entrypoint.
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    // First firing only marks the site as seen; linking happens on the second.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);
    
    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
939
// Slow path for a direct call IC (callee's executable known at compile time)
// that has not yet been linked. Compiles the executable if needed and links
// the IC to the appropriate entrypoint. Returns nothing; on error an
// exception is thrown and the site stays unlinked.
void JIT_OPERATION operationLinkDirectCall(ExecState* exec, CallLinkInfo* callLinkInfo, JSFunction* callee)
{
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    RELEASE_ASSERT(callLinkInfo->isDirect());
    
    // This would happen if the executable died during GC but the CodeBlock did not die. That should
    // not happen because the CodeBlock should have a weak reference to any executable it uses for
    // this purpose.
    RELEASE_ASSERT(callLinkInfo->executable());
    
    // Having a CodeBlock indicates that this is linked. We shouldn't be taking this path if it's
    // linked.
    RELEASE_ASSERT(!callLinkInfo->codeBlock());
    
    // We just don't support this yet.
    RELEASE_ASSERT(!callLinkInfo->isVarargs());
    
    ExecutableBase* executable = callLinkInfo->executable();
    // The runtime callee must match what the compiler baked into the IC.
    RELEASE_ASSERT(callee->executable() == callLinkInfo->executable());

    JSScope* scope = callee->scopeUnchecked();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = nullptr;
    if (executable->isHostFunction())
        codePtr = executable->entrypointFor(kind, MustCheckArity);
    else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        RELEASE_ASSERT(isCall(kind) || functionExecutable->constructAbility() != ConstructAbility::CannotConstruct);
        
        JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, codeBlock);
        ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(error));
        if (error) {
            throwException(exec, throwScope, error);
            return;
        }
        // Skip the arity check only if the site always passes enough arguments.
        ArityCheckMode arity;
        unsigned argumentStackSlots = callLinkInfo->maxNumArguments();
        if (argumentStackSlots < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    
    linkDirectFor(exec, *callLinkInfo, codeBlock, codePtr);
}
993
994 inline SlowPathReturnType virtualForWithFunction(
995     ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
996 {
997     ExecState* exec = execCallee->callerFrame();
998     VM* vm = &exec->vm();
999     auto throwScope = DECLARE_THROW_SCOPE(*vm);
1000
1001     CodeSpecializationKind kind = callLinkInfo->specializationKind();
1002     NativeCallFrameTracer tracer(vm, exec);
1003
1004     JSValue calleeAsValue = execCallee->calleeAsValue();
1005     calleeAsFunctionCell = getJSFunction(calleeAsValue);
1006     if (UNLIKELY(!calleeAsFunctionCell))
1007         return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
1008     
1009     JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
1010     JSScope* scope = function->scopeUnchecked();
1011     ExecutableBase* executable = function->executable();
1012     if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
1013         bool isWebAssemblyExecutable = false;
1014 #if ENABLE(WEBASSEMBLY)
1015         isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
1016 #endif
1017         if (!isWebAssemblyExecutable) {
1018             FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
1019
1020             if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
1021                 throwException(exec, throwScope, createNotAConstructorError(exec, function));
1022                 return encodeResult(
1023                     vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
1024                     reinterpret_cast<void*>(KeepTheFrame));
1025             }
1026
1027             CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
1028             JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, function, scope, kind, *codeBlockSlot);
1029             if (error) {
1030                 throwException(exec, throwScope, error);
1031                 return encodeResult(
1032                     vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
1033                     reinterpret_cast<void*>(KeepTheFrame));
1034             }
1035         } else {
1036 #if ENABLE(WEBASSEMBLY)
1037             if (!isCall(kind)) {
1038                 throwException(exec, throwScope, createNotAConstructorError(exec, function));
1039                 return encodeResult(
1040                     vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
1041                     reinterpret_cast<void*>(KeepTheFrame));
1042             }
1043 #endif
1044         }
1045     }
1046     return encodeResult(executable->entrypointFor(
1047         kind, MustCheckArity).executableAddress(),
1048         reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
1049 }
1050
// Slow path for a polymorphic call IC: resolves the callee via the virtual
// call machinery, then links this callee variant into the polymorphic call
// stub so future calls with the same callee are fast.
SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ASSERT(callLinkInfo->specializationKind() == CodeForCall);
    JSCell* calleeAsFunctionCell;
    SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);

    linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
    
    return result;
}
1061
// Fully virtual call slow path: resolves the callee's entrypoint every time,
// without any linking. The resolved callee cell is not needed here.
SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    JSCell* calleeAsFunctionCellIgnored;
    return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
}
1067
1068 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1069 {
1070     VM* vm = &exec->vm();
1071     NativeCallFrameTracer tracer(vm, exec);
1072     
1073     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1074 }
1075
1076 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1077 {
1078     VM* vm = &exec->vm();
1079     NativeCallFrameTracer tracer(vm, exec);
1080
1081     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1082 }
1083
1084 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1085 {
1086     VM* vm = &exec->vm();
1087     NativeCallFrameTracer tracer(vm, exec);
1088
1089     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1090 }
1091
1092 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1093 {
1094     VM* vm = &exec->vm();
1095     NativeCallFrameTracer tracer(vm, exec);
1096
1097     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1098 }
1099
1100 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1101 {
1102     VM* vm = &exec->vm();
1103     NativeCallFrameTracer tracer(vm, exec);
1104
1105     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1106 }
1107
// Slow path for string equality on two string cells. The return convention
// differs by pointer width: 64-bit returns the boolean encoded as a JSValue,
// 32-bit returns it as a size_t.
#if USE(JSVALUE64)
EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    bool result = asString(left)->equal(exec, asString(right));
#if USE(JSVALUE64)
    return JSValue::encode(jsBoolean(result));
#else
    return result;
#endif
}
1124
1125 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
1126 {
1127     VM* vm = &exec->vm();
1128     NativeCallFrameTracer tracer(vm, exec);
1129     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
1130 }
1131
1132 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
1133 {
1134     VM* vm = &exec->vm();
1135     NativeCallFrameTracer tracer(vm, exec);
1136     return JSValue::encode(constructArray(exec, profile, values, size));
1137 }
1138
1139 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
1140 {
1141     VM* vm = &exec->vm();
1142     NativeCallFrameTracer tracer(vm, exec);
1143     JSValue sizeValue = JSValue::decode(size);
1144     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
1145 }
1146
1147 }
1148
1149 template<typename FunctionType>
1150 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
1151 {
1152     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1153     VM& vm = exec->vm();
1154     NativeCallFrameTracer tracer(&vm, exec);
1155     if (isInvalidated)
1156         return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1157     return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1158 }
1159
1160 extern "C" {
1161
// Creates a plain JSFunction; the reallocation watchpoint is still intact.
EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
}
1166
// Creates a plain JSFunction whose reallocation watchpoint has already fired.
EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
}
1171
// Creates a JSGeneratorFunction; the reallocation watchpoint is still intact.
EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
}
1176
// Creates a JSGeneratorFunction whose reallocation watchpoint has already fired.
EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
}
1181
// Creates a JSAsyncFunction; the reallocation watchpoint is still intact.
EncodedJSValue JIT_OPERATION operationNewAsyncFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, false);
}
1186
// Creates a JSAsyncFunction whose reallocation watchpoint has already fired.
EncodedJSValue JIT_OPERATION operationNewAsyncFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, true);
}
1191
1192 void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
1193 {
1194     VM* vm = &exec->vm();
1195     NativeCallFrameTracer tracer(vm, exec);
1196
1197     JSFunction* func = jsCast<JSFunction*>(funcCell);
1198     JSValue name = JSValue::decode(encodedName);
1199     func->setFunctionName(exec, name);
1200 }
1201
1202 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1203 {
1204     VM* vm = &exec->vm();
1205     NativeCallFrameTracer tracer(vm, exec);
1206
1207     return constructEmptyObject(exec, structure);
1208 }
1209
// Materializes a RegExpObject for op_new_regexp. Throws a SyntaxError (and
// returns undefined) if the RegExp recorded a parse error.
EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
{
    SuperSamplerScope superSamplerScope(false);
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    RegExp* regexp = static_cast<RegExp*>(regexpPtr);
    if (!regexp->isValid()) {
        throwException(exec, scope, createSyntaxError(exec, regexp->errorMessage()));
        return JSValue::encode(jsUndefined());
    }

    return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
}
1225
1226 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1227 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1228 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1229 // future, we can switch to using that then.
UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    // If the watchdog demands termination, throw the terminated-execution
    // exception; otherwise this is a no-op.
    if (UNLIKELY(vm.shouldTriggerTermination(exec)))
        throwException(exec, scope, createTerminatedExecutionException(&vm));

    // Return value is unused; see the comment above this function.
    return nullptr;
}
1241
1242 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookType)
1243 {
1244     VM& vm = exec->vm();
1245     NativeCallFrameTracer tracer(&vm, exec);
1246
1247     vm.interpreter->debug(exec, static_cast<DebugHookType>(debugHookType));
1248 }
1249
1250 #if ENABLE(DFG_JIT)
// Refreshes the code block's value predictions and schedules optimization to
// be reconsidered after another warm-up period.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1256
1257 SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
1258 {
1259     VM& vm = exec->vm();
1260     NativeCallFrameTracer tracer(&vm, exec);
1261
1262     // Defer GC for a while so that it doesn't run between when we enter into this
1263     // slow path and when we figure out the state of our code block. This prevents
1264     // a number of awkward reentrancy scenarios, including:
1265     //
1266     // - The optimized version of our code block being jettisoned by GC right after
1267     //   we concluded that we wanted to use it, but have not planted it into the JS
1268     //   stack yet.
1269     //
1270     // - An optimized version of our code block being installed just as we decided
1271     //   that it wasn't ready yet.
1272     //
1273     // Note that jettisoning won't happen if we already initiated OSR, because in
1274     // that case we would have already planted the optimized code block into the JS
1275     // stack.
1276     DeferGCForAWhile deferGC(vm.heap);
1277     
1278     CodeBlock* codeBlock = exec->codeBlock();
1279     if (codeBlock->jitType() != JITCode::BaselineJIT) {
1280         dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
1281         RELEASE_ASSERT_NOT_REACHED();
1282     }
1283     
1284     if (bytecodeIndex) {
1285         // If we're attempting to OSR from a loop, assume that this should be
1286         // separately optimized.
1287         codeBlock->m_shouldAlwaysBeInlined = false;
1288     }
1289
1290     if (Options::verboseOSR()) {
1291         dataLog(
1292             *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
1293             ", executeCounter = ", codeBlock->jitExecuteCounter(),
1294             ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
1295             ", exitCounter = ");
1296         if (codeBlock->hasOptimizedReplacement())
1297             dataLog(codeBlock->replacement()->osrExitCounter());
1298         else
1299             dataLog("N/A");
1300         dataLog("\n");
1301     }
1302
1303     if (!codeBlock->checkIfOptimizationThresholdReached()) {
1304         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("counter = ", codeBlock->jitExecuteCounter()));
1305         codeBlock->updateAllPredictions();
1306         if (Options::verboseOSR())
1307             dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
1308         return encodeResult(0, 0);
1309     }
1310     
1311     Debugger* debugger = codeBlock->globalObject()->debugger();
1312     if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
1313         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("debugger is stepping or has requests"));
1314         updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1315         return encodeResult(0, 0);
1316     }
1317
1318     if (codeBlock->m_shouldAlwaysBeInlined) {
1319         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should always be inlined"));
1320         updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1321         if (Options::verboseOSR())
1322             dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
1323         return encodeResult(0, 0);
1324     }
1325
1326     // We cannot be in the process of asynchronous compilation and also have an optimized
1327     // replacement.
1328     DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
1329     ASSERT(
1330         !worklist
1331         || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
1332         && codeBlock->hasOptimizedReplacement()));
1333
1334     DFG::Worklist::State worklistState;
1335     if (worklist) {
1336         // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
1337         // (i.e. compiled) code blocks. But if it completes ours, we also need to know
1338         // what the result was so that we don't plow ahead and attempt OSR or immediate
1339         // reoptimization. This will have already also set the appropriate JIT execution
1340         // count threshold depending on what happened, so if the compilation was anything
1341         // but successful we just want to return early. See the case for worklistState ==
1342         // DFG::Worklist::Compiled, below.
1343         
1344         // Note that we could have alternatively just called Worklist::compilationState()
1345         // here, and if it returned Compiled, we could have then called
1346         // completeAndScheduleOSR() below. But that would have meant that it could take
1347         // longer for code blocks to be completed: they would only complete when *their*
1348         // execution count trigger fired; but that could take a while since the firing is
1349         // racy. It could also mean that code blocks that never run again after being
1350         // compiled would sit on the worklist until next GC. That's fine, but it's
1351         // probably a waste of memory. Our goal here is to complete code blocks as soon as
1352         // possible in order to minimize the chances of us executing baseline code after
1353         // optimized code is already available.
1354         worklistState = worklist->completeAllReadyPlansForVM(
1355             vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
1356     } else
1357         worklistState = DFG::Worklist::NotKnown;
1358
1359     if (worklistState == DFG::Worklist::Compiling) {
1360         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiling"));
1361         // We cannot be in the process of asynchronous compilation and also have an optimized
1362         // replacement.
1363         RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
1364         codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
1365         return encodeResult(0, 0);
1366     }
1367
1368     if (worklistState == DFG::Worklist::Compiled) {
1369         // If we don't have an optimized replacement but we did just get compiled, then
1370         // the compilation failed or was invalidated, in which case the execution count
1371         // thresholds have already been set appropriately by
1372         // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
1373         // nothing left to do.
1374         if (!codeBlock->hasOptimizedReplacement()) {
1375             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiled and failed"));
1376             codeBlock->updateAllPredictions();
1377             if (Options::verboseOSR())
1378                 dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
1379             return encodeResult(0, 0);
1380         }
1381     } else if (codeBlock->hasOptimizedReplacement()) {
1382         if (Options::verboseOSR())
1383             dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
1384         // If we have an optimized replacement, then it must be the case that we entered
1385         // cti_optimize from a loop. That's because if there's an optimized replacement,
1386         // then all calls to this function will be relinked to the replacement and so
1387         // the prologue OSR will never fire.
1388         
1389         // This is an interesting threshold check. Consider that a function OSR exits
1390         // in the middle of a loop, while having a relatively low exit count. The exit
1391         // will reset the execution counter to some target threshold, meaning that this
1392         // code won't be reached until that loop heats up for >=1000 executions. But then
1393         // we do a second check here, to see if we should either reoptimize, or just
1394         // attempt OSR entry. Hence it might even be correct for
1395         // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
1396         // additional checking anyway, to reduce the amount of recompilation thrashing.
1397         if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
1398             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize from loop now"));
1399             if (Options::verboseOSR()) {
1400                 dataLog(
1401                     "Triggering reoptimization of ", *codeBlock,
1402                     "(", *codeBlock->replacement(), ") (in loop).\n");
1403             }
1404             codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
1405             return encodeResult(0, 0);
1406         }
1407     } else {
1408         if (!codeBlock->shouldOptimizeNow()) {
1409             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("insufficient profiling"));
1410             if (Options::verboseOSR()) {
1411                 dataLog(
1412                     "Delaying optimization for ", *codeBlock,
1413                     " because of insufficient profiling.\n");
1414             }
1415             return encodeResult(0, 0);
1416         }
1417
1418         if (Options::verboseOSR())
1419             dataLog("Triggering optimized compilation of ", *codeBlock, "\n");
1420
1421         unsigned numVarsWithValues;
1422         if (bytecodeIndex)
1423             numVarsWithValues = codeBlock->m_numCalleeLocals;
1424         else
1425             numVarsWithValues = 0;
1426         Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
1427         int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
1428         for (size_t i = 0; i < mustHandleValues.size(); ++i) {
1429             int operand = mustHandleValues.operandForIndex(i);
1430             if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
1431                 continue;
1432             mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
1433         }
1434
1435         CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
1436         CompilationResult result = DFG::compile(
1437             vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
1438             mustHandleValues, JITToDFGDeferredCompilationCallback::create());
1439         
1440         if (result != CompilationSuccessful) {
1441             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compilation failed"));
1442             return encodeResult(0, 0);
1443         }
1444     }
1445     
1446     CodeBlock* optimizedCodeBlock = codeBlock->replacement();
1447     ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
1448     
1449     if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
1450         CODEBLOCK_LOG_EVENT(optimizedCodeBlock, "osrEntry", ("at bc#", bytecodeIndex));
1451         if (Options::verboseOSR()) {
1452             dataLog(
1453                 "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
1454         }
1455
1456         codeBlock->optimizeSoon();
1457         codeBlock->unlinkedCodeBlock()->setDidOptimize(TrueTriState);
1458         return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
1459     }
1460
1461     if (Options::verboseOSR()) {
1462         dataLog(
1463             "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
1464             " succeeded, OSR failed, after a delay of ",
1465             codeBlock->optimizationDelayCounter(), ".\n");
1466     }
1467
1468     // Count the OSR failure as a speculation failure. If this happens a lot, then
1469     // reoptimize.
1470     optimizedCodeBlock->countOSRExit();
1471
1472     // We are a lot more conservative about triggering reoptimization after OSR failure than
1473     // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
1474     // already, then we really would like to reoptimize immediately. But this case covers
1475     // something else: there weren't many (or any) speculation failures before, but we just
1476     // failed to enter the speculative code because some variable had the wrong value or
1477     // because the OSR code decided for any spurious reason that it did not want to OSR
1478     // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
1479     // reoptimization trigger.
1480     if (optimizedCodeBlock->shouldReoptimizeNow()) {
1481         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize now"));
1482         if (Options::verboseOSR()) {
1483             dataLog(
1484                 "Triggering reoptimization of ", *codeBlock, " -> ",
1485                 *codeBlock->replacement(), " (after OSR fail).\n");
1486         }
1487         optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
1488         return encodeResult(0, 0);
1489     }
1490
1491     // OSR failed this time, but it might succeed next time! Let the code run a bit
1492     // longer and then try again.
1493     codeBlock->optimizeAfterWarmUp();
1494     
1495     CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("OSR failed"));
1496     return encodeResult(0, 0);
1497 }
1498 #endif
1499
1500 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1501 {
1502     VM& vm = exec->vm();
1503     NativeCallFrameTracer tracer(&vm, exec);
1504
1505     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1506     ASSERT(isJSArray(arrayValue));
1507     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1508 }
1509
// Distinguishes whether putAccessorByVal installs a getter or a setter.
enum class AccessorType {
    Getter,
    Setter
};
1514
1515 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1516 {
1517     VM& vm = exec->vm();
1518     auto scope = DECLARE_THROW_SCOPE(vm);
1519     auto propertyKey = subscript.toPropertyKey(exec);
1520     RETURN_IF_EXCEPTION(scope, void());
1521
1522     if (accessorType == AccessorType::Getter)
1523         base->putGetter(exec, propertyKey, accessor, attribute);
1524     else
1525         base->putSetter(exec, propertyKey, accessor, attribute);
1526 }
1527
1528 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1529 {
1530     VM& vm = exec->vm();
1531     NativeCallFrameTracer tracer(&vm, exec);
1532
1533     ASSERT(object && object->isObject());
1534     JSObject* baseObj = object->getObject();
1535
1536     ASSERT(getter->isObject());
1537     baseObj->putGetter(exec, uid, getter, options);
1538 }
1539
1540 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1541 {
1542     VM& vm = exec->vm();
1543     NativeCallFrameTracer tracer(&vm, exec);
1544
1545     ASSERT(object && object->isObject());
1546     JSObject* baseObj = object->getObject();
1547
1548     ASSERT(setter->isObject());
1549     baseObj->putSetter(exec, uid, setter, options);
1550 }
1551
1552 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1553 {
1554     VM& vm = exec->vm();
1555     NativeCallFrameTracer tracer(&vm, exec);
1556
1557     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1558 }
1559
1560 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1561 {
1562     VM& vm = exec->vm();
1563     NativeCallFrameTracer tracer(&vm, exec);
1564
1565     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1566 }
1567
1568 #if USE(JSVALUE64)
1569 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1570 {
1571     VM& vm = exec->vm();
1572     NativeCallFrameTracer tracer(&vm, exec);
1573
1574     ASSERT(object && object->isObject());
1575     JSObject* baseObj = asObject(object);
1576
1577     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1578
1579     JSValue getter = JSValue::decode(encodedGetterValue);
1580     JSValue setter = JSValue::decode(encodedSetterValue);
1581     ASSERT(getter.isObject() || getter.isUndefined());
1582     ASSERT(setter.isObject() || setter.isUndefined());
1583     ASSERT(getter.isObject() || setter.isObject());
1584
1585     if (!getter.isUndefined())
1586         accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1587     if (!setter.isUndefined())
1588         accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1589     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1590 }
1591
1592 #else
1593 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
1594 {
1595     VM& vm = exec->vm();
1596     NativeCallFrameTracer tracer(&vm, exec);
1597
1598     ASSERT(object && object->isObject());
1599     JSObject* baseObj = asObject(object);
1600
1601     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1602
1603     ASSERT(!getter || getter->isObject());
1604     ASSERT(!setter || setter->isObject());
1605     ASSERT(getter || setter);
1606
1607     if (getter)
1608         accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1609     if (setter)
1610         accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1611     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1612 }
1613 #endif
1614
1615 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1616 {
1617     VM& vm = exec->vm();
1618     NativeCallFrameTracer tracer(&vm, exec);
1619
1620     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1621     exec->uncheckedR(scopeReg) = scope->next();
1622 }
1623
1624 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1625 {
1626     VM& vm = exec->vm();
1627     NativeCallFrameTracer tracer(&vm, exec);
1628
1629     JSValue value = JSValue::decode(encodedValue);
1630     JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1631
1632     ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure()->typeInfo().implementsDefaultHasInstance());
1633
1634     if (constructor->hasInstance(exec, value, hasInstanceValue))
1635         return 1;
1636     return 0;
1637 }
1638
1639 }
1640
1641 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1642 {
1643     switch (object.structure()->typeInfo().type()) {
1644     case DirectArgumentsType: {
1645         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1646         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1647             return true;
1648         break;
1649     }
1650     case ScopedArgumentsType: {
1651         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1652         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1653             return true;
1654         break;
1655     }
1656     default:
1657         break;
1658     }
1659     return false;
1660 }
1661
// Common slow-path implementation of get_by_val. Tries, in order:
//   1. the fast own-property lookup for string subscripts,
//   2. indexed access for uint32 subscripts (patching the call site to the
//      string-specialized operation when the base is a JSString),
//   3. the fully generic property get.
// Updates profiling state (tookSlowPath, out-of-bounds) along the way so the
// JIT can decide whether to specialize this site later.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A cached-id stub exists but for a different id: mark the
                    // site as genuinely polymorphic.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // In-bounds character access on a string: repatch the call site
                // to the string-specialized operation for future calls.
                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
                return asString(baseValue)->getIndex(exec, i);
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            if (!canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        return baseValue.get(exec, i);
    }

    // Generic path: ToPropertyKey + get; both conversions may throw.
    baseValue.requireObjectCoercible(exec);
    RETURN_IF_EXCEPTION(scope, JSValue());
    auto property = subscript.toPropertyKey(exec);
    RETURN_IF_EXCEPTION(scope, JSValue());

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    return baseValue.get(exec, property);
}
1719
// Decides whether (and how) to specialize a get_by_val call site, compiling a
// patched fast path when profitable. Result meanings:
//   Optimized    - a specialized stub was compiled and installed;
//   SeenOnce     - a string/symbol key was recorded for caching on a repeat hit;
//   GiveUp       - the site looks polymorphic or unprofitable, stop trying;
//   NotOptimized - nothing happened this time (slowPathCount still advances).
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        // Int32-indexed access on an object: try to compile a stub specialized
        // to the array shape we are seeing now.
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        // Named access: record the identifier on first sight; compile an
        // id-specialized stub if the same id repeats, else treat the site as generic.
        // NOTE(review): toPropertyKey() can throw; there is no exception check
        // here -- confirm callers tolerate a pending exception on this path.
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1791
1792 extern "C" {
1793
1794 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1795 {
1796     VM& vm = exec->vm();
1797     NativeCallFrameTracer tracer(&vm, exec);
1798     JSValue baseValue = JSValue::decode(encodedBase);
1799     JSValue subscript = JSValue::decode(encodedSubscript);
1800
1801     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1802     return JSValue::encode(result);
1803 }
1804
1805 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1806 {
1807     VM& vm = exec->vm();
1808     NativeCallFrameTracer tracer(&vm, exec);
1809
1810     JSValue baseValue = JSValue::decode(encodedBase);
1811     JSValue subscript = JSValue::decode(encodedSubscript);
1812     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1813     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1814         // Don't ever try to optimize.
1815         byValInfo->tookSlowPath = true;
1816         ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
1817     }
1818
1819     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1820 }
1821
// Slow path for has_indexed_property (indexed `in`) that still tries to
// self-optimize: if the observed array shape differs from what was compiled,
// recompile the fast path for the new shape; after repeated slow-path trips
// (or an index-intercepting object) repatch the call site to the generic
// variant. Finally answers whether |index| is present on the object.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);
    
    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            // The shape we compiled for no longer matches reality; compile a
            // new fast path for the observed mode.
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }
    
    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
        }
    }

    // Actual semantics: fast in-bounds check first, then the generic lookup.
    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
}
1872     
1873 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1874 {
1875     VM& vm = exec->vm();
1876     NativeCallFrameTracer tracer(&vm, exec);
1877     JSValue baseValue = JSValue::decode(encodedBase);
1878     JSValue subscript = JSValue::decode(encodedSubscript);
1879     
1880     ASSERT(baseValue.isObject());
1881     ASSERT(subscript.isUInt32());
1882
1883     JSObject* object = asObject(baseValue);
1884     uint32_t index = subscript.asUInt32();
1885     if (object->canGetIndexQuickly(index))
1886         return JSValue::encode(JSValue(JSValue::JSTrue));
1887
1888     if (!canAccessArgumentIndexQuickly(*object, index)) {
1889         // FIXME: This will make us think that in-bounds typed array accesses are actually
1890         // out-of-bounds.
1891         // https://bugs.webkit.org/show_bug.cgi?id=149886
1892         byValInfo->arrayProfile->setOutOfBounds();
1893     }
1894     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript.asUInt32(), PropertySlot::InternalMethodType::GetOwnProperty)));
1895 }
1896     
// Specialized get_by_val slow path installed when the base was observed to be
// a JSString accessed with in-bounds indices. If the base stops being a string
// it un-patches the call site (back to the optimize or generic variant,
// depending on whether a stub was already compiled).
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            if (!isJSString(baseValue)) {
                // The specialization no longer applies; repatch the call site.
                ASSERT(exec->bytecodeOffset());
                ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
            }
        }
    } else {
        // Non-uint32 subscript: fall back to the fully generic property get.
        baseValue.requireObjectCoercible(exec);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        auto property = subscript.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
1927
1928 EncodedJSValue JIT_OPERATION operationDeleteByIdJSResult(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
1929 {
1930     return JSValue::encode(jsBoolean(operationDeleteById(exec, base, uid)));
1931 }
1932
// Implements `delete base.uid`. Returns whether the property was deleted; in
// strict mode a failed delete throws a TypeError instead of returning false.
size_t JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    // toObject() yields null on failure (presumably with an exception already
    // pending -- confirm against JSValue::toObject); bail out early then.
    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    if (!baseObj)
        return false;
    bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, Identifier::fromUid(&vm, uid));
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        throwTypeError(exec, scope, ASCIILiteral(UnableToDeletePropertyError));
    return couldDelete;
}
1947
1948 EncodedJSValue JIT_OPERATION operationDeleteByValJSResult(ExecState* exec, EncodedJSValue base,  EncodedJSValue key)
1949 {
1950     return JSValue::encode(jsBoolean(operationDeleteByVal(exec, base, key)));
1951 }
1952
// Implements `delete base[key]`. Takes the indexed fast path when the key is a
// uint32; otherwise converts the key to a property name (which may throw). In
// strict mode a failed delete throws a TypeError instead of returning false.
size_t JIT_OPERATION operationDeleteByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedKey)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    JSValue key = JSValue::decode(encodedKey);
    if (!baseObj)
        return false;

    bool couldDelete;
    uint32_t index;
    if (key.getUInt32(index))
        couldDelete = baseObj->methodTable(vm)->deletePropertyByIndex(baseObj, exec, index);
    else {
        // NOTE(review): this first check fires before any operation on this
        // branch has run; it appears to guard a pre-existing pending
        // exception -- confirm which caller state it is defending against.
        RETURN_IF_EXCEPTION(scope, false);
        Identifier property = key.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, false);
        couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, property);
    }
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        throwTypeError(exec, scope, ASCIILiteral(UnableToDeletePropertyError));
    return couldDelete;
}
1978
1979 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
1980 {
1981     VM& vm = exec->vm();
1982     NativeCallFrameTracer tracer(&vm, exec);
1983     JSValue value = JSValue::decode(encodedValue);
1984     JSValue proto = JSValue::decode(encodedProto);
1985     
1986     bool result = JSObject::defaultHasInstance(exec, value, proto);
1987     return JSValue::encode(jsBoolean(result));
1988 }
1989
1990 int32_t JIT_OPERATION operationSizeFrameForForwardArguments(ExecState* exec, EncodedJSValue, int32_t numUsedStackSlots, int32_t)
1991 {
1992     VM& vm = exec->vm();
1993     NativeCallFrameTracer tracer(&vm, exec);
1994     return sizeFrameForForwardArguments(exec, vm, numUsedStackSlots);
1995 }
1996
1997 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
1998 {
1999     VM& vm = exec->vm();
2000     NativeCallFrameTracer tracer(&vm, exec);
2001     JSValue arguments = JSValue::decode(encodedArguments);
2002     return sizeFrameForVarargs(exec, vm, arguments, numUsedStackSlots, firstVarArgOffset);
2003 }
2004
2005 CallFrame* JIT_OPERATION operationSetupForwardArgumentsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue, int32_t, int32_t length)
2006 {
2007     VM& vm = exec->vm();
2008     NativeCallFrameTracer tracer(&vm, exec);
2009     setupForwardArgumentsFrame(exec, newCallFrame, length);
2010     return newCallFrame;
2011 }
2012
2013 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
2014 {
2015     VM& vm = exec->vm();
2016     NativeCallFrameTracer tracer(&vm, exec);
2017     JSValue arguments = JSValue::decode(encodedArguments);
2018     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
2019     return newCallFrame;
2020 }
2021
2022 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
2023 {
2024     VM& vm = exec->vm();
2025     NativeCallFrameTracer tracer(&vm, exec);
2026     JSObject* obj = JSValue::decode(value).toObject(exec);
2027     if (!obj)
2028         return JSValue::encode(JSValue());
2029     return JSValue::encode(obj);
2030 }
2031
2032 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2033 {
2034     VM& vm = exec->vm();
2035     NativeCallFrameTracer tracer(&vm, exec);
2036     JSValue key = JSValue::decode(encodedKey);
2037     CodeBlock* codeBlock = exec->codeBlock();
2038
2039     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
2040     void* result = jumpTable.ctiDefault.executableAddress();
2041
2042     if (key.isString()) {
2043         StringImpl* value = asString(key)->value(exec).impl();
2044         if (value->length() == 1)
2045             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
2046     }
2047
2048     return reinterpret_cast<char*>(result);
2049 }
2050
2051 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2052 {
2053     VM& vm = exec->vm();
2054     NativeCallFrameTracer tracer(&vm, exec);
2055     JSValue key = JSValue::decode(encodedKey);
2056     CodeBlock* codeBlock = exec->codeBlock();
2057
2058     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
2059     void* result;
2060     if (key.isInt32())
2061         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
2062     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
2063         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
2064     else
2065         result = jumpTable.ctiDefault.executableAddress();
2066     return reinterpret_cast<char*>(result);
2067 }
2068
2069 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2070 {
2071     VM& vm = exec->vm();
2072     NativeCallFrameTracer tracer(&vm, exec);
2073     JSValue key = JSValue::decode(encodedKey);
2074     CodeBlock* codeBlock = exec->codeBlock();
2075
2076     void* result;
2077     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
2078
2079     if (key.isString()) {
2080         StringImpl* value = asString(key)->value(exec).impl();
2081         result = jumpTable.ctiForValue(value).executableAddress();
2082     } else
2083         result = jumpTable.ctiDefault.executableAddress();
2084
2085     return reinterpret_cast<char*>(result);
2086 }
2087
// Slow path for op_get_from_scope: looks up `ident` on the resolved scope
// object, honoring the resolve mode and TDZ rules encoded in GetPutInfo, and
// opportunistically primes the global-scope access cache.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto throwScope = DECLARE_THROW_SCOPE(vm);

    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    // Operand layout: pc[2] = scope register, pc[3] = identifier index,
    // pc[4] = GetPutInfo bits.
    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    GetPutInfo getPutInfo(pc[4].u.operand);

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    return JSValue::encode(scope->getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (!found) {
            // Unresolvable reference: only throw when the bytecode demands it.
            if (getPutInfo.resolveMode() == ThrowIfNotFound)
                throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
            return jsUndefined();
        }

        JSValue result = JSValue();
        if (scope->isGlobalLexicalEnvironment()) {
            // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
            result = slot.getValue(exec, ident);
            if (result == jsTDZValue()) {
                throwException(exec, throwScope, createTDZError(exec));
                return jsUndefined();
            }
        }

        // Try to upgrade this access to a faster cached form for future runs.
        CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);

        // If the TDZ branch above didn't already fetch the value, fetch it now.
        if (!result)
            return slot.getValue(exec, ident);
        return result;
    }));
}
2128
// Slow path for op_put_to_scope: stores `value` into the resolved scope,
// handling local closure variables, global-lexical TDZ checks, unresolvable
// reference errors, and inline-cache priming.
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto throwScope = DECLARE_THROW_SCOPE(vm);

    Instruction* pc = bytecodePC;

    // Operand layout: pc[1] = scope register, pc[2] = identifier index,
    // pc[3] = value register, pc[4] = GetPutInfo bits, pc[5] = watchpoint set,
    // pc[6] = scope offset (LocalClosureVar only).
    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        // Direct store into a lexical-environment slot; fire the variable's
        // watchpoint so dependent compiled code can deoptimize.
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
        if (WatchpointSet* set = pc[5].u.watchpointSet)
            set->touch(vm, "Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    if (hasProperty
        && scope->isGlobalLexicalEnvironment()
        && !isInitialization(getPutInfo.initializationMode())) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            throwException(exec, throwScope, createTDZError(exec));
            return;
        }
    }

    // Assigning to an unresolvable reference throws in ThrowIfNotFound mode.
    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, isInitialization(getPutInfo.initializationMode()));
    scope->methodTable()->put(scope, exec, ident, value, slot);
    
    RETURN_IF_EXCEPTION(throwScope, void());

    // The put succeeded; try to cache this access for future executions.
    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
}
2179
2180 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
2181 {
2182     VM* vm = &exec->vm();
2183     NativeCallFrameTracer tracer(vm, exec);
2184     auto scope = DECLARE_THROW_SCOPE(*vm);
2185
2186     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
2187     throwException(exec, scope, exceptionValue);
2188
2189     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
2190     genericUnwind(vm, exec);
2191 }
2192
2193 char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState* exec, JSObject* object)
2194 {
2195     VM& vm = exec->vm();
2196     NativeCallFrameTracer tracer(&vm, exec);
2197
2198     ASSERT(!object->structure()->outOfLineCapacity());
2199     Butterfly* result = object->growOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
2200     object->setButterflyWithoutChangingStructure(vm, result);
2201     return reinterpret_cast<char*>(result);
2202 }
2203
2204 char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* exec, JSObject* object, size_t newSize)
2205 {
2206     VM& vm = exec->vm();
2207     NativeCallFrameTracer tracer(&vm, exec);
2208
2209     Butterfly* result = object->growOutOfLineStorage(vm, object->structure()->outOfLineCapacity(), newSize);
2210     object->setButterflyWithoutChangingStructure(vm, result);
2211     return reinterpret_cast<char*>(result);
2212 }
2213
2214 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
2215 {
2216     VM* vm = &exec->vm();
2217     NativeCallFrameTracer tracer(vm, exec);
2218     vm->heap.flushWriteBarrierBuffer(cell);
2219 }
2220
2221 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2222 {
2223     VM* vm = &exec->vm();
2224     NativeCallFrameTracer tracer(vm, exec);
2225     vm->heap.writeBarrier(cell);
2226 }
2227
2228 void JIT_OPERATION operationWriteBarrierSlowPath(ExecState* exec, JSCell* cell)
2229 {
2230     VM* vm = &exec->vm();
2231     NativeCallFrameTracer tracer(vm, exec);
2232     vm->heap.writeBarrierSlowPath(cell);
2233 }
2234
// Finds the handler for the pending exception; the resume PC is delivered
// out-of-band in vm->targetMachinePCForThrow rather than as a return value.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2241
// Like lookupExceptionHandler(), but begins unwinding from the caller's frame.
// Note there is deliberately no NativeCallFrameTracer here; topCallFrame is
// set by hand to the caller before unwinding.
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    vm->topCallFrame = exec->callerFrame();
    genericUnwind(vm, exec, UnwindFromCallerFrame);
    ASSERT(vm->targetMachinePCForThrow);
}
2248
// Entry point used by JIT code when an exception check fails: performs the
// generic unwind, leaving resume state in the VM.
void JIT_OPERATION operationVMHandleException(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
}
2255
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
#if COMPILER(GCC_OR_CLANG)
    // The return address uniquely identifies this exception-check site so the
    // fuzzer can decide whether to inject an exception here.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, "JITOperations", returnPC);
#endif // COMPILER(GCC_OR_CLANG)
}
2270
2271 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2272 {
2273     VM& vm = exec->vm();
2274     NativeCallFrameTracer tracer(&vm, exec);
2275     JSValue baseValue = JSValue::decode(encodedBaseValue);
2276     if (baseValue.isUndefinedOrNull())
2277         return JSValue::encode(jsBoolean(false));
2278
2279     JSObject* base = baseValue.toObject(exec);
2280     if (!base)
2281         return JSValue::encode(JSValue());
2282     return JSValue::encode(jsBoolean(base->hasPropertyGeneric(exec, asString(propertyName)->toIdentifier(exec), PropertySlot::InternalMethodType::GetOwnProperty)));
2283 }
2284
2285 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2286 {
2287     VM& vm = exec->vm();
2288     NativeCallFrameTracer tracer(&vm, exec);
2289     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2290     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript, PropertySlot::InternalMethodType::GetOwnProperty)));
2291 }
2292     
2293 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2294 {
2295     VM& vm = exec->vm();
2296     NativeCallFrameTracer tracer(&vm, exec);
2297
2298     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2299
2300     return propertyNameEnumerator(exec, base);
2301 }
2302
2303 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2304 {
2305     VM& vm = exec->vm();
2306     NativeCallFrameTracer tracer(&vm, exec);
2307     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2308     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2309     return JSValue::encode(propertyName ? propertyName : jsNull());
2310 }
2311
2312 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2313 {
2314     VM& vm = exec->vm();
2315     NativeCallFrameTracer tracer(&vm, exec);
2316     return jsString(exec, Identifier::from(exec, index).string());
2317 }
2318
2319 ALWAYS_INLINE static EncodedJSValue unprofiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2320 {
2321     VM* vm = &exec->vm();
2322     NativeCallFrameTracer tracer(vm, exec);
2323     
2324     JSValue op1 = JSValue::decode(encodedOp1);
2325     JSValue op2 = JSValue::decode(encodedOp2);
2326     
2327     return JSValue::encode(jsAdd(exec, op1, op2));
2328 }
2329
2330 ALWAYS_INLINE static EncodedJSValue profiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile)
2331 {
2332     VM* vm = &exec->vm();
2333     NativeCallFrameTracer tracer(vm, exec);
2334     
2335     JSValue op1 = JSValue::decode(encodedOp1);
2336     JSValue op2 = JSValue::decode(encodedOp2);
2337
2338     arithProfile.observeLHSAndRHS(op1, op2);
2339     JSValue result = jsAdd(exec, op1, op2);
2340     arithProfile.observeResult(result);
2341
2342     return JSValue::encode(result);
2343 }
2344
// Generic (non-IC) slow path for op_add without result profiling.
EncodedJSValue JIT_OPERATION operationValueAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    return unprofiledAdd(exec, encodedOp1, encodedOp2);
}
2349
// Generic (non-IC) slow path for op_add with an explicit ArithProfile.
EncodedJSValue JIT_OPERATION operationValueAddProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
{
    ASSERT(arithProfile);
    return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
}
2355
// First slow-path hit for a profiled add IC: records operand types, compiles
// the out-of-line IC stub, then performs the add and records the result type.
EncodedJSValue JIT_OPERATION operationValueAddProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    ArithProfile* arithProfile = addIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHSAndRHS(op1, op2);
    // Regenerate the IC out of line; its slow path becomes the NoOptimize
    // variant so we don't keep recompiling on every slow-path hit.
    auto nonOptimizeVariant = operationValueAddProfiledNoOptimize;
    addIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif
    
    JSValue result = jsAdd(exec, op1, op2);
    arithProfile->observeResult(result);

    return JSValue::encode(result);
}
2379
// Slow path installed once the profiled add IC has been generated out of line:
// profiles operands/result but never re-triggers IC compilation.
EncodedJSValue JIT_OPERATION operationValueAddProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = addIC->arithProfile();
    ASSERT(arithProfile);
    return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
}
2389
// First slow-path hit for an unprofiled add IC: compiles the out-of-line IC
// stub (observing operand types if a profile is attached), then performs the add.
EncodedJSValue JIT_OPERATION operationValueAddOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    // Future slow-path hits go to the NoOptimize variant so we compile at most once here.
    auto nonOptimizeVariant = operationValueAddNoOptimize;
    if (ArithProfile* arithProfile = addIC->arithProfile())
        arithProfile->observeLHSAndRHS(op1, op2);
    addIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return JSValue::encode(jsAdd(exec, op1, op2));
}
2409
2410 EncodedJSValue JIT_OPERATION operationValueAddNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC*)
2411 {
2412     VM* vm = &exec->vm();
2413     NativeCallFrameTracer tracer(vm, exec);
2414     
2415     JSValue op1 = JSValue::decode(encodedOp1);
2416     JSValue op2 = JSValue::decode(encodedOp2);
2417     
2418     JSValue result = jsAdd(exec, op1, op2);
2419
2420     return JSValue::encode(result);
2421 }
2422
2423 ALWAYS_INLINE static EncodedJSValue unprofiledMul(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2424 {
2425     auto scope = DECLARE_THROW_SCOPE(vm);
2426     JSValue op1 = JSValue::decode(encodedOp1);
2427     JSValue op2 = JSValue::decode(encodedOp2);
2428
2429     double a = op1.toNumber(exec);
2430     RETURN_IF_EXCEPTION(scope, encodedJSValue());
2431     double b = op2.toNumber(exec);
2432     return JSValue::encode(jsNumber(a * b));
2433 }
2434
2435 ALWAYS_INLINE static EncodedJSValue profiledMul(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
2436 {
2437     auto scope = DECLARE_THROW_SCOPE(vm);
2438     JSValue op1 = JSValue::decode(encodedOp1);
2439     JSValue op2 = JSValue::decode(encodedOp2);
2440
2441     if (shouldObserveLHSAndRHSTypes)
2442         arithProfile.observeLHSAndRHS(op1, op2);
2443
2444     double a = op1.toNumber(exec);
2445     RETURN_IF_EXCEPTION(scope, encodedJSValue());
2446     double b = op2.toNumber(exec);
2447     RETURN_IF_EXCEPTION(scope, encodedJSValue());
2448     
2449     JSValue result = jsNumber(a * b);
2450     arithProfile.observeResult(result);
2451     return JSValue::encode(result);
2452 }
2453
2454 EncodedJSValue JIT_OPERATION operationValueMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2455 {
2456     VM* vm = &exec->vm();
2457     NativeCallFrameTracer tracer(vm, exec);
2458
2459     return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
2460 }
2461
2462 EncodedJSValue JIT_OPERATION operationValueMulNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC*)
2463 {
2464     VM* vm = &exec->vm();
2465     NativeCallFrameTracer tracer(vm, exec);
2466
2467     return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
2468 }
2469
// First slow-path hit for an unprofiled mul IC: compiles the out-of-line IC
// stub (observing operand types if a profile is attached), then multiplies.
EncodedJSValue JIT_OPERATION operationValueMulOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // Future slow-path hits go to the NoOptimize variant so we compile at most once here.
    auto nonOptimizeVariant = operationValueMulNoOptimize;
    if (ArithProfile* arithProfile = mulIC->arithProfile())
        arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    mulIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
}
2486
2487 EncodedJSValue JIT_OPERATION operationValueMulProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2488 {
2489     VM* vm = &exec->vm();
2490     NativeCallFrameTracer tracer(vm, exec);
2491
2492     ASSERT(arithProfile);
2493     return profiledMul(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2494 }
2495
// First slow-path hit for a profiled mul IC: records operand types, compiles
// the out-of-line IC stub, then multiplies (suppressing duplicate operand
// observation in profiledMul since we already observed them here).
EncodedJSValue JIT_OPERATION operationValueMulProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = mulIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    // Future slow-path hits go to the NoOptimize variant so we compile at most once here.
    auto nonOptimizeVariant = operationValueMulProfiledNoOptimize;
    mulIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return profiledMul(*vm, exec, encodedOp1, encodedOp2, *arithProfile, false);
}
2513
2514 EncodedJSValue JIT_OPERATION operationValueMulProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
2515 {
2516     VM* vm = &exec->vm();
2517     NativeCallFrameTracer tracer(vm, exec);
2518
2519     ArithProfile* arithProfile = mulIC->arithProfile();
2520     ASSERT(arithProfile);
2521     return profiledMul(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2522 }
2523
2524 ALWAYS_INLINE static EncodedJSValue unprofiledNegate(ExecState* exec, EncodedJSValue encodedOperand)
2525 {
2526     VM& vm = exec->vm();
2527     auto scope = DECLARE_THROW_SCOPE(vm);
2528     NativeCallFrameTracer tracer(&vm, exec);
2529     
2530     JSValue operand = JSValue::decode(encodedOperand);
2531     double number = operand.toNumber(exec);
2532     if (UNLIKELY(scope.exception()))
2533         return JSValue::encode(JSValue());
2534     return JSValue::encode(jsNumber(-number));
2535 }
2536
2537 ALWAYS_INLINE static EncodedJSValue profiledNegate(ExecState* exec, EncodedJSValue encodedOperand, ArithProfile& arithProfile)
2538 {
2539     VM& vm = exec->vm();
2540     auto scope = DECLARE_THROW_SCOPE(vm);
2541     NativeCallFrameTracer tracer(&vm, exec);
2542
2543     JSValue operand = JSValue::decode(encodedOperand);
2544     arithProfile.observeLHS(operand);
2545     double number = operand.toNumber(exec);
2546     if (UNLIKELY(scope.exception()))
2547         return JSValue::encode(JSValue());
2548
2549     JSValue result = jsNumber(-number);
2550     arithProfile.observeResult(result);
2551     return JSValue::encode(result);
2552 }
2553
// Generic (non-IC) slow path for op_negate without profiling.
EncodedJSValue JIT_OPERATION operationArithNegate(ExecState* exec, EncodedJSValue operand)
{
    return unprofiledNegate(exec, operand);
}
2558
// Generic (non-IC) slow path for op_negate with an explicit ArithProfile.
EncodedJSValue JIT_OPERATION operationArithNegateProfiled(ExecState* exec, EncodedJSValue operand, ArithProfile* arithProfile)
{
    ASSERT(arithProfile);
    return profiledNegate(exec, operand, *arithProfile);
}
2564
// First slow-path hit for a profiled negate IC: records the operand type,
// compiles the out-of-line IC stub (whose slow path becomes the plain Profiled
// variant), then negates and records the result type.
EncodedJSValue JIT_OPERATION operationArithNegateProfiledOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);
    
    JSValue operand = JSValue::decode(encodedOperand);

    ArithProfile* arithProfile = negIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHS(operand);
    negIC->generateOutOfLine(vm, exec->codeBlock(), operationArithNegateProfiled);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif
    
    // Returns the empty value if ToNumber throws.
    double number = operand.toNumber(exec);
    if (UNLIKELY(scope.exception()))
        return JSValue::encode(JSValue());
    JSValue result = jsNumber(-number);
    arithProfile->observeResult(result);
    return JSValue::encode(result);
}
2589
// First slow-path hit for an unprofiled negate IC: compiles the out-of-line IC
// stub (observing the operand type if a profile is attached), then negates.
EncodedJSValue JIT_OPERATION operationArithNegateOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue operand = JSValue::decode(encodedOperand);

    if (ArithProfile* arithProfile = negIC->arithProfile())
        arithProfile->observeLHS(operand);
    negIC->generateOutOfLine(vm, exec->codeBlock(), operationArithNegate);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    // Returns the empty value if ToNumber throws.
    double number = operand.toNumber(exec);
    if (UNLIKELY(scope.exception()))
        return JSValue::encode(JSValue());
    return JSValue::encode(jsNumber(-number));
}
2611
2612 ALWAYS_INLINE static EncodedJSValue unprofiledSub(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2613 {
2614     auto scope = DECLARE_THROW_SCOPE(vm);
2615     JSValue op1 = JSValue::decode(encodedOp1);
2616     JSValue op2 = JSValue::decode(encodedOp2);
2617
2618     double a = op1.toNumber(exec);
2619     RETURN_IF_EXCEPTION(scope, encodedJSValue());
2620     double b = op2.toNumber(exec);
2621     return JSValue::encode(jsNumber(a - b));
2622 }
2623
2624 ALWAYS_INLINE static EncodedJSValue profiledSub(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
2625 {
2626     auto scope = DECLARE_THROW_SCOPE(vm);
2627     JSValue op1 = JSValue::decode(encodedOp1);
2628     JSValue op2 = JSValue::decode(encodedOp2);
2629
2630     if (shouldObserveLHSAndRHSTypes)
2631         arithProfile.observeLHSAndRHS(op1, op2);
2632
2633     double a = op1.toNumber(exec);
2634     RETURN_IF_EXCEPTION(scope, encodedJSValue());
2635     double b = op2.toNumber(exec);
2636     RETURN_IF_EXCEPTION(scope, encodedJSValue());
2637     
2638     JSValue result = jsNumber(a - b);
2639     arithProfile.observeResult(result);
2640     return JSValue::encode(result);
2641 }
2642
2643 EncodedJSValue JIT_OPERATION operationValueSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2644 {
2645     VM* vm = &exec->vm();
2646     NativeCallFrameTracer tracer(vm, exec);
2647     return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
2648 }
2649
2650 EncodedJSValue JIT_OPERATION operationValueSubProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2651 {
2652     ASSERT(arithProfile);
2653
2654     VM* vm = &exec->vm();
2655     NativeCallFrameTracer tracer(vm, exec);
2656
2657     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2658 }
2659
// First slow-path hit for an unprofiled sub IC: compiles the out-of-line IC
// stub (observing operand types if a profile is attached), then subtracts.
EncodedJSValue JIT_OPERATION operationValueSubOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // Future slow-path hits go to the NoOptimize variant so we compile at most once here.
    auto nonOptimizeVariant = operationValueSubNoOptimize;
    if (ArithProfile* arithProfile = subIC->arithProfile())
        arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    subIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
}
2676
2677 EncodedJSValue JIT_OPERATION operationValueSubNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC*)
2678 {
2679     VM* vm = &exec->vm();
2680     NativeCallFrameTracer tracer(vm, exec);
2681
2682     return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
2683 }
2684
// First slow-path hit for a profiled sub IC: records operand types, compiles
// the out-of-line IC stub, then subtracts (suppressing duplicate operand
// observation in profiledSub since we already observed them here).
EncodedJSValue JIT_OPERATION operationValueSubProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = subIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    // Future slow-path hits go to the NoOptimize variant so we compile at most once here.
    auto nonOptimizeVariant = operationValueSubProfiledNoOptimize;
    subIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile, false);
}
2702
2703 EncodedJSValue JIT_OPERATION operationValueSubProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2704 {
2705     VM* vm = &exec->vm();
2706     NativeCallFrameTracer tracer(vm, exec);
2707
2708     ArithProfile* arithProfile = subIC->arithProfile();
2709     ASSERT(arithProfile);
2710     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2711 }
2712
// Drains the type profiler log when it fills up during baseline JIT execution.
void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    vm.typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
}
2719
// Flushes the ShadowChicken (debugger shadow stack) log into its shadow frames.
void JIT_OPERATION operationProcessShadowChickenLog(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    vm.shadowChicken().update(vm, exec);
}
2726
2727 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2728 {
2729     VM& vm = exec->vm();
2730     NativeCallFrameTracer tracer(&vm, exec);
2731     auto scope = DECLARE_THROW_SCOPE(vm);
2732     RELEASE_ASSERT(!!scope.exception());
2733
2734     if (isTerminatedExecutionException(scope.exception())) {
2735         genericUnwind(&vm, exec);
2736         return 1;
2737     }
2738     return 0;
2739 }
2740
2741 } // extern "C"
2742
2743 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2744 // definition of getHostCallReturnValue() below because the Windows build
2745 // requires it.
2746 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2747 {
2748     if (!exec)
2749         return JSValue::encode(JSValue());
2750     return JSValue::encode(exec->vm().hostCallReturnValue);
2751 }
2752
// getHostCallReturnValue: per-architecture trampolines. Each stub materializes
// an argument pointing just below the current stack pointer (see the -8/-16
// adjustments) and tail-calls getHostCallReturnValueWithExecState(), which
// reads vm.hostCallReturnValue. Keep the asm byte-exact: these are ABI- and
// layout-sensitive.
#if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "lea -8(%rsp), %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(X86)
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "mov %esp, %eax\n"
    "leal -4(%esp), %esp\n"
    "push %eax\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "sub r0, sp, #8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
     "sub x0, sp, #16" "\n"
     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)

// PIC builds must load the callee address into $t9 per the MIPS o32 ABI.
#if WTF_MIPS_PIC
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "addi $a0, $sp, -8" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC_OR_CLANG) && CPU(SH4)

#define SH4_SCRATCH_REGISTER "r11"

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r15, r4" "\n"
    "add -8, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm lea eax, [esp - 4]
        __asm mov [esp + 4], eax;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
2863
2864 } // namespace JSC
2865
2866 #endif // ENABLE(JIT)