We should have a way of profiling when a get_by_id is pure and to emit a PureGetById...
Source/JavaScriptCore/jit/JITOperations.cpp
/*
 * Copyright (C) 2013-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JITOperations.h"

#if ENABLE(JIT)

#include "ArithProfile.h"
#include "ArrayConstructor.h"
#include "CommonSlowPaths.h"
#include "DFGCompilationMode.h"
#include "DFGDriver.h"
#include "DFGOSREntry.h"
#include "DFGThunks.h"
#include "DFGWorklist.h"
#include "Debugger.h"
#include "DirectArguments.h"
#include "Error.h"
#include "ErrorHandlingScope.h"
#include "ExceptionFuzz.h"
#include "GetterSetter.h"
#include "HostCallReturnValue.h"
#include "ICStats.h"
#include "Interpreter.h"
#include "JIT.h"
#include "JITExceptions.h"
#include "JITToDFGDeferredCompilationCallback.h"
#include "JSAsyncFunction.h"
#include "JSCInlines.h"
#include "JSGeneratorFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSLexicalEnvironment.h"
#include "JSPropertyNameEnumerator.h"
#include "ObjectConstructor.h"
#include "PolymorphicAccess.h"
#include "PropertyName.h"
#include "RegExpObject.h"
#include "Repatch.h"
#include "ScopedArguments.h"
#include "ShadowChicken.h"
#include "StructureStubInfo.h"
#include "SuperSampler.h"
#include "TestRunnerUtils.h"
#include "TypeProfilerLog.h"
#include "VMInlines.h"
#include <wtf/InlineASM.h>

namespace JSC {

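// Shared slow path for the PureGetById family below. The lookup runs as a
// VMInquiry so that no user-visible code executes during the probe. If the
// slot could have side effects (an accessor, a custom getter, or anything
// tainted by an opaque object), the access is not "pure": we set the baseline
// stub's didSideEffects flag, jettison this optimized code block, and redo the
// access as an ordinary, effectful get. Illustrative JS (not from this file):
// `o.x` on `{ x: 1 }` stays pure, while `o.x` on
// `{ get x() { return effect(); } }` takes the jettison path.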
ALWAYS_INLINE static EncodedJSValue pureGetByIdCommon(VM& vm, ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid, const std::function<void (const PropertySlot&, const Identifier&)>& function = [] (const PropertySlot&, const Identifier&) { })
{
    Identifier ident = Identifier::fromUid(&vm, uid);
    JSValue baseValue = JSValue::decode(base);

    ASSERT(JITCode::isOptimizingJIT(exec->codeBlock()->jitType()));

    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    return JSValue::encode(baseValue.getPropertySlot(exec, ident, slot, [&] (bool, PropertySlot&) -> JSValue {
        bool willDoSideEffects = !(slot.isValue() || slot.isUnset()) || slot.isTaintedByOpaqueObject();
        if (UNLIKELY(willDoSideEffects)) {
            {
                CodeOrigin codeOrigin = exec->codeOrigin();
                CodeBlock* currentBaseline = baselineCodeBlockForOriginAndBaselineCodeBlock(codeOrigin, exec->codeBlock()->alternative());
                CodeOrigin originBytecodeIndex = CodeOrigin(codeOrigin.bytecodeIndex); // Since we're searching in the baseline, we only care about the bytecode index.
                ConcurrentJITLocker locker(currentBaseline->m_lock);
                if (StructureStubInfo* stub = currentBaseline->findStubInfo(originBytecodeIndex))
                    stub->didSideEffects = true;
            }

            exec->codeBlock()->jettison(Profiler::JettisonDueToPureGetByIdEffects);
            return baseValue.get(exec, uid);
        }

        function(slot, ident);
        return slot.isValue() ? slot.getValue(exec, ident) : jsUndefined();
    }));
}

extern "C" {

#if COMPILER(MSVC)
void * _ReturnAddress(void);
#pragma intrinsic(_ReturnAddress)

#define OUR_RETURN_ADDRESS _ReturnAddress()
#else
#define OUR_RETURN_ADDRESS __builtin_return_address(0)
#endif
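
// OUR_RETURN_ADDRESS evaluates to the address the current JIT operation will
// return to in JIT code. The by-val slow paths below feed it to
// ctiPatchCallByReturnAddress so they can repatch the calling site, for
// example to route a given-up site straight to the generic operation.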

#if ENABLE(OPCODE_SAMPLING)
#define CTI_SAMPLER vm->interpreter->sampler()
#else
#define CTI_SAMPLER 0
#endif

void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
    if (!callerFrame) {
        callerFrame = exec;
        vmEntryFrame = vm->topVMEntryFrame;
    }

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    throwStackOverflowError(callerFrame, scope);
}

#if ENABLE(WEBASSEMBLY)
void JIT_OPERATION operationThrowDivideError(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    ErrorHandlingScope errorScope(*vm);
    throwException(callerFrame, scope, createError(callerFrame, ASCIILiteral("Division by zero or division overflow.")));
}

void JIT_OPERATION operationThrowOutOfBoundsAccessError(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
    CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);

    NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
    ErrorHandlingScope errorScope(*vm);
    throwException(callerFrame, scope, createError(callerFrame, ASCIILiteral("Out-of-bounds access.")));
}
#endif

int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForCall);
    if (missingArgCount < 0) {
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame, scope);
    }

    return missingArgCount;
}

int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForConstruct);
    if (missingArgCount < 0) {
        VMEntryFrame* vmEntryFrame = vm->topVMEntryFrame;
        CallFrame* callerFrame = exec->callerFrame(vmEntryFrame);
        NativeCallFrameTracerWithRestore tracer(vm, vmEntryFrame, callerFrame);
        throwStackOverflowError(callerFrame, scope);
    }

    return missingArgCount;
}

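// The PureGetById entry points mirror the usual inline-cache tiering: the
// Generic variant has no stub to update, operationPureGetById records that the
// slow path was taken, and the Optimize variant additionally tries to repatch
// the IC (GetByIDKind::Pure) once caching looks worthwhile.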
EncodedJSValue JIT_OPERATION operationPureGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return pureGetByIdCommon(*vm, exec, base, uid);
}

EncodedJSValue JIT_OPERATION operationPureGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    return pureGetByIdCommon(*vm, exec, base, uid);
}

EncodedJSValue JIT_OPERATION operationPureGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return pureGetByIdCommon(*vm, exec, base, uid,
        [&] (const PropertySlot& slot, const Identifier& ident) {
            ASSERT((slot.isValue() || slot.isUnset()) && !slot.isTaintedByOpaqueObject());
            JSValue baseValue = JSValue::decode(base);
            if (stubInfo->considerCaching(baseValue.structureOrNull()))
                repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Pure);
        });
}

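// TryGetById also probes with InternalMethodType::VMInquiry, but unlike
// PureGetById it never jettisons code: it simply reports the slot's "pure"
// result, i.e. what a side-effect-free lookup would have produced.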
EncodedJSValue JIT_OPERATION operationTryGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    baseValue.getPropertySlot(exec, ident, slot);

    return JSValue::encode(slot.getPureResult());
}

EncodedJSValue JIT_OPERATION operationTryGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    baseValue.getPropertySlot(exec, ident, slot);

    return JSValue::encode(slot.getPureResult());
}

EncodedJSValue JIT_OPERATION operationTryGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);

    baseValue.getPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    if (stubInfo->considerCaching(baseValue.structureOrNull()) && !slot.isTaintedByOpaqueObject() && (slot.isCacheableValue() || slot.isCacheableGetter() || slot.isUnset()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Try);

    return JSValue::encode(slot.getPureResult());
}

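// Ordinary GetById slow paths. These perform the full, potentially effectful
// lookup (InternalMethodType::Get) and record didSideEffects on the stub so a
// later compile can tell whether emitting a PureGetById here would be sound.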
EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);

    LOG_IC((ICEvent::OperationGetById, baseValue.classInfoOrNull(), ident));
    JSValue result = baseValue.get(exec, ident, slot);
    bool willDoSideEffects = !(slot.isValue() || slot.isUnset()) || slot.isTaintedByOpaqueObject();
    stubInfo->didSideEffects |= willDoSideEffects;
    return JSValue::encode(result);
}

EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationGetByIdGeneric, baseValue.classInfoOrNull(), ident));
    return JSValue::encode(baseValue.get(exec, ident, slot));
}

EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    LOG_IC((ICEvent::OperationGetByIdOptimize, baseValue.classInfoOrNull(), ident));

    return JSValue::encode(baseValue.getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        bool willDoSideEffects = !(slot.isValue() || slot.isUnset()) || slot.isTaintedByOpaqueObject();
        stubInfo->didSideEffects |= willDoSideEffects;

        if (stubInfo->considerCaching(baseValue.structureOrNull()))
            repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Normal);
        return found ? slot.getValue(exec, ident) : jsUndefined();
    }));
}

EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    if (!base->isObject()) {
        throwException(exec, scope, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    Identifier ident = Identifier::fromUid(vm, key);
    LOG_IC((ICEvent::OperationInOptimize, base->classInfo(), ident));
    PropertySlot slot(base, PropertySlot::InternalMethodType::HasProperty);
    bool result = asObject(base)->getPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    RELEASE_ASSERT(accessType == stubInfo->accessType);

    if (stubInfo->considerCaching(asObject(base)->structure()))
        repatchIn(exec, base, ident, result, slot, *stubInfo);

    return JSValue::encode(jsBoolean(result));
}

EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, UniquedStringImpl* key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    stubInfo->tookSlowPath = true;

    if (!base->isObject()) {
        throwException(exec, scope, createInvalidInParameterError(exec, base));
        return JSValue::encode(jsUndefined());
    }

    Identifier ident = Identifier::fromUid(vm, key);
    LOG_IC((ICEvent::OperationIn, base->classInfo(), ident));
    return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
}

EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
}

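// The PutById slow paths form a 2x2x2 matrix: strict vs. non-strict mode,
// ordinary put vs. putDirect (which bypasses setters and the prototype chain),
// and plain vs. Optimize (which attempts IC repatching). The Optimize variants
// snapshot the structure before the put, because the put itself may
// transition it.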
void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdStrict, baseValue.classInfoOrNull(), ident));

    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdNonStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    asObject(baseValue)->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrict, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
    asObject(baseValue)->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
}

void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdStrictOptimize, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());

    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);
    RETURN_IF_EXCEPTION(scope, void());

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}

void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdNonStrictOptimize, baseValue.classInfoOrNull(), ident));
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());

    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);
    RETURN_IF_EXCEPTION(scope, void());

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}

void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectStrictOptimize, baseObject->classInfo(), ident));
    PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());

    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}

void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    Identifier ident = Identifier::fromUid(vm, uid);
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrictOptimize, baseObject->classInfo(), ident));
    PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());

    Structure* structure = baseObject->structure(*vm);
    baseObject->putDirect(exec->vm(), ident, value, slot);

    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}

ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
{
    return value.isString() || value.isSymbol();
}

static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    if (LIKELY(subscript.isUInt32())) {
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i))
                object->setIndexQuickly(callFrame->vm(), i, value);
            else {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
                object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            }
        } else
            baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    RETURN_IF_EXCEPTION(scope, void());

    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    scope.release();
    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.putInline(callFrame, property, value, slot);
}

static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();
    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));
        if (baseObject->canSetIndexQuicklyForPutDirect(index)) {
            baseObject->setIndexQuickly(callFrame->vm(), index, value);
            return;
        }

        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    RETURN_IF_EXCEPTION(scope, void());

    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    PutPropertySlot slot(baseObject, isStrictMode);
    baseObject->putDirect(callFrame->vm(), property, value, slot);
}

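// Outcome of a by-val inline-cache optimization attempt: NotOptimized keeps
// trying, SeenOnce records a candidate identifier for the next visit,
// Optimized means a specialized stub was compiled, and GiveUp permanently
// routes the site to the generic slow path.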
enum class OptimizationResult {
    NotOptimized,
    SeenOnce,
    Optimized,
    GiveUp,
};

static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compilePutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take the slow path more than 10 times without patching, then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations' worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}

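// On GiveUp, the call site itself is repatched (via our return address) to the
// generic operation so this optimizing slow path is never consulted again.
// Either way, the put still has to be performed, so we fall through to
// putByVal.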
void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
    }
    putByVal(exec, baseValue, subscript, value, byValInfo);
}

static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seems like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJITLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take the slow path more than 10 times without patching, then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations' worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}

void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
    }

    directPutByVal(exec, object, subscript, value, byValInfo);
}

void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);

    putByVal(exec, baseValue, subscript, value, byValInfo);
}

void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
}

EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    execCallee->setCodeBlock(0);

    if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
        return JSValue::encode(JSValue());

    JSValue result = eval(execCallee);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    return JSValue::encode(result);
}

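// Slow path for invoking a callee that is not a JS function: either a host
// (native) function or something that is not callable/constructible at all.
// The encoded result is a (code pointer, frame policy) pair telling the JIT
// where to jump and whether the caller's frame may be reused (tail calls) or
// must be kept.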
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);

        ASSERT(callType != CallType::JS);

        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (UNLIKELY(scope.exception())) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            return encodeResult(
                bitwise_cast<void*>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }

        ASSERT(callType == CallType::None);
        throwException(exec, scope, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);

    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);

    ASSERT(constructType != ConstructType::JS);

    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (UNLIKELY(scope.exception())) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        return encodeResult(bitwise_cast<void*>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(constructType == ConstructType::None);
    throwException(exec, scope, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}

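// First call through an unlinked call site. We resolve the callee, prepare its
// code if necessary, and, once the site has been seen once already, link it
// directly to the callee's entrypoint so later calls bypass this operation.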
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    RELEASE_ASSERT(!callLinkInfo->isDirect());

    JSValue calleeAsValue = execCallee->calleeAsValue();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // FIXME: We should cache these kinds of calls. They can be common and currently they are
        // expensive.
        // https://bugs.webkit.org/show_bug.cgi?id=144458
        throwScope.release();
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = 0;
    if (executable->isHostFunction()) {
        codePtr = executable->entrypointFor(kind, MustCheckArity);
#if ENABLE(WEBASSEMBLY)
    } else if (executable->isWebAssemblyExecutable()) {
        WebAssemblyExecutable* webAssemblyExecutable = static_cast<WebAssemblyExecutable*>(executable);
        codeBlock = webAssemblyExecutable->codeBlockForCall();
        ASSERT(codeBlock);
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = webAssemblyExecutable->entrypointFor(kind, arity);
#endif
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            throwException(exec, throwScope, createNotAConstructorError(exec, callee));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, *codeBlockSlot);
        ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(error));
        if (error) {
            throwException(exec, throwScope, error);
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
        codeBlock = *codeBlockSlot;
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);

    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}

void JIT_OPERATION operationLinkDirectCall(ExecState* exec, CallLinkInfo* callLinkInfo, JSFunction* callee)
{
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    RELEASE_ASSERT(callLinkInfo->isDirect());

    // This would happen if the executable died during GC but the CodeBlock did not die. That should
    // not happen because the CodeBlock should have a weak reference to any executable it uses for
    // this purpose.
    RELEASE_ASSERT(callLinkInfo->executable());

    // Having a CodeBlock indicates that this is linked. We shouldn't be taking this path if it's
    // linked.
    RELEASE_ASSERT(!callLinkInfo->codeBlock());

    // We just don't support this yet.
    RELEASE_ASSERT(!callLinkInfo->isVarargs());

    ExecutableBase* executable = callLinkInfo->executable();
    RELEASE_ASSERT(callee->executable() == callLinkInfo->executable());

    JSScope* scope = callee->scopeUnchecked();

    MacroAssemblerCodePtr codePtr;
    CodeBlock* codeBlock = nullptr;
    if (executable->isHostFunction())
        codePtr = executable->entrypointFor(kind, MustCheckArity);
    else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        RELEASE_ASSERT(isCall(kind) || functionExecutable->constructAbility() != ConstructAbility::CannotConstruct);

        JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, codeBlock);
        ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(error));
        if (error) {
            throwException(exec, throwScope, error);
            return;
        }
        ArityCheckMode arity;
        unsigned argumentStackSlots = callLinkInfo->maxNumArguments();
        if (argumentStackSlots < static_cast<size_t>(codeBlock->numParameters()))
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }

    linkDirectFor(exec, *callLinkInfo, codeBlock, codePtr);
}

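// Shared tail of the virtual (polymorphic) call path: resolve the callee and
// make sure it has JIT code, but always dispatch through the MustCheckArity
// entrypoint rather than linking the site to any particular callee.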
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);

    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        bool isWebAssemblyExecutable = false;
#if ENABLE(WEBASSEMBLY)
        isWebAssemblyExecutable = executable->isWebAssemblyExecutable();
#endif
        if (!isWebAssemblyExecutable) {
            FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

            if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
                throwException(exec, throwScope, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
            JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, function, scope, kind, *codeBlockSlot);
            if (error) {
                throwException(exec, throwScope, error);
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
        } else {
#if ENABLE(WEBASSEMBLY)
            if (!isCall(kind)) {
                throwException(exec, throwScope, createNotAConstructorError(exec, function));
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }
#endif
        }
    }
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}

SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ASSERT(callLinkInfo->specializationKind() == CodeForCall);
    JSCell* calleeAsFunctionCell;
    SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);

    linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));

    return result;
}

SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    JSCell* calleeAsFunctionCellIgnored;
    return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
}

size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
}

size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
}

size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}

#if USE(JSVALUE64)
EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    bool result = asString(left)->equal(exec, asString(right));
#if USE(JSVALUE64)
    return JSValue::encode(jsBoolean(result));
#else
    return result;
#endif
}

EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
}

EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    return JSValue::encode(constructArray(exec, profile, values, size));
}

EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    JSValue sizeValue = JSValue::decode(size);
    return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
}

}

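// Every operationNew*Function variant below funnels into this helper; the only
// variation is the concrete function class (plain, generator, or async) and
// whether the function must be created with its reallocation watchpoint
// already invalidated.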
1215 template<typename FunctionType>
1216 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
1217 {
1218     ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
1219     VM& vm = exec->vm();
1220     NativeCallFrameTracer tracer(&vm, exec);
1221     if (isInvalidated)
1222         return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1223     return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1224 }
1225
1226 extern "C" {
1227
1228 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1229 {
1230     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
1231 }
1232
1233 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1234 {
1235     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
1236 }
1237
1238 EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1239 {
1240     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
1241 }
1242
1243 EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1244 {
1245     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
1246 }
1247
1248 EncodedJSValue JIT_OPERATION operationNewAsyncFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1249 {
1250     return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, false);
1251 }
1252
1253 EncodedJSValue JIT_OPERATION operationNewAsyncFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1254 {
1255     return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, true);
1256 }
1257
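     // op_set_function_name assigns a runtime-computed name to an anonymous
     // function. Illustrative JS (a hypothetical snippet, not from this file):
     //
     //     let key = "m" + "ethod";
     //     let o = { [key]: function() {} };
     //     o.method.name; // "method", assigned via setFunctionName() below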
1258 void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
1259 {
1260     VM* vm = &exec->vm();
1261     NativeCallFrameTracer tracer(vm, exec);
1262
1263     JSFunction* func = jsCast<JSFunction*>(funcCell);
1264     JSValue name = JSValue::decode(encodedName);
1265     func->setFunctionName(exec, name);
1266 }
1267
1268 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1269 {
1270     VM* vm = &exec->vm();
1271     NativeCallFrameTracer tracer(vm, exec);
1272
1273     return constructEmptyObject(exec, structure);
1274 }
1275
1276 EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
1277 {
1278     SuperSamplerScope superSamplerScope(false);
1279     VM& vm = exec->vm();
1280     NativeCallFrameTracer tracer(&vm, exec);
1281     auto scope = DECLARE_THROW_SCOPE(vm);
1282
1283     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1284     if (!regexp->isValid()) {
1285         throwException(exec, scope, createSyntaxError(exec, regexp->errorMessage()));
1286         return JSValue::encode(jsUndefined());
1287     }
1288
1289     return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
1290 }
1291
1292 // The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1293 // existing DFG slow path generator machinery when creating the slow path for CheckWatchdogTimer
1294 // in the DFG. If a DFG slow path generator that supports a void return type is added in the
1295 // future, we can switch to using that then.
1296 UnusedPtr JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
1297 {
1298     VM& vm = exec->vm();
1299     NativeCallFrameTracer tracer(&vm, exec);
1300     auto scope = DECLARE_THROW_SCOPE(vm);
1301
1302     if (UNLIKELY(vm.shouldTriggerTermination(exec)))
1303         throwException(exec, scope, createTerminatedExecutionException(&vm));
1304
1305     return nullptr;
1306 }
1307
1308 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookType)
1309 {
1310     VM& vm = exec->vm();
1311     NativeCallFrameTracer tracer(&vm, exec);
1312
1313     vm.interpreter->debug(exec, static_cast<DebugHookType>(debugHookType));
1314 }
1315
1316 #if ENABLE(DFG_JIT)
1317 static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
1318 {
1319     codeBlock->updateAllPredictions();
1320     codeBlock->optimizeAfterWarmUp();
1321 }
1322
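     // Slow-path entry for Baseline->DFG tier-up. The return value is an
     // encodeResult(target, dataBuffer) pair: (0, 0) tells the caller to keep
     // running baseline code, while a non-null target is the OSR entry thunk
     // to jump to with the prepared dataBuffer.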
1323 SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
1324 {
1325     VM& vm = exec->vm();
1326     NativeCallFrameTracer tracer(&vm, exec);
1327
1328     // Defer GC for a while so that it doesn't run between when we enter into this
1329     // slow path and when we figure out the state of our code block. This prevents
1330     // a number of awkward reentrancy scenarios, including:
1331     //
1332     // - The optimized version of our code block being jettisoned by GC right after
1333     //   we concluded that we wanted to use it, but have not planted it into the JS
1334     //   stack yet.
1335     //
1336     // - An optimized version of our code block being installed just as we decided
1337     //   that it wasn't ready yet.
1338     //
1339     // Note that jettisoning won't happen if we already initiated OSR, because in
1340     // that case we would have already planted the optimized code block into the JS
1341     // stack.
1342     DeferGCForAWhile deferGC(vm.heap);
1343     
1344     CodeBlock* codeBlock = exec->codeBlock();
1345     if (codeBlock->jitType() != JITCode::BaselineJIT) {
1346         dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
1347         RELEASE_ASSERT_NOT_REACHED();
1348     }
1349     
1350     if (bytecodeIndex) {
1351         // If we're attempting to OSR from a loop, assume that this should be
1352         // separately optimized.
1353         codeBlock->m_shouldAlwaysBeInlined = false;
1354     }
1355
1356     if (Options::verboseOSR()) {
1357         dataLog(
1358             *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
1359             ", executeCounter = ", codeBlock->jitExecuteCounter(),
1360             ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
1361             ", exitCounter = ");
1362         if (codeBlock->hasOptimizedReplacement())
1363             dataLog(codeBlock->replacement()->osrExitCounter());
1364         else
1365             dataLog("N/A");
1366         dataLog("\n");
1367     }
1368
1369     if (!codeBlock->checkIfOptimizationThresholdReached()) {
1370         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("counter = ", codeBlock->jitExecuteCounter()));
1371         codeBlock->updateAllPredictions();
1372         if (Options::verboseOSR())
1373             dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
1374         return encodeResult(0, 0);
1375     }
1376     
1377     Debugger* debugger = codeBlock->globalObject()->debugger();
1378     if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
1379         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("debugger is stepping or has requests"));
1380         updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1381         return encodeResult(0, 0);
1382     }
1383
1384     if (codeBlock->m_shouldAlwaysBeInlined) {
1385         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should always be inlined"));
1386         updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1387         if (Options::verboseOSR())
1388             dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
1389         return encodeResult(0, 0);
1390     }
1391
1392     // We cannot be in the process of asynchronous compilation and also have an optimized
1393     // replacement.
1394     DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
1395     ASSERT(
1396         !worklist
1397         || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
1398         && codeBlock->hasOptimizedReplacement()));
1399
1400     DFG::Worklist::State worklistState;
1401     if (worklist) {
1402         // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
1403         // (i.e. compiled) code blocks. But if it completes ours, we also need to know
1404         // what the result was so that we don't plow ahead and attempt OSR or immediate
1405         // reoptimization. This will have already also set the appropriate JIT execution
1406         // count threshold depending on what happened, so if the compilation was anything
1407         // but successful we just want to return early. See the case for worklistState ==
1408         // DFG::Worklist::Compiled, below.
1409         
1410         // Note that we could have alternatively just called Worklist::compilationState()
1411         // here, and if it returned Compiled, we could have then called
1412         // completeAndScheduleOSR() below. But that would have meant that it could take
1413         // longer for code blocks to be completed: they would only complete when *their*
1414         // execution count trigger fired; but that could take a while since the firing is
1415         // racy. It could also mean that code blocks that never run again after being
1416         // compiled would sit on the worklist until next GC. That's fine, but it's
1417         // probably a waste of memory. Our goal here is to complete code blocks as soon as
1418         // possible in order to minimize the chances of us executing baseline code after
1419         // optimized code is already available.
1420         worklistState = worklist->completeAllReadyPlansForVM(
1421             vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
1422     } else
1423         worklistState = DFG::Worklist::NotKnown;
1424
1425     if (worklistState == DFG::Worklist::Compiling) {
1426         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiling"));
1427         // We cannot be in the process of asynchronous compilation and also have an optimized
1428         // replacement.
1429         RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
1430         codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
1431         return encodeResult(0, 0);
1432     }
1433
1434     if (worklistState == DFG::Worklist::Compiled) {
1435         // If we don't have an optimized replacement but we did just get compiled, then
1436         // the compilation failed or was invalidated, in which case the execution count
1437         // thresholds have already been set appropriately by
1438         // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
1439         // nothing left to do.
1440         if (!codeBlock->hasOptimizedReplacement()) {
1441             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiled and failed"));
1442             codeBlock->updateAllPredictions();
1443             if (Options::verboseOSR())
1444                 dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
1445             return encodeResult(0, 0);
1446         }
1447     } else if (codeBlock->hasOptimizedReplacement()) {
1448         if (Options::verboseOSR())
1449             dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
1450         // If we have an optimized replacement, then it must be the case that we entered
1451         // cti_optimize from a loop. That's because if there's an optimized replacement,
1452         // then all calls to this function will be relinked to the replacement and so
1453         // the prologue OSR will never fire.
1454         
1455         // This is an interesting threshold check. Consider that a function OSR exits
1456         // in the middle of a loop, while having a relatively low exit count. The exit
1457         // will reset the execution counter to some target threshold, meaning that this
1458         // code won't be reached until that loop heats up for >=1000 executions. But then
1459         // we do a second check here, to see if we should either reoptimize, or just
1460         // attempt OSR entry. Hence it might even be correct for
1461         // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
1462         // additional checking anyway, to reduce the amount of recompilation thrashing.
1463         if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
1464             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize from loop now"));
1465             if (Options::verboseOSR()) {
1466                 dataLog(
1467                     "Triggering reoptimization of ", *codeBlock,
1468                     "(", *codeBlock->replacement(), ") (in loop).\n");
1469             }
1470             codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
1471             return encodeResult(0, 0);
1472         }
1473     } else {
1474         if (!codeBlock->shouldOptimizeNow()) {
1475             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("insufficient profiling"));
1476             if (Options::verboseOSR()) {
1477                 dataLog(
1478                     "Delaying optimization for ", *codeBlock,
1479                     " because of insufficient profiling.\n");
1480             }
1481             return encodeResult(0, 0);
1482         }
1483
1484         if (Options::verboseOSR())
1485             dataLog("Triggering optimized compilation of ", *codeBlock, "\n");
1486
1487         unsigned numVarsWithValues;
1488         if (bytecodeIndex)
1489             numVarsWithValues = codeBlock->m_numCalleeLocals;
1490         else
1491             numVarsWithValues = 0;
1492         Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
1493         int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
1494         for (size_t i = 0; i < mustHandleValues.size(); ++i) {
1495             int operand = mustHandleValues.operandForIndex(i);
1496             if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
1497                 continue;
1498             mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
1499         }
1500
1501         CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
1502         CompilationResult result = DFG::compile(
1503             vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
1504             mustHandleValues, JITToDFGDeferredCompilationCallback::create());
1505         
1506         if (result != CompilationSuccessful) {
1507             CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compilation failed"));
1508             return encodeResult(0, 0);
1509         }
1510     }
1511     
1512     CodeBlock* optimizedCodeBlock = codeBlock->replacement();
1513     ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
1514     
1515     if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
1516         CODEBLOCK_LOG_EVENT(optimizedCodeBlock, "osrEntry", ("at bc#", bytecodeIndex));
1517         if (Options::verboseOSR()) {
1518             dataLog(
1519                 "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
1520         }
1521
1522         codeBlock->optimizeSoon();
1523         codeBlock->unlinkedCodeBlock()->setDidOptimize(TrueTriState);
1524         return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
1525     }
1526
1527     if (Options::verboseOSR()) {
1528         dataLog(
1529             "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
1530             " succeeded, OSR failed, after a delay of ",
1531             codeBlock->optimizationDelayCounter(), ".\n");
1532     }
1533
1534     // Count the OSR failure as a speculation failure. If this happens a lot, then
1535     // reoptimize.
1536     optimizedCodeBlock->countOSRExit();
1537
1538     // We are a lot more conservative about triggering reoptimization after OSR failure than
1539     // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
1540     // already, then we really would like to reoptimize immediately. But this case covers
1541     // something else: there weren't many (or any) speculation failures before, but we just
1542     // failed to enter the speculative code because some variable had the wrong value or
1543     // because the OSR code decided for any spurious reason that it did not want to OSR
1544     // right now. So, we trigger reoptimization only upon the more conservative (non-loop)
1545     // reoptimization trigger.
1546     if (optimizedCodeBlock->shouldReoptimizeNow()) {
1547         CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize now"));
1548         if (Options::verboseOSR()) {
1549             dataLog(
1550                 "Triggering reoptimization of ", *codeBlock, " -> ",
1551                 *codeBlock->replacement(), " (after OSR fail).\n");
1552         }
1553         optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
1554         return encodeResult(0, 0);
1555     }
1556
1557     // OSR failed this time, but it might succeed next time! Let the code run a bit
1558     // longer and then try again.
1559     codeBlock->optimizeAfterWarmUp();
1560     
1561     CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("OSR failed"));
1562     return encodeResult(0, 0);
1563 }
1564 #endif
1565
1566 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1567 {
1568     VM& vm = exec->vm();
1569     NativeCallFrameTracer tracer(&vm, exec);
1570
1571     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1572     ASSERT(isJSArray(arrayValue));
1573     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1574 }
1575
1576 enum class AccessorType {
1577     Getter,
1578     Setter
1579 };
1580
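     // Shared helper for the put_getter_by_val / put_setter_by_val slow paths,
     // reached for accessors with computed names. Illustrative JS (a
     // hypothetical snippet):
     //
     //     let key = computeKey();
     //     let o = { get [key]() { return 42; },
     //               set [key](value) { } };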
1581 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1582 {
1583     VM& vm = exec->vm();
1584     auto scope = DECLARE_THROW_SCOPE(vm);
1585     auto propertyKey = subscript.toPropertyKey(exec);
1586     RETURN_IF_EXCEPTION(scope, void());
1587
1588     if (accessorType == AccessorType::Getter)
1589         base->putGetter(exec, propertyKey, accessor, attribute);
1590     else
1591         base->putSetter(exec, propertyKey, accessor, attribute);
1592 }
1593
1594 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1595 {
1596     VM& vm = exec->vm();
1597     NativeCallFrameTracer tracer(&vm, exec);
1598
1599     ASSERT(object && object->isObject());
1600     JSObject* baseObj = object->getObject();
1601
1602     ASSERT(getter->isObject());
1603     baseObj->putGetter(exec, uid, getter, options);
1604 }
1605
1606 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1607 {
1608     VM& vm = exec->vm();
1609     NativeCallFrameTracer tracer(&vm, exec);
1610
1611     ASSERT(object && object->isObject());
1612     JSObject* baseObj = object->getObject();
1613
1614     ASSERT(setter->isObject());
1615     baseObj->putSetter(exec, uid, setter, options);
1616 }
1617
1618 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1619 {
1620     VM& vm = exec->vm();
1621     NativeCallFrameTracer tracer(&vm, exec);
1622
1623     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1624 }
1625
1626 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1627 {
1628     VM& vm = exec->vm();
1629     NativeCallFrameTracer tracer(&vm, exec);
1630
1631     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1632 }
1633
1634 #if USE(JSVALUE64)
1635 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1636 {
1637     VM& vm = exec->vm();
1638     NativeCallFrameTracer tracer(&vm, exec);
1639
1640     ASSERT(object && object->isObject());
1641     JSObject* baseObj = asObject(object);
1642
1643     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1644
1645     JSValue getter = JSValue::decode(encodedGetterValue);
1646     JSValue setter = JSValue::decode(encodedSetterValue);
1647     ASSERT(getter.isObject() || getter.isUndefined());
1648     ASSERT(setter.isObject() || setter.isUndefined());
1649     ASSERT(getter.isObject() || setter.isObject());
1650
1651     if (!getter.isUndefined())
1652         accessor->setGetter(vm, exec->lexicalGlobalObject(), asObject(getter));
1653     if (!setter.isUndefined())
1654         accessor->setSetter(vm, exec->lexicalGlobalObject(), asObject(setter));
1655     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1656 }
1657
1658 #else
1659 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getter, JSCell* setter)
1660 {
1661     VM& vm = exec->vm();
1662     NativeCallFrameTracer tracer(&vm, exec);
1663
1664     ASSERT(object && object->isObject());
1665     JSObject* baseObj = asObject(object);
1666
1667     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject());
1668
1669     ASSERT(!getter || getter->isObject());
1670     ASSERT(!setter || setter->isObject());
1671     ASSERT(getter || setter);
1672
1673     if (getter)
1674         accessor->setGetter(vm, exec->lexicalGlobalObject(), getter->getObject());
1675     if (setter)
1676         accessor->setSetter(vm, exec->lexicalGlobalObject(), setter->getObject());
1677     baseObj->putDirectAccessor(exec, uid, accessor, attribute);
1678 }
1679 #endif
1680
1681 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1682 {
1683     VM& vm = exec->vm();
1684     NativeCallFrameTracer tracer(&vm, exec);
1685
1686     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1687     exec->uncheckedR(scopeReg) = scope->next();
1688 }
1689
1690 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1691 {
1692     VM& vm = exec->vm();
1693     NativeCallFrameTracer tracer(&vm, exec);
1694
1695     JSValue value = JSValue::decode(encodedValue);
1696     JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1697
1698     ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure()->typeInfo().implementsDefaultHasInstance());
1699
1700     if (constructor->hasInstance(exec, value, hasInstanceValue))
1701         return 1;
1702     return 0;
1703 }
1704
1705 }
1706
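     // Answers whether a Direct/ScopedArguments object can service `index` on
     // the fast path the DFG expects. The by-val paths below consult this so
     // that an in-bounds arguments access that merely misses
     // canGetIndexQuickly() does not mark the array profile as out-of-bounds.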
1707 static bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
1708 {
1709     switch (object.structure()->typeInfo().type()) {
1710     case DirectArgumentsType: {
1711         DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
1712         if (directArguments->canAccessArgumentIndexQuicklyInDFG(index))
1713             return true;
1714         break;
1715     }
1716     case ScopedArgumentsType: {
1717         ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
1718         if (scopedArguments->canAccessArgumentIndexQuicklyInDFG(index))
1719             return true;
1720         break;
1721     }
1722     default:
1723         break;
1724     }
1725     return false;
1726 }
1727
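     // Generic get_by_val slow path. It tries, in order: a fast own-property
     // lookup for string subscripts on cells, fast indexed access for uint32
     // subscripts (repatching to operationGetByValString for string bases),
     // and finally the fully generic JSValue::get() with a property key.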
1728 static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
1729 {
1730     VM& vm = exec->vm();
1731     auto scope = DECLARE_THROW_SCOPE(vm);
1732
1733     if (LIKELY(baseValue.isCell() && subscript.isString())) {
1734         Structure& structure = *baseValue.asCell()->structure(vm);
1735         if (JSCell::canUseFastGetOwnProperty(structure)) {
1736             if (RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec)) {
1737                 if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
1738                     ASSERT(exec->bytecodeOffset());
1739                     if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
1740                         byValInfo->tookSlowPath = true;
1741                     return result;
1742                 }
1743             }
1744         }
1745     }
1746
1747     if (subscript.isUInt32()) {
1748         ASSERT(exec->bytecodeOffset());
1749         byValInfo->tookSlowPath = true;
1750
1751         uint32_t i = subscript.asUInt32();
1752         if (isJSString(baseValue)) {
1753             if (asString(baseValue)->canGetIndex(i)) {
1754                 ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString));
1755                 return asString(baseValue)->getIndex(exec, i);
1756             }
1757             byValInfo->arrayProfile->setOutOfBounds();
1758         } else if (baseValue.isObject()) {
1759             JSObject* object = asObject(baseValue);
1760             if (object->canGetIndexQuickly(i))
1761                 return object->getIndexQuickly(i);
1762
1763             if (!canAccessArgumentIndexQuickly(*object, i)) {
1764                 // FIXME: This will make us think that in-bounds typed array accesses are actually
1765                 // out-of-bounds.
1766                 // https://bugs.webkit.org/show_bug.cgi?id=149886
1767                 byValInfo->arrayProfile->setOutOfBounds();
1768             }
1769         }
1770
1771         return baseValue.get(exec, i);
1772     }
1773
1774     baseValue.requireObjectCoercible(exec);
1775     RETURN_IF_EXCEPTION(scope, JSValue());
1776     auto property = subscript.toPropertyKey(exec);
1777     RETURN_IF_EXCEPTION(scope, JSValue());
1778
1779     ASSERT(exec->bytecodeOffset());
1780     if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
1781         byValInfo->tookSlowPath = true;
1782
1783     return baseValue.get(exec, property);
1784 }
1785
1786 static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
1787 {
1788     // See if it's worth optimizing this at all.
1789     OptimizationResult optimizationResult = OptimizationResult::NotOptimized;
1790
1791     VM& vm = exec->vm();
1792
1793     if (baseValue.isObject() && subscript.isInt32()) {
1794         JSObject* object = asObject(baseValue);
1795
1796         ASSERT(exec->bytecodeOffset());
1797         ASSERT(!byValInfo->stubRoutine);
1798
1799         if (hasOptimizableIndexing(object->structure(vm))) {
1800             // Attempt to optimize.
1801             Structure* structure = object->structure(vm);
1802             JITArrayMode arrayMode = jitArrayModeForStructure(structure);
1803             if (arrayMode != byValInfo->arrayMode) {
1804                 // If we reached this case, we got an interesting array mode we did not expect when we compiled.
1805                 // Let's update the profile to do better next time.
1806                 CodeBlock* codeBlock = exec->codeBlock();
1807                 ConcurrentJITLocker locker(codeBlock->m_lock);
1808                 byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
1809
1810                 JIT::compileGetByVal(&vm, exec->codeBlock(), byValInfo, returnAddress, arrayMode);
1811                 optimizationResult = OptimizationResult::Optimized;
1812             }
1813         }
1814
1815         // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
1816         if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
1817             optimizationResult = OptimizationResult::GiveUp;
1818     }
1819
1820     if (baseValue.isObject() && isStringOrSymbol(subscript)) {
1821         const Identifier propertyName = subscript.toPropertyKey(exec);
1822         if (subscript.isSymbol() || !parseIndex(propertyName)) {
1823             ASSERT(exec->bytecodeOffset());
1824             ASSERT(!byValInfo->stubRoutine);
1825             if (byValInfo->seen) {
1826                 if (byValInfo->cachedId == propertyName) {
1827                     JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
1828                     optimizationResult = OptimizationResult::Optimized;
1829                 } else {
1830                     // Seems like a generic property access site.
1831                     optimizationResult = OptimizationResult::GiveUp;
1832                 }
1833             } else {
1834                 CodeBlock* codeBlock = exec->codeBlock();
1835                 ConcurrentJITLocker locker(codeBlock->m_lock);
1836                 byValInfo->seen = true;
1837                 byValInfo->cachedId = propertyName;
1838                 if (subscript.isSymbol())
1839                     byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
1840                 optimizationResult = OptimizationResult::SeenOnce;
1841             }
1842         }
1843     }
1844
1845     if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
1846         // If we take slow path more than 10 times without patching then make sure we
1847         // never make that mistake again. For cases where we see non-index-intercepting
1848         // objects, this gives 10 iterations worth of opportunity for us to observe
1849         // that the get_by_val may be polymorphic. We count up slowPathCount even if
1850         // the result is GiveUp.
1851         if (++byValInfo->slowPathCount >= 10)
1852             optimizationResult = OptimizationResult::GiveUp;
1853     }
1854
1855     return optimizationResult;
1856 }
1857
1858 extern "C" {
1859
1860 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1861 {
1862     VM& vm = exec->vm();
1863     NativeCallFrameTracer tracer(&vm, exec);
1864     JSValue baseValue = JSValue::decode(encodedBase);
1865     JSValue subscript = JSValue::decode(encodedSubscript);
1866
1867     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1868     return JSValue::encode(result);
1869 }
1870
1871 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1872 {
1873     VM& vm = exec->vm();
1874     NativeCallFrameTracer tracer(&vm, exec);
1875
1876     JSValue baseValue = JSValue::decode(encodedBase);
1877     JSValue subscript = JSValue::decode(encodedSubscript);
1878     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1879     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
1880         // Don't ever try to optimize.
1881         byValInfo->tookSlowPath = true;
1882         ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric));
1883     }
1884
1885     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
1886 }
1887
1888 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1889 {
1890     VM& vm = exec->vm();
1891     NativeCallFrameTracer tracer(&vm, exec);
1892     JSValue baseValue = JSValue::decode(encodedBase);
1893     JSValue subscript = JSValue::decode(encodedSubscript);
1894     
1895     ASSERT(baseValue.isObject());
1896     ASSERT(subscript.isUInt32());
1897
1898     JSObject* object = asObject(baseValue);
1899     bool didOptimize = false;
1900
1901     ASSERT(exec->bytecodeOffset());
1902     ASSERT(!byValInfo->stubRoutine);
1903     
1904     if (hasOptimizableIndexing(object->structure(vm))) {
1905         // Attempt to optimize.
1906         JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
1907         if (arrayMode != byValInfo->arrayMode) {
1908             JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
1909             didOptimize = true;
1910         }
1911     }
1912     
1913     if (!didOptimize) {
1914         // If we take slow path more than 10 times without patching then make sure we
1915         // never make that mistake again. Or, if we failed to patch and we have some object
1916         // that intercepts indexed get, then don't even wait until 10 times. For cases
1917         // where we see non-index-intercepting objects, this gives 10 iterations worth of
1918         // opportunity for us to observe that the get_by_val may be polymorphic.
1919         if (++byValInfo->slowPathCount >= 10
1920             || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
1921             // Don't ever try to optimize.
1922             ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric));
1923         }
1924     }
1925
1926     uint32_t index = subscript.asUInt32();
1927     if (object->canGetIndexQuickly(index))
1928         return JSValue::encode(JSValue(JSValue::JSTrue));
1929
1930     if (!canAccessArgumentIndexQuickly(*object, index)) {
1931         // FIXME: This will make us think that in-bounds typed array accesses are actually
1932         // out-of-bounds.
1933         // https://bugs.webkit.org/show_bug.cgi?id=149886
1934         byValInfo->arrayProfile->setOutOfBounds();
1935     }
1936     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
1937 }
1938     
1939 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1940 {
1941     VM& vm = exec->vm();
1942     NativeCallFrameTracer tracer(&vm, exec);
1943     JSValue baseValue = JSValue::decode(encodedBase);
1944     JSValue subscript = JSValue::decode(encodedSubscript);
1945     
1946     ASSERT(baseValue.isObject());
1947     ASSERT(subscript.isUInt32());
1948
1949     JSObject* object = asObject(baseValue);
1950     uint32_t index = subscript.asUInt32();
1951     if (object->canGetIndexQuickly(index))
1952         return JSValue::encode(JSValue(JSValue::JSTrue));
1953
1954     if (!canAccessArgumentIndexQuickly(*object, index)) {
1955         // FIXME: This will make us think that in-bounds typed array accesses are actually
1956         // out-of-bounds.
1957         // https://bugs.webkit.org/show_bug.cgi?id=149886
1958         byValInfo->arrayProfile->setOutOfBounds();
1959     }
1960     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
1961 }
1962     
1963 EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1964 {
1965     VM& vm = exec->vm();
1966     NativeCallFrameTracer tracer(&vm, exec);
1967     auto scope = DECLARE_THROW_SCOPE(vm);
1968     JSValue baseValue = JSValue::decode(encodedBase);
1969     JSValue subscript = JSValue::decode(encodedSubscript);
1970     
1971     JSValue result;
1972     if (LIKELY(subscript.isUInt32())) {
1973         uint32_t i = subscript.asUInt32();
1974         if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
1975             result = asString(baseValue)->getIndex(exec, i);
1976         else {
1977             result = baseValue.get(exec, i);
1978             if (!isJSString(baseValue)) {
1979                 ASSERT(exec->bytecodeOffset());
1980                 ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize));
1981             }
1982         }
1983     } else {
1984         baseValue.requireObjectCoercible(exec);
1985         RETURN_IF_EXCEPTION(scope, encodedJSValue());
1986         auto property = subscript.toPropertyKey(exec);
1987         RETURN_IF_EXCEPTION(scope, encodedJSValue());
1988         result = baseValue.get(exec, property);
1989     }
1990
1991     return JSValue::encode(result);
1992 }
1993
1994 EncodedJSValue JIT_OPERATION operationDeleteByIdJSResult(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
1995 {
1996     return JSValue::encode(jsBoolean(operationDeleteById(exec, base, uid)));
1997 }
1998
1999 size_t JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, UniquedStringImpl* uid)
2000 {
2001     VM& vm = exec->vm();
2002     NativeCallFrameTracer tracer(&vm, exec);
2003     auto scope = DECLARE_THROW_SCOPE(vm);
2004
2005     JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
2006     if (!baseObj)
2007         return false;
2008     bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, Identifier::fromUid(&vm, uid));
2009     if (!couldDelete && exec->codeBlock()->isStrictMode())
2010         throwTypeError(exec, scope, ASCIILiteral(UnableToDeletePropertyError));
2011     return couldDelete;
2012 }
2013
2014 EncodedJSValue JIT_OPERATION operationDeleteByValJSResult(ExecState* exec, EncodedJSValue base,  EncodedJSValue key)
2015 {
2016     return JSValue::encode(jsBoolean(operationDeleteByVal(exec, base, key)));
2017 }
2018
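     // Illustrative JS for the two delete paths below (hypothetical snippet):
     //
     //     delete o[5];    // key fits in a uint32: deletePropertyByIndex()
     //     delete o[sym];  // otherwise: toPropertyKey(), then deleteProperty()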
2019 size_t JIT_OPERATION operationDeleteByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedKey)
2020 {
2021     VM& vm = exec->vm();
2022     NativeCallFrameTracer tracer(&vm, exec);
2023     auto scope = DECLARE_THROW_SCOPE(vm);
2024
2025     JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
2026     JSValue key = JSValue::decode(encodedKey);
2027     if (!baseObj)
2028         return false;
2029
2030     bool couldDelete;
2031     uint32_t index;
2032     if (key.getUInt32(index))
2033         couldDelete = baseObj->methodTable(vm)->deletePropertyByIndex(baseObj, exec, index);
2034     else {
2035         RETURN_IF_EXCEPTION(scope, false);
2036         Identifier property = key.toPropertyKey(exec);
2037         RETURN_IF_EXCEPTION(scope, false);
2038         couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, property);
2039     }
2040     if (!couldDelete && exec->codeBlock()->isStrictMode())
2041         throwTypeError(exec, scope, ASCIILiteral(UnableToDeletePropertyError));
2042     return couldDelete;
2043 }
2044
2045 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
2046 {
2047     VM& vm = exec->vm();
2048     NativeCallFrameTracer tracer(&vm, exec);
2049     JSValue value = JSValue::decode(encodedValue);
2050     JSValue proto = JSValue::decode(encodedProto);
2051     
2052     bool result = JSObject::defaultHasInstance(exec, value, proto);
2053     return JSValue::encode(jsBoolean(result));
2054 }
2055
2056 int32_t JIT_OPERATION operationSizeFrameForForwardArguments(ExecState* exec, EncodedJSValue, int32_t numUsedStackSlots, int32_t)
2057 {
2058     VM& vm = exec->vm();
2059     NativeCallFrameTracer tracer(&vm, exec);
2060     return sizeFrameForForwardArguments(exec, vm, numUsedStackSlots);
2061 }
2062
2063 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
2064 {
2065     VM& vm = exec->vm();
2066     NativeCallFrameTracer tracer(&vm, exec);
2067     JSValue arguments = JSValue::decode(encodedArguments);
2068     return sizeFrameForVarargs(exec, vm, arguments, numUsedStackSlots, firstVarArgOffset);
2069 }
2070
2071 CallFrame* JIT_OPERATION operationSetupForwardArgumentsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue, int32_t, int32_t length)
2072 {
2073     VM& vm = exec->vm();
2074     NativeCallFrameTracer tracer(&vm, exec);
2075     setupForwardArgumentsFrame(exec, newCallFrame, length);
2076     return newCallFrame;
2077 }
2078
2079 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
2080 {
2081     VM& vm = exec->vm();
2082     NativeCallFrameTracer tracer(&vm, exec);
2083     JSValue arguments = JSValue::decode(encodedArguments);
2084     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
2085     return newCallFrame;
2086 }
2087
2088 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
2089 {
2090     VM& vm = exec->vm();
2091     NativeCallFrameTracer tracer(&vm, exec);
2092     JSObject* obj = JSValue::decode(value).toObject(exec);
2093     if (!obj)
2094         return JSValue::encode(JSValue());
2095     return JSValue::encode(obj);
2096 }
2097
2098 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2099 {
2100     VM& vm = exec->vm();
2101     NativeCallFrameTracer tracer(&vm, exec);
2102     JSValue key = JSValue::decode(encodedKey);
2103     CodeBlock* codeBlock = exec->codeBlock();
2104
2105     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
2106     void* result = jumpTable.ctiDefault.executableAddress();
2107
2108     if (key.isString()) {
2109         StringImpl* value = asString(key)->value(exec).impl();
2110         if (value->length() == 1)
2111             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
2112     }
2113
2114     return reinterpret_cast<char*>(result);
2115 }
2116
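     // An immediate switch key can still be a double at runtime. A double only
     // matches a case when it equals its int32 truncation exactly; for example,
     // switch (x) with x === 2.0 hits case 2, while x === 2.5 takes ctiDefault.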
2117 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2118 {
2119     VM& vm = exec->vm();
2120     NativeCallFrameTracer tracer(&vm, exec);
2121     JSValue key = JSValue::decode(encodedKey);
2122     CodeBlock* codeBlock = exec->codeBlock();
2123
2124     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
2125     void* result;
2126     if (key.isInt32())
2127         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
2128     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
2129         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
2130     else
2131         result = jumpTable.ctiDefault.executableAddress();
2132     return reinterpret_cast<char*>(result);
2133 }
2134
2135 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2136 {
2137     VM& vm = exec->vm();
2138     NativeCallFrameTracer tracer(&vm, exec);
2139     JSValue key = JSValue::decode(encodedKey);
2140     CodeBlock* codeBlock = exec->codeBlock();
2141
2142     void* result;
2143     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
2144
2145     if (key.isString()) {
2146         StringImpl* value = asString(key)->value(exec).impl();
2147         result = jumpTable.ctiForValue(value).executableAddress();
2148     } else
2149         result = jumpTable.ctiDefault.executableAddress();
2150
2151     return reinterpret_cast<char*>(result);
2152 }
2153
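     // Slow path for get_from_scope. For reads from the global lexical
     // environment this also performs the TDZ check: for example, referencing
     // a global `let x` before its initializer has run reaches the
     // jsTDZValue() comparison below and throws.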
2154 EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
2155 {
2156     VM& vm = exec->vm();
2157     NativeCallFrameTracer tracer(&vm, exec);
2158     auto throwScope = DECLARE_THROW_SCOPE(vm);
2159
2160     CodeBlock* codeBlock = exec->codeBlock();
2161     Instruction* pc = bytecodePC;
2162
2163     const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
2164     JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
2165     GetPutInfo getPutInfo(pc[4].u.operand);
2166
2167     // ModuleVar is always converted to ClosureVar for get_from_scope.
2168     ASSERT(getPutInfo.resolveType() != ModuleVar);
2169
2170     return JSValue::encode(scope->getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
2171         if (!found) {
2172             if (getPutInfo.resolveMode() == ThrowIfNotFound)
2173                 throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
2174             return jsUndefined();
2175         }
2176
2177         JSValue result = JSValue();
2178         if (scope->isGlobalLexicalEnvironment()) {
2179             // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
2180             result = slot.getValue(exec, ident);
2181             if (result == jsTDZValue()) {
2182                 throwException(exec, throwScope, createTDZError(exec));
2183                 return jsUndefined();
2184             }
2185         }
2186
2187         CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, pc, scope, slot, ident);
2188
2189         if (!result)
2190             return slot.getValue(exec, ident);
2191         return result;
2192     }));
2193 }
2194
2195 void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
2196 {
2197     VM& vm = exec->vm();
2198     NativeCallFrameTracer tracer(&vm, exec);
2199     auto throwScope = DECLARE_THROW_SCOPE(vm);
2200
2201     Instruction* pc = bytecodePC;
2202
2203     CodeBlock* codeBlock = exec->codeBlock();
2204     const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
2205     JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
2206     JSValue value = exec->r(pc[3].u.operand).jsValue();
2207     GetPutInfo getPutInfo = GetPutInfo(pc[4].u.operand);
2208
2209     // ModuleVar does not keep the scope register value alive in DFG.
2210     ASSERT(getPutInfo.resolveType() != ModuleVar);
2211
2212     if (getPutInfo.resolveType() == LocalClosureVar) {
2213         JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
2214         environment->variableAt(ScopeOffset(pc[6].u.operand)).set(vm, environment, value);
2215         if (WatchpointSet* set = pc[5].u.watchpointSet)
2216             set->touch(vm, "Executed op_put_scope<LocalClosureVar>");
2217         return;
2218     }
2219
2220     bool hasProperty = scope->hasProperty(exec, ident);
2221     if (hasProperty
2222         && scope->isGlobalLexicalEnvironment()
2223         && !isInitialization(getPutInfo.initializationMode())) {
2224         // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
2225         PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
2226         JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
2227         if (slot.getValue(exec, ident) == jsTDZValue()) {
2228             throwException(exec, throwScope, createTDZError(exec));
2229             return;
2230         }
2231     }
2232
2233     if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
2234         throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
2235         return;
2236     }
2237
2238     PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, isInitialization(getPutInfo.initializationMode()));
2239     scope->methodTable()->put(scope, exec, ident, value, slot);
2240     
2241     RETURN_IF_EXCEPTION(throwScope, void());
2242
2243     CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, pc, scope, getPutInfo, slot, ident);
2244 }
2245
2246 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
2247 {
2248     VM* vm = &exec->vm();
2249     NativeCallFrameTracer tracer(vm, exec);
2250     auto scope = DECLARE_THROW_SCOPE(*vm);
2251
2252     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
2253     throwException(exec, scope, exceptionValue);
2254
2255     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
2256     genericUnwind(vm, exec);
2257 }
2258
2259 char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState* exec, JSObject* object)
2260 {
2261     VM& vm = exec->vm();
2262     NativeCallFrameTracer tracer(&vm, exec);
2263
2264     ASSERT(!object->structure()->outOfLineCapacity());
2265     Butterfly* result = object->growOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
2266     object->setButterflyWithoutChangingStructure(vm, result);
2267     return reinterpret_cast<char*>(result);
2268 }
2269
2270 char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* exec, JSObject* object, size_t newSize)
2271 {
2272     VM& vm = exec->vm();
2273     NativeCallFrameTracer tracer(&vm, exec);
2274
2275     Butterfly* result = object->growOutOfLineStorage(vm, object->structure()->outOfLineCapacity(), newSize);
2276     object->setButterflyWithoutChangingStructure(vm, result);
2277     return reinterpret_cast<char*>(result);
2278 }
2279
2280 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
2281 {
2282     VM* vm = &exec->vm();
2283     NativeCallFrameTracer tracer(vm, exec);
2284     vm->heap.flushWriteBarrierBuffer(cell);
2285 }
2286
2287 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2288 {
2289     VM* vm = &exec->vm();
2290     NativeCallFrameTracer tracer(vm, exec);
2291     vm->heap.writeBarrier(cell);
2292 }
2293
2294 void JIT_OPERATION operationWriteBarrierSlowPath(ExecState* exec, JSCell* cell)
2295 {
2296     VM* vm = &exec->vm();
2297     NativeCallFrameTracer tracer(vm, exec);
2298     vm->heap.writeBarrierSlowPath(cell);
2299 }
2300
2301 void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
2302 {
2303     NativeCallFrameTracer tracer(vm, exec);
2304     genericUnwind(vm, exec);
2305     ASSERT(vm->targetMachinePCForThrow);
2306 }
2307
2308 void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
2309 {
2310     vm->topCallFrame = exec->callerFrame();
2311     genericUnwind(vm, exec, UnwindFromCallerFrame);
2312     ASSERT(vm->targetMachinePCForThrow);
2313 }
2314
2315 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2316 {
2317     VM* vm = &exec->vm();
2318     NativeCallFrameTracer tracer(vm, exec);
2319     genericUnwind(vm, exec);
2320 }
2321
2322 // This function "should" just take the ExecState*, but doing so would make it more difficult
2323 // to call from exception check sites. So, unlike all of our other functions, we allow
2324 // ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
2325 // particularly safe here since this is never called on the critical path - it's only for
2326 // testing.
2327 void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
2328 {
2329     VM* vm = &exec->vm();
2330     NativeCallFrameTracer tracer(vm, exec);
2331 #if COMPILER(GCC_OR_CLANG)
2332     void* returnPC = __builtin_return_address(0);
2333     doExceptionFuzzing(exec, "JITOperations", returnPC);
2334 #endif // COMPILER(GCC_OR_CLANG)
2335 }
2336
2337 EncodedJSValue JIT_OPERATION operationHasGenericProperty(ExecState* exec, EncodedJSValue encodedBaseValue, JSCell* propertyName)
2338 {
2339     VM& vm = exec->vm();
2340     NativeCallFrameTracer tracer(&vm, exec);
2341     JSValue baseValue = JSValue::decode(encodedBaseValue);
2342     if (baseValue.isUndefinedOrNull())
2343         return JSValue::encode(jsBoolean(false));
2344
2345     JSObject* base = baseValue.toObject(exec);
2346     if (!base)
2347         return JSValue::encode(JSValue());
2348     return JSValue::encode(jsBoolean(base->hasPropertyGeneric(exec, asString(propertyName)->toIdentifier(exec), PropertySlot::InternalMethodType::GetOwnProperty)));
2349 }
2350
2351 EncodedJSValue JIT_OPERATION operationHasIndexedProperty(ExecState* exec, JSCell* baseCell, int32_t subscript)
2352 {
2353     VM& vm = exec->vm();
2354     NativeCallFrameTracer tracer(&vm, exec);
2355     JSObject* object = baseCell->toObject(exec, exec->lexicalGlobalObject());
2356     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript, PropertySlot::InternalMethodType::GetOwnProperty)));
2357 }
2358     
2359 JSCell* JIT_OPERATION operationGetPropertyEnumerator(ExecState* exec, JSCell* cell)
2360 {
2361     VM& vm = exec->vm();
2362     NativeCallFrameTracer tracer(&vm, exec);
2363
2364     JSObject* base = cell->toObject(exec, exec->lexicalGlobalObject());
2365
2366     return propertyNameEnumerator(exec, base);
2367 }
2368
2369 EncodedJSValue JIT_OPERATION operationNextEnumeratorPname(ExecState* exec, JSCell* enumeratorCell, int32_t index)
2370 {
2371     VM& vm = exec->vm();
2372     NativeCallFrameTracer tracer(&vm, exec);
2373     JSPropertyNameEnumerator* enumerator = jsCast<JSPropertyNameEnumerator*>(enumeratorCell);
2374     JSString* propertyName = enumerator->propertyNameAtIndex(index);
2375     return JSValue::encode(propertyName ? propertyName : jsNull());
2376 }
2377
2378 JSCell* JIT_OPERATION operationToIndexString(ExecState* exec, int32_t index)
2379 {
2380     VM& vm = exec->vm();
2381     NativeCallFrameTracer tracer(&vm, exec);
2382     return jsString(exec, Identifier::from(exec, index).string());
2383 }
2384
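     // The value-add operations below come in unprofiled/profiled and
     // Optimize/NoOptimize flavors: the profiled variants record operand and
     // result types in an ArithProfile for later DFG speculation, and the
     // Optimize variants additionally ask the JITAddIC to generate its
     // out-of-line code, installing the matching NoOptimize variant as the
     // slow-path callee.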
2385 ALWAYS_INLINE static EncodedJSValue unprofiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2386 {
2387     VM* vm = &exec->vm();
2388     NativeCallFrameTracer tracer(vm, exec);
2389     
2390     JSValue op1 = JSValue::decode(encodedOp1);
2391     JSValue op2 = JSValue::decode(encodedOp2);
2392     
2393     return JSValue::encode(jsAdd(exec, op1, op2));
2394 }
2395
2396 ALWAYS_INLINE static EncodedJSValue profiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile)
2397 {
2398     VM* vm = &exec->vm();
2399     NativeCallFrameTracer tracer(vm, exec);
2400     
2401     JSValue op1 = JSValue::decode(encodedOp1);
2402     JSValue op2 = JSValue::decode(encodedOp2);
2403
2404     arithProfile.observeLHSAndRHS(op1, op2);
2405     JSValue result = jsAdd(exec, op1, op2);
2406     arithProfile.observeResult(result);
2407
2408     return JSValue::encode(result);
2409 }
2410
2411 EncodedJSValue JIT_OPERATION operationValueAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2412 {
2413     return unprofiledAdd(exec, encodedOp1, encodedOp2);
2414 }
2415
2416 EncodedJSValue JIT_OPERATION operationValueAddProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2417 {
2418     ASSERT(arithProfile);
2419     return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
2420 }
2421
2422 EncodedJSValue JIT_OPERATION operationValueAddProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
2423 {
2424     VM* vm = &exec->vm();
2425     NativeCallFrameTracer tracer(vm, exec);
2426     
2427     JSValue op1 = JSValue::decode(encodedOp1);
2428     JSValue op2 = JSValue::decode(encodedOp2);
2429
2430     ArithProfile* arithProfile = addIC->arithProfile();
2431     ASSERT(arithProfile);
2432     arithProfile->observeLHSAndRHS(op1, op2);
2433     auto nonOptimizeVariant = operationValueAddProfiledNoOptimize;
2434     addIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2435
2436 #if ENABLE(MATH_IC_STATS)
2437     exec->codeBlock()->dumpMathICStats();
2438 #endif
2439     
2440     JSValue result = jsAdd(exec, op1, op2);
2441     arithProfile->observeResult(result);
2442
2443     return JSValue::encode(result);
2444 }
2445
2446 EncodedJSValue JIT_OPERATION operationValueAddProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
2447 {
2448     VM* vm = &exec->vm();
2449     NativeCallFrameTracer tracer(vm, exec);
2450
2451     ArithProfile* arithProfile = addIC->arithProfile();
2452     ASSERT(arithProfile);
2453     return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
2454 }
2455
2456 EncodedJSValue JIT_OPERATION operationValueAddOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
2457 {
2458     VM* vm = &exec->vm();
2459     NativeCallFrameTracer tracer(vm, exec);
2460
2461     JSValue op1 = JSValue::decode(encodedOp1);
2462     JSValue op2 = JSValue::decode(encodedOp2);
2463
2464     auto nonOptimizeVariant = operationValueAddNoOptimize;
2465     if (ArithProfile* arithProfile = addIC->arithProfile())
2466         arithProfile->observeLHSAndRHS(op1, op2);
2467     addIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2468
2469 #if ENABLE(MATH_IC_STATS)
2470     exec->codeBlock()->dumpMathICStats();
2471 #endif
2472
2473     return JSValue::encode(jsAdd(exec, op1, op2));
2474 }
2475
2476 EncodedJSValue JIT_OPERATION operationValueAddNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC*)
2477 {
2478     VM* vm = &exec->vm();
2479     NativeCallFrameTracer tracer(vm, exec);
2480     
2481     JSValue op1 = JSValue::decode(encodedOp1);
2482     JSValue op2 = JSValue::decode(encodedOp2);
2483     
2484     JSValue result = jsAdd(exec, op1, op2);
2485
2486     return JSValue::encode(result);
2487 }
2488
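// Slow-path helpers for op_mul. Unlike add, multiplication never concatenates
// strings, so both operands are coerced with toNumber() and multiplied as
// doubles. toNumber() can run arbitrary JS (valueOf/toString), e.g.
// `({ valueOf() { throw new Error(); } }) * 1`, so the two coercions are
// separated by RETURN_IF_EXCEPTION: the second operand must not be coerced if
// the first coercion threw. Note that operationValueMulProfiledOptimize below
// observes the operand types itself before generating its stub, so it calls
// profiledMul() with shouldObserveLHSAndRHSTypes = false to avoid counting
// them twice.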
2489 ALWAYS_INLINE static EncodedJSValue unprofiledMul(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2490 {
2491     auto scope = DECLARE_THROW_SCOPE(vm);
2492     JSValue op1 = JSValue::decode(encodedOp1);
2493     JSValue op2 = JSValue::decode(encodedOp2);
2494
2495     double a = op1.toNumber(exec);
2496     RETURN_IF_EXCEPTION(scope, encodedJSValue());
2497     double b = op2.toNumber(exec);
         RETURN_IF_EXCEPTION(scope, encodedJSValue());
2498     return JSValue::encode(jsNumber(a * b));
2499 }
2500
2501 ALWAYS_INLINE static EncodedJSValue profiledMul(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
2502 {
2503     auto scope = DECLARE_THROW_SCOPE(vm);
2504     JSValue op1 = JSValue::decode(encodedOp1);
2505     JSValue op2 = JSValue::decode(encodedOp2);
2506
2507     if (shouldObserveLHSAndRHSTypes)
2508         arithProfile.observeLHSAndRHS(op1, op2);
2509
2510     double a = op1.toNumber(exec);
2511     RETURN_IF_EXCEPTION(scope, encodedJSValue());
2512     double b = op2.toNumber(exec);
2513     RETURN_IF_EXCEPTION(scope, encodedJSValue());
2514     
2515     JSValue result = jsNumber(a * b);
2516     arithProfile.observeResult(result);
2517     return JSValue::encode(result);
2518 }
2519
2520 EncodedJSValue JIT_OPERATION operationValueMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2521 {
2522     VM* vm = &exec->vm();
2523     NativeCallFrameTracer tracer(vm, exec);
2524
2525     return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
2526 }
2527
2528 EncodedJSValue JIT_OPERATION operationValueMulNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC*)
2529 {
2530     VM* vm = &exec->vm();
2531     NativeCallFrameTracer tracer(vm, exec);
2532
2533     return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
2534 }
2535
2536 EncodedJSValue JIT_OPERATION operationValueMulOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
2537 {
2538     VM* vm = &exec->vm();
2539     NativeCallFrameTracer tracer(vm, exec);
2540
2541     auto nonOptimizeVariant = operationValueMulNoOptimize;
2542     if (ArithProfile* arithProfile = mulIC->arithProfile())
2543         arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2544     mulIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2545
2546 #if ENABLE(MATH_IC_STATS)
2547     exec->codeBlock()->dumpMathICStats();
2548 #endif
2549
2550     return unprofiledMul(*vm, exec, encodedOp1, encodedOp2);
2551 }
2552
2553 EncodedJSValue JIT_OPERATION operationValueMulProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2554 {
2555     VM* vm = &exec->vm();
2556     NativeCallFrameTracer tracer(vm, exec);
2557
2558     ASSERT(arithProfile);
2559     return profiledMul(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2560 }
2561
2562 EncodedJSValue JIT_OPERATION operationValueMulProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
2563 {
2564     VM* vm = &exec->vm();
2565     NativeCallFrameTracer tracer(vm, exec);
2566
2567     ArithProfile* arithProfile = mulIC->arithProfile();
2568     ASSERT(arithProfile);
2569     arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2570     auto nonOptimizeVariant = operationValueMulProfiledNoOptimize;
2571     mulIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2572
2573 #if ENABLE(MATH_IC_STATS)
2574     exec->codeBlock()->dumpMathICStats();
2575 #endif
2576
2577     return profiledMul(*vm, exec, encodedOp1, encodedOp2, *arithProfile, false);
2578 }
2579
2580 EncodedJSValue JIT_OPERATION operationValueMulProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
2581 {
2582     VM* vm = &exec->vm();
2583     NativeCallFrameTracer tracer(vm, exec);
2584
2585     ArithProfile* arithProfile = mulIC->arithProfile();
2586     ASSERT(arithProfile);
2587     return profiledMul(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2588 }
2589
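// Slow-path helpers for op_negate. Negation is unary, so only the LHS is
// profiled; otherwise these mirror the binary helpers above, including the
// Optimize/NoOptimize IC protocol (operationArithNegateProfiled and
// operationArithNegate double as the non-optimizing repatch targets).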
2590 ALWAYS_INLINE static EncodedJSValue unprofiledNegate(ExecState* exec, EncodedJSValue encodedOperand)
2591 {
2592     VM& vm = exec->vm();
2593     auto scope = DECLARE_THROW_SCOPE(vm);
2594     NativeCallFrameTracer tracer(&vm, exec);
2595     
2596     JSValue operand = JSValue::decode(encodedOperand);
2597     double number = operand.toNumber(exec);
2598     RETURN_IF_EXCEPTION(scope, encodedJSValue());
2600     return JSValue::encode(jsNumber(-number));
2601 }
2602
2603 ALWAYS_INLINE static EncodedJSValue profiledNegate(ExecState* exec, EncodedJSValue encodedOperand, ArithProfile& arithProfile)
2604 {
2605     VM& vm = exec->vm();
2606     auto scope = DECLARE_THROW_SCOPE(vm);
2607     NativeCallFrameTracer tracer(&vm, exec);
2608
2609     JSValue operand = JSValue::decode(encodedOperand);
2610     arithProfile.observeLHS(operand);
2611     double number = operand.toNumber(exec);
2612     RETURN_IF_EXCEPTION(scope, encodedJSValue());
2614
2615     JSValue result = jsNumber(-number);
2616     arithProfile.observeResult(result);
2617     return JSValue::encode(result);
2618 }
2619
2620 EncodedJSValue JIT_OPERATION operationArithNegate(ExecState* exec, EncodedJSValue operand)
2621 {
2622     return unprofiledNegate(exec, operand);
2623 }
2624
2625 EncodedJSValue JIT_OPERATION operationArithNegateProfiled(ExecState* exec, EncodedJSValue operand, ArithProfile* arithProfile)
2626 {
2627     ASSERT(arithProfile);
2628     return profiledNegate(exec, operand, *arithProfile);
2629 }
2630
2631 EncodedJSValue JIT_OPERATION operationArithNegateProfiledOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
2632 {
2633     VM& vm = exec->vm();
2634     auto scope = DECLARE_THROW_SCOPE(vm);
2635     NativeCallFrameTracer tracer(&vm, exec);
2636     
2637     JSValue operand = JSValue::decode(encodedOperand);
2638
2639     ArithProfile* arithProfile = negIC->arithProfile();
2640     ASSERT(arithProfile);
2641     arithProfile->observeLHS(operand);
2642     negIC->generateOutOfLine(vm, exec->codeBlock(), operationArithNegateProfiled);
2643
2644 #if ENABLE(MATH_IC_STATS)
2645     exec->codeBlock()->dumpMathICStats();
2646 #endif
2647     
2648     double number = operand.toNumber(exec);
2649     RETURN_IF_EXCEPTION(scope, encodedJSValue());
2651     JSValue result = jsNumber(-number);
2652     arithProfile->observeResult(result);
2653     return JSValue::encode(result);
2654 }
2655
2656 EncodedJSValue JIT_OPERATION operationArithNegateOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
2657 {
2658     VM& vm = exec->vm();
2659     auto scope = DECLARE_THROW_SCOPE(vm);
2660     NativeCallFrameTracer tracer(&vm, exec);
2661
2662     JSValue operand = JSValue::decode(encodedOperand);
2663
2664     if (ArithProfile* arithProfile = negIC->arithProfile())
2665         arithProfile->observeLHS(operand);
2666     negIC->generateOutOfLine(vm, exec->codeBlock(), operationArithNegate);
2667
2668 #if ENABLE(MATH_IC_STATS)
2669     exec->codeBlock()->dumpMathICStats();
2670 #endif
2671
2672     double number = operand.toNumber(exec);
2673     RETURN_IF_EXCEPTION(scope, encodedJSValue());
2675     return JSValue::encode(jsNumber(-number));
2676 }
2677
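// Slow-path helpers for op_sub; these mirror the op_mul helpers above,
// including the double-observation guard in the ProfiledOptimize variant.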
2678 ALWAYS_INLINE static EncodedJSValue unprofiledSub(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2679 {
2680     auto scope = DECLARE_THROW_SCOPE(vm);
2681     JSValue op1 = JSValue::decode(encodedOp1);
2682     JSValue op2 = JSValue::decode(encodedOp2);
2683
2684     double a = op1.toNumber(exec);
2685     RETURN_IF_EXCEPTION(scope, encodedJSValue());
2686     double b = op2.toNumber(exec);
         RETURN_IF_EXCEPTION(scope, encodedJSValue());
2687     return JSValue::encode(jsNumber(a - b));
2688 }
2689
2690 ALWAYS_INLINE static EncodedJSValue profiledSub(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
2691 {
2692     auto scope = DECLARE_THROW_SCOPE(vm);
2693     JSValue op1 = JSValue::decode(encodedOp1);
2694     JSValue op2 = JSValue::decode(encodedOp2);
2695
2696     if (shouldObserveLHSAndRHSTypes)
2697         arithProfile.observeLHSAndRHS(op1, op2);
2698
2699     double a = op1.toNumber(exec);
2700     RETURN_IF_EXCEPTION(scope, encodedJSValue());
2701     double b = op2.toNumber(exec);
2702     RETURN_IF_EXCEPTION(scope, encodedJSValue());
2703     
2704     JSValue result = jsNumber(a - b);
2705     arithProfile.observeResult(result);
2706     return JSValue::encode(result);
2707 }
2708
2709 EncodedJSValue JIT_OPERATION operationValueSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2710 {
2711     VM* vm = &exec->vm();
2712     NativeCallFrameTracer tracer(vm, exec);
2713     return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
2714 }
2715
2716 EncodedJSValue JIT_OPERATION operationValueSubProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2717 {
2718     ASSERT(arithProfile);
2719
2720     VM* vm = &exec->vm();
2721     NativeCallFrameTracer tracer(vm, exec);
2722
2723     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2724 }
2725
2726 EncodedJSValue JIT_OPERATION operationValueSubOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2727 {
2728     VM* vm = &exec->vm();
2729     NativeCallFrameTracer tracer(vm, exec);
2730
2731     auto nonOptimizeVariant = operationValueSubNoOptimize;
2732     if (ArithProfile* arithProfile = subIC->arithProfile())
2733         arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2734     subIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2735
2736 #if ENABLE(MATH_IC_STATS)
2737     exec->codeBlock()->dumpMathICStats();
2738 #endif
2739
2740     return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
2741 }
2742
2743 EncodedJSValue JIT_OPERATION operationValueSubNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC*)
2744 {
2745     VM* vm = &exec->vm();
2746     NativeCallFrameTracer tracer(vm, exec);
2747
2748     return unprofiledSub(*vm, exec, encodedOp1, encodedOp2);
2749 }
2750
2751 EncodedJSValue JIT_OPERATION operationValueSubProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2752 {
2753     VM* vm = &exec->vm();
2754     NativeCallFrameTracer tracer(vm, exec);
2755
2756     ArithProfile* arithProfile = subIC->arithProfile();
2757     ASSERT(arithProfile);
2758     arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2759     auto nonOptimizeVariant = operationValueSubProfiledNoOptimize;
2760     subIC->generateOutOfLine(*vm, exec->codeBlock(), nonOptimizeVariant);
2761
2762 #if ENABLE(MATH_IC_STATS)
2763     exec->codeBlock()->dumpMathICStats();
2764 #endif
2765
2766     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile, false);
2767 }
2768
2769 EncodedJSValue JIT_OPERATION operationValueSubProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2770 {
2771     VM* vm = &exec->vm();
2772     NativeCallFrameTracer tracer(vm, exec);
2773
2774     ArithProfile* arithProfile = subIC->arithProfile();
2775     ASSERT(arithProfile);
2776     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2777 }
2778
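// Called from baseline JIT code when the type profiler log fills up; drains
// the log so profiling can continue.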
2779 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2780 {
2781     VM& vm = exec->vm();
2782     NativeCallFrameTracer tracer(&vm, exec);
2783     vm.typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2784 }
2785
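// ShadowChicken maintains a shadow call stack so that tail-deleted frames
// remain visible to the debugger; update() reconciles its log with the
// current machine stack.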
2786 void JIT_OPERATION operationProcessShadowChickenLog(ExecState* exec)
2787 {
2788     VM& vm = exec->vm();
2789     NativeCallFrameTracer tracer(&vm, exec);
2790     vm.shadowChicken().update(vm, exec);
2791 }
2792
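// A terminated-execution exception (e.g. from the script watchdog) must not
// be observable by JS try/catch. Returning 1 tells the calling JIT code that
// the exception is uncatchable and that it should take the unwind path set up
// by genericUnwind() rather than enter the local catch handler.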
2793 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2794 {
2795     VM& vm = exec->vm();
2796     NativeCallFrameTracer tracer(&vm, exec);
2797     auto scope = DECLARE_THROW_SCOPE(vm);
2798     RELEASE_ASSERT(!!scope.exception());
2799
2800     if (isTerminatedExecutionException(scope.exception())) {
2801         genericUnwind(&vm, exec);
2802         return 1;
2803     }
2804     return 0;
2805 }
2806
2807 } // extern "C"
2808
2809 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2810 // definition of getHostCallReturnValue() below because the Windows build
2811 // requires it.
2812 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
2813 {
2814     if (!exec)
2815         return JSValue::encode(JSValue());
2816     return JSValue::encode(exec->vm().hostCallReturnValue);
2817 }
2818
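// Per-architecture thunks for getHostCallReturnValue(). A host call stores
// its result in VM::hostCallReturnValue rather than leaving it in the return
// register; each stub below reconstructs an ExecState* from the current stack
// pointer (the exact offset is ABI-specific) and tail-calls
// getHostCallReturnValueWithExecState() to fetch it.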
2819 #if COMPILER(GCC_OR_CLANG) && CPU(X86_64)
2820 asm (
2821 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2822 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2823 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2824     "lea -8(%rsp), %rdi\n"
2825     "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2826 );
2827
2828 #elif COMPILER(GCC_OR_CLANG) && CPU(X86)
2829 asm (
2830 ".text" "\n" \
2831 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2832 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2833 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2834     "push %ebp\n"
2835     "mov %esp, %eax\n"
2836     "leal -4(%esp), %esp\n"
2837     "push %eax\n"
2838     "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2839     "leal 8(%esp), %esp\n"
2840     "pop %ebp\n"
2841     "ret\n"
2842 );
2843
2844 #elif COMPILER(GCC_OR_CLANG) && CPU(ARM_THUMB2)
2845 asm (
2846 ".text" "\n"
2847 ".align 2" "\n"
2848 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2849 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2850 ".thumb" "\n"
2851 ".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
2852 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2853     "sub r0, sp, #8" "\n"
2854     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2855 );
2856
2857 #elif COMPILER(GCC_OR_CLANG) && CPU(ARM_TRADITIONAL)
2858 asm (
2859 ".text" "\n"
2860 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2861 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2862 INLINE_ARM_FUNCTION(getHostCallReturnValue)
2863 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2864     "sub r0, sp, #8" "\n"
2865     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2866 );
2867
2868 #elif CPU(ARM64)
2869 asm (
2870 ".text" "\n"
2871 ".align 2" "\n"
2872 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2873 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2874 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2875      "sub x0, sp, #16" "\n"
2876      "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2877 );
2878
2879 #elif COMPILER(GCC_OR_CLANG) && CPU(MIPS)
2880
2881 #if WTF_MIPS_PIC
2882 #define LOAD_FUNCTION_TO_T9(function) \
2883         ".set noreorder" "\n" \
2884         ".cpload $25" "\n" \
2885         ".set reorder" "\n" \
2886         "la $t9, " LOCAL_REFERENCE(function) "\n"
2887 #else
2888 #define LOAD_FUNCTION_TO_T9(function) "" "\n"
2889 #endif
2890
2891 asm (
2892 ".text" "\n"
2893 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2894 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2895 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2896     LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
2897     "addi $a0, $sp, -8" "\n"
2898     "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
2899 );
2900
2901 #elif COMPILER(GCC_OR_CLANG) && CPU(SH4)
2902
2903 #define SH4_SCRATCH_REGISTER "r11"
2904
2905 asm (
2906 ".text" "\n"
2907 ".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
2908 HIDE_SYMBOL(getHostCallReturnValue) "\n"
2909 SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
2910     "mov r15, r4" "\n"
2911     "add -8, r4" "\n"
2912     "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
2913     "braf " SH4_SCRATCH_REGISTER "\n"
2914     "nop" "\n"
2915     "1: .balign 4" "\n"
2916     "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
2917 );
2918
2919 #elif COMPILER(MSVC) && CPU(X86)
2920 extern "C" {
2921     __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
2922     {
2923         __asm lea eax, [esp - 4]
2924         __asm mov [esp + 4], eax
2925         __asm jmp getHostCallReturnValueWithExecState
2926     }
2927 }
2928 #endif
2929
2930 } // namespace JSC
2931
2932 #endif // ENABLE(JIT)