8261a69c7d83f2864d67cbcce7664b99b4ce3098
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOperations.cpp
1 /*
2  * Copyright (C) 2013-2019 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "ArithProfile.h"
32 #include "ArrayConstructor.h"
33 #include "CommonSlowPaths.h"
34 #include "DFGCompilationMode.h"
35 #include "DFGDriver.h"
36 #include "DFGOSREntry.h"
37 #include "DFGThunks.h"
38 #include "DFGWorklist.h"
39 #include "Debugger.h"
40 #include "DirectArguments.h"
41 #include "Error.h"
42 #include "ErrorHandlingScope.h"
43 #include "EvalCodeBlock.h"
44 #include "ExceptionFuzz.h"
45 #include "ExecutableBaseInlines.h"
46 #include "FTLOSREntry.h"
47 #include "FrameTracers.h"
48 #include "FunctionCodeBlock.h"
49 #include "GetterSetter.h"
50 #include "HostCallReturnValue.h"
51 #include "ICStats.h"
52 #include "Interpreter.h"
53 #include "JIT.h"
54 #include "JITExceptions.h"
55 #include "JITToDFGDeferredCompilationCallback.h"
56 #include "JSAsyncFunction.h"
57 #include "JSAsyncGeneratorFunction.h"
58 #include "JSCInlines.h"
59 #include "JSCPtrTag.h"
60 #include "JSGeneratorFunction.h"
61 #include "JSGlobalObjectFunctions.h"
62 #include "JSLexicalEnvironment.h"
63 #include "JSWithScope.h"
64 #include "ModuleProgramCodeBlock.h"
65 #include "ObjectConstructor.h"
66 #include "PolymorphicAccess.h"
67 #include "ProgramCodeBlock.h"
68 #include "PropertyName.h"
69 #include "RegExpObject.h"
70 #include "Repatch.h"
71 #include "ScopedArguments.h"
72 #include "ShadowChicken.h"
73 #include "StructureStubInfo.h"
74 #include "SuperSampler.h"
75 #include "TestRunnerUtils.h"
76 #include "ThunkGenerators.h"
77 #include "TypeProfilerLog.h"
78 #include "VMInlines.h"
79 #include <wtf/InlineASM.h>
80
81 namespace JSC {
82
// All JIT operations are exported with C linkage so the JIT can call them by
// raw pointer without C++ name mangling.
extern "C" {

#if COMPILER(MSVC)
void * _ReturnAddress(void);
#pragma intrinsic(_ReturnAddress)

// Address the current operation will return to, i.e. the JIT call site; used
// by repatching code to identify which inline cache invoked us.
#define OUR_RETURN_ADDRESS _ReturnAddress()
#else
#define OUR_RETURN_ADDRESS __builtin_return_address(0)
#endif

#if ENABLE(OPCODE_SAMPLING)
#define CTI_SAMPLER vm->interpreter->sampler()
#else
// Sampling disabled: expands to a null sampler so call sites compile away.
#define CTI_SAMPLER 0
#endif
99
100
// Called from JIT code that detected stack overflow before the call frame was
// fully populated; `codeBlock` is passed explicitly for that reason.
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);
    // Turn the half-built frame into a well-formed one before tracing it,
    // so exception unwinding can walk the stack safely.
    exec->convertToStackOverflowFrame(*vm, codeBlock);
    NativeCallFrameTracer tracer(vm, exec);
    throwStackOverflowError(exec, scope);
}
110
// Arity-check slow path for calls. Returns the number of missing arguments
// the arity-fixup code must supply; a negative result means the frame could
// not be grown and a stack overflow error has been thrown.
int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForCall);
    if (UNLIKELY(missingArgCount < 0)) {
        // Overflow: rebuild the frame as a stack-overflow frame and throw.
        CodeBlock* codeBlock = CommonSlowPaths::codeBlockFromCallFrameCallee(exec, CodeForCall);
        exec->convertToStackOverflowFrame(*vm, codeBlock);
        NativeCallFrameTracer tracer(vm, exec);
        throwStackOverflowError(vm->topCallFrame, scope);
    }

    return missingArgCount;
}
126
// Construct-path twin of operationCallArityCheck; identical logic with
// CodeForConstruct. Negative return means a stack overflow was thrown.
int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForConstruct);
    if (UNLIKELY(missingArgCount < 0)) {
        CodeBlock* codeBlock = CommonSlowPaths::codeBlockFromCallFrameCallee(exec, CodeForConstruct);
        exec->convertToStackOverflowFrame(*vm, codeBlock);
        NativeCallFrameTracer tracer(vm, exec);
        throwStackOverflowError(vm->topCallFrame, scope);
    }

    return missingArgCount;
}
142
// Slow path for op_try_get_by_id after the IC has given up. VMInquiry mode
// performs a side-effect-free lookup, and getPureResult() yields the value
// without invoking getters.
EncodedJSValue JIT_OPERATION operationTryGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);
    // Permanently mark this site as slow so repatching stops being attempted.
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    baseValue.getPropertySlot(exec, ident, slot);

    return JSValue::encode(slot.getPureResult());
}
156
157
158 EncodedJSValue JIT_OPERATION operationTryGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
159 {
160     VM* vm = &exec->vm();
161     NativeCallFrameTracer tracer(vm, exec);
162     Identifier ident = Identifier::fromUid(vm, uid);
163
164     JSValue baseValue = JSValue::decode(base);
165     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
166     baseValue.getPropertySlot(exec, ident, slot);
167
168     return JSValue::encode(slot.getPureResult());
169 }
170
// IC-miss path for op_try_get_by_id: performs the lookup, then repatches the
// inline cache when the result is safely cacheable for the Try kind.
EncodedJSValue JIT_OPERATION operationTryGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);

    baseValue.getPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    // Only cache lookups that are untainted by opaque objects and resolve to
    // a plain value, a getter, or a definite miss.
    if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()) && !slot.isTaintedByOpaqueObject() && (slot.isCacheableValue() || slot.isCacheableGetter() || slot.isUnset()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Try);

    return JSValue::encode(slot.getPureResult());
}
189
// Slow path for op_get_by_id_direct after the IC gave up: own-property lookup
// only (the prototype chain is deliberately not consulted).
EncodedJSValue JIT_OPERATION operationGetByIdDirect(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    Identifier ident = Identifier::fromUid(&vm, uid);
    // Stop attempting to cache this site.
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);

    bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    // Missing own property yields undefined (no prototype walk).
    RELEASE_AND_RETURN(scope, JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined()));
}
206
207 EncodedJSValue JIT_OPERATION operationGetByIdDirectGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
208 {
209     VM& vm = exec->vm();
210     NativeCallFrameTracer tracer(&vm, exec);
211     auto scope = DECLARE_THROW_SCOPE(vm);
212     Identifier ident = Identifier::fromUid(&vm, uid);
213
214     JSValue baseValue = JSValue::decode(base);
215     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);
216
217     bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
218     RETURN_IF_EXCEPTION(scope, encodedJSValue());
219
220     RELEASE_AND_RETURN(scope, JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined()));
221 }
222
// IC-miss path for op_get_by_id_direct: performs the own-property lookup,
// then repatches the inline cache when caching is still worthwhile.
EncodedJSValue JIT_OPERATION operationGetByIdDirectOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);

    bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Direct);

    RELEASE_AND_RETURN(scope, JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined()));
}
241
// Slow path for op_get_by_id after the IC gave up: ordinary [[Get]] semantics
// (prototype chain walked, getters may run).
EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    // Stop attempting to cache this site.
    stubInfo->tookSlowPath = true;
    
    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);
    
    LOG_IC((ICEvent::OperationGetById, baseValue.classInfoOrNull(*vm), ident));
    return JSValue::encode(baseValue.get(exec, ident, slot));
}
258
259 EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
260 {
261     SuperSamplerScope superSamplerScope(false);
262     
263     VM* vm = &exec->vm();
264     NativeCallFrameTracer tracer(vm, exec);
265     
266     JSValue baseValue = JSValue::decode(base);
267     PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
268     Identifier ident = Identifier::fromUid(vm, uid);
269     LOG_IC((ICEvent::OperationGetByIdGeneric, baseValue.classInfoOrNull(*vm), ident));
270     return JSValue::encode(baseValue.get(exec, ident, slot));
271 }
272
// IC-miss path for op_get_by_id. The lookup is driven through a callback so
// repatching happens while the PropertySlot is still live, before the value
// (and any getter) is materialized.
EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    LOG_IC((ICEvent::OperationGetByIdOptimize, baseValue.classInfoOrNull(*vm), ident));

    return JSValue::encode(baseValue.getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
            repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Normal);
        return found ? slot.getValue(exec, ident) : jsUndefined();
    }));
}
290
// Slow path for op_get_by_id_with_this after the IC gave up. The lookup
// starts at `base` while the slot is rooted at the decoded `this` value, so a
// getter observes the distinct receiver (e.g. super.property access).
EncodedJSValue JIT_OPERATION operationGetByIdWithThis(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    JSValue thisValue = JSValue::decode(thisEncoded);
    // Slot carries thisValue; the get itself begins at baseValue.
    PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);

    return JSValue::encode(baseValue.get(exec, ident, slot));
}
307
308 EncodedJSValue JIT_OPERATION operationGetByIdWithThisGeneric(ExecState* exec, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
309 {
310     SuperSamplerScope superSamplerScope(false);
311
312     VM* vm = &exec->vm();
313     NativeCallFrameTracer tracer(vm, exec);
314     Identifier ident = Identifier::fromUid(vm, uid);
315
316     JSValue baseValue = JSValue::decode(base);
317     JSValue thisValue = JSValue::decode(thisEncoded);
318     PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
319
320     return JSValue::encode(baseValue.get(exec, ident, slot));
321 }
322
// IC-miss path for op_get_by_id_with_this: lookup driven through a callback
// so the IC can be repatched while the slot is live; the slot is rooted at
// the decoded `this` value rather than the lookup base.
EncodedJSValue JIT_OPERATION operationGetByIdWithThisOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    JSValue thisValue = JSValue::decode(thisEncoded);
    LOG_IC((ICEvent::OperationGetByIdWithThisOptimize, baseValue.classInfoOrNull(*vm), ident));

    PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
    return JSValue::encode(baseValue.getPropertySlot(exec, ident, slot, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
            repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::WithThis);
        return found ? slot.getValue(exec, ident) : jsUndefined();
    }));
}
342
// Slow path for op_in_by_id after the IC gave up. Throws a TypeError when the
// base operand is not an object, per the semantics of the "in" operator.
EncodedJSValue JIT_OPERATION operationInById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    // Stop attempting to cache this site.
    stubInfo->tookSlowPath = true;

    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    if (!baseValue.isObject()) {
        throwException(exec, scope, createInvalidInParameterError(exec, baseValue));
        return JSValue::encode(jsUndefined());
    }
    JSObject* baseObject = asObject(baseValue);

    LOG_IC((ICEvent::OperationInById, baseObject->classInfo(vm), ident));

    // Remaining work may throw; the caller checks for exceptions.
    scope.release();
    PropertySlot slot(baseObject, PropertySlot::InternalMethodType::HasProperty);
    return JSValue::encode(jsBoolean(baseObject->getPropertySlot(exec, ident, slot)));
}
368
369 EncodedJSValue JIT_OPERATION operationInByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
370 {
371     SuperSamplerScope superSamplerScope(false);
372
373     VM& vm = exec->vm();
374     NativeCallFrameTracer tracer(&vm, exec);
375     auto scope = DECLARE_THROW_SCOPE(vm);
376
377     Identifier ident = Identifier::fromUid(&vm, uid);
378
379     JSValue baseValue = JSValue::decode(base);
380     if (!baseValue.isObject()) {
381         throwException(exec, scope, createInvalidInParameterError(exec, baseValue));
382         return JSValue::encode(jsUndefined());
383     }
384     JSObject* baseObject = asObject(baseValue);
385
386     LOG_IC((ICEvent::OperationInByIdGeneric, baseObject->classInfo(vm), ident));
387
388     scope.release();
389     PropertySlot slot(baseObject, PropertySlot::InternalMethodType::HasProperty);
390     return JSValue::encode(jsBoolean(baseObject->getPropertySlot(exec, ident, slot)));
391 }
392
// IC-miss path for op_in_by_id: performs the HasProperty lookup, then
// repatches the inline cache when caching is still worthwhile.
EncodedJSValue JIT_OPERATION operationInByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    if (!baseValue.isObject()) {
        throwException(exec, scope, createInvalidInParameterError(exec, baseValue));
        return JSValue::encode(jsUndefined());
    }
    JSObject* baseObject = asObject(baseValue);

    LOG_IC((ICEvent::OperationInByIdOptimize, baseObject->classInfo(vm), ident));

    scope.release();
    PropertySlot slot(baseObject, PropertySlot::InternalMethodType::HasProperty);
    bool found = baseObject->getPropertySlot(exec, ident, slot);
    if (stubInfo->considerCaching(exec->codeBlock(), baseObject->structure(vm)))
        repatchInByID(exec, baseObject, ident, found, slot, *stubInfo);
    return JSValue::encode(jsBoolean(found));
}
419
420 EncodedJSValue JIT_OPERATION operationInByVal(ExecState* exec, JSCell* base, EncodedJSValue key)
421 {
422     SuperSamplerScope superSamplerScope(false);
423     
424     VM* vm = &exec->vm();
425     NativeCallFrameTracer tracer(vm, exec);
426
427     return JSValue::encode(jsBoolean(CommonSlowPaths::opInByVal(exec, base, JSValue::decode(key))));
428 }
429
// Slow path for strict-mode put_by_id after the IC gave up; ordinary [[Set]]
// semantics (setters may run, failures throw in strict mode).
void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    // Stop attempting to cache this site.
    stubInfo->tookSlowPath = true;
    
    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    LOG_IC((ICEvent::OperationPutByIdStrict, baseValue.classInfoOrNull(*vm), ident));

    // `true` selects strict-mode put semantics.
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
}
446
447 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
448 {
449     SuperSamplerScope superSamplerScope(false);
450     
451     VM* vm = &exec->vm();
452     NativeCallFrameTracer tracer(vm, exec);
453     
454     stubInfo->tookSlowPath = true;
455     
456     JSValue baseValue = JSValue::decode(encodedBase);
457     Identifier ident = Identifier::fromUid(vm, uid);
458     LOG_IC((ICEvent::OperationPutByIdNonStrict, baseValue.classInfoOrNull(*vm), ident));
459     PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
460     baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
461 }
462
// Slow path for strict-mode direct put_by_id (define on the object itself,
// e.g. object-literal properties) after the IC gave up.
void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    
    // Stop attempting to cache this site.
    stubInfo->tookSlowPath = true;
    
    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(&vm, uid);
    LOG_IC((ICEvent::OperationPutByIdDirectStrict, baseValue.classInfoOrNull(vm), ident));
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    // Direct define: bypasses setters/prototypes; reifies lazy properties first.
    CommonSlowPaths::putDirectWithReify(vm, exec, asObject(baseValue), ident, JSValue::decode(encodedValue), slot);
}
478
479 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
480 {
481     SuperSamplerScope superSamplerScope(false);
482     
483     VM& vm = exec->vm();
484     NativeCallFrameTracer tracer(&vm, exec);
485     
486     stubInfo->tookSlowPath = true;
487     
488     JSValue baseValue = JSValue::decode(encodedBase);
489     Identifier ident = Identifier::fromUid(&vm, uid);
490     LOG_IC((ICEvent::OperationPutByIdDirectNonStrict, baseValue.classInfoOrNull(vm), ident));
491     PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
492     CommonSlowPaths::putDirectWithReify(vm, exec, asObject(baseValue), ident, JSValue::decode(encodedValue), slot);
493 }
494
// IC-miss path for strict-mode put_by_id: performs the put, then repatches
// the inline cache for future accesses at this site.
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type; re-checked after the put below.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdStrictOptimize, baseValue.classInfoOrNull(*vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseValue, true, codeBlock->putByIdContext());

    // Capture the structure before the put so repatching caches the
    // pre-transition structure.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);
    RETURN_IF_EXCEPTION(scope, void());

    // NOTE(review): the put can run arbitrary JS; presumably this bails if the
    // stub changed underneath us in the meantime — confirm against Repatch.cpp.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
522
// IC-miss path for sloppy-mode put_by_id: performs the put, then repatches
// the inline cache for future accesses at this site.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Snapshot the stub's access type; re-checked after the put below.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    LOG_IC((ICEvent::OperationPutByIdNonStrictOptimize, baseValue.classInfoOrNull(*vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseValue, false, codeBlock->putByIdContext());

    // Capture the structure before the put so repatching caches the
    // pre-transition structure.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;    
    baseValue.putInline(exec, ident, value, slot);
    RETURN_IF_EXCEPTION(scope, void());

    // NOTE(review): presumably bails if the stub changed during the put —
    // confirm against Repatch.cpp.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
550
// IC-miss path for strict-mode direct put_by_id: performs the direct define,
// then repatches the inline cache (Direct kind).
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    
    Identifier ident = Identifier::fromUid(&vm, uid);
    // Snapshot the stub's access type; re-checked after the define below.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectStrictOptimize, baseObject->classInfo(vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseObject, true, codeBlock->putByIdContext());
    // putDirectWithReify reports the pre-put structure via this out-param.
    Structure* structure = nullptr;
    CommonSlowPaths::putDirectWithReify(vm, exec, baseObject, ident, value, slot, &structure);
    RETURN_IF_EXCEPTION(scope, void());
    
    // NOTE(review): presumably bails if the stub changed during the define —
    // confirm against Repatch.cpp.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
577
// IC-miss path for sloppy-mode direct put_by_id: performs the direct define,
// then repatches the inline cache (Direct kind).
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);
    
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    
    Identifier ident = Identifier::fromUid(&vm, uid);
    // Snapshot the stub's access type; re-checked after the define below.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    LOG_IC((ICEvent::OperationPutByIdDirectNonStrictOptimize, baseObject->classInfo(vm), ident));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseObject, false, codeBlock->putByIdContext());
    // putDirectWithReify reports the pre-put structure via this out-param.
    Structure* structure = nullptr;
    CommonSlowPaths::putDirectWithReify(vm, exec, baseObject, ident, value, slot, &structure);
    RETURN_IF_EXCEPTION(scope, void());
    
    // NOTE(review): presumably bails if the stub changed during the define —
    // confirm against Repatch.cpp.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;
    
    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
604
605 ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
606 {
607     return value.isString() || value.isSymbol();
608 }
609
// Generic put-by-val slow path: handles integer subscripts via indexed-storage
// fast/slow cases, otherwise converts the subscript to a property key and does
// an ordinary put.
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    if (LIKELY(subscript.isUInt32())) {
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            // Fast case: indexed storage already has room at this index.
            if (object->canSetIndexQuickly(i)) {
                object->setIndexQuickly(vm, i, value);
                return;
            }

            // FIXME: This will make us think that in-bounds typed array accesses are actually
            // out-of-bounds.
            // https://bugs.webkit.org/show_bug.cgi?id=149886
            byValInfo->arrayProfile->setOutOfBounds();
            scope.release();
            object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            return;
        }

        scope.release();
        baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    }

    // Non-integer subscript: convert to a property key (may run arbitrary JS
    // via toString/toPrimitive).
    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    RETURN_IF_EXCEPTION(scope, void());

    // If a by-val stub exists but this access doesn't match its cached
    // identifier, record that the slow path was taken.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    scope.release();
    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.putInline(callFrame, property, value, slot);
}
649
// Direct put-by-val slow path (define on the object itself, bypassing setters
// and the prototype chain). Handles integer-like subscripts through
// putDirectIndex, otherwise converts to a property key and defines directly.
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();

    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));

        switch (baseObject->indexingType()) {
        case ALL_INT32_INDEXING_TYPES:
        case ALL_DOUBLE_INDEXING_TYPES:
        case ALL_CONTIGUOUS_INDEXING_TYPES:
        case ALL_ARRAY_STORAGE_INDEXING_TYPES:
            // In-bounds for the existing vector: no profile update needed.
            if (index < baseObject->butterfly()->vectorLength())
                break;
            FALLTHROUGH;
        default:
            byValInfo->arrayProfile->setOutOfBounds();
            break;
        }

        scope.release();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // A double that exactly equals a valid uint32 index is treated as one.
    if (subscript.isDouble()) {
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            scope.release();
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    RETURN_IF_EXCEPTION(scope, void());

    // A string key that parses as an array index still goes through the
    // indexed-storage path.
    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        scope.release();
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // If a by-val stub exists but this access doesn't match its cached
    // identifier, record that the slow path was taken.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    scope.release();
    PutPropertySlot slot(baseObject, isStrictMode);
    CommonSlowPaths::putDirectWithReify(vm, callFrame, baseObject, property, value, slot);
}
709
// Outcome of a by-val IC optimization attempt (see tryPutByValOptimize below).
enum class OptimizationResult {
    NotOptimized, // No stub was installed this time.
    SeenOnce,     // NOTE(review): presumably "first sighting recorded, retry later" — confirm at call sites.
    Optimized,    // A specialized stub was compiled and patched in.
    GiveUp,       // Site is unprofitable (e.g. too generic); stop trying.
};
716
// Attempts to specialize a baseline put_by_val site.
// - Int32 subscript on an object with optimizable indexing: compiles an
//   array-mode-specialized stub (under the CodeBlock lock).
// - String/symbol subscript: caches the identifier on first sight (SeenOnce),
//   compiles a cached-id stub when the same identifier repeats, gives up when a
//   different identifier shows up.
// - After 10 unpatched slow-path executions, returns GiveUp so the caller
//   repatches the site to the generic path.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    // Never optimize puts to copy-on-write arrays.
    if (baseValue.isObject() && isCopyOnWrite(baseValue.getObject()->indexingMode()))
        return OptimizationResult::GiveUp;

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            // Only recompile if the array mode actually changed; same mode means the
            // existing code already handles this structure.
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
                JIT::compilePutByVal(locker, &vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        // toPropertyKey can run arbitrary JS (toString), so check for exceptions.
        const Identifier propertyName = subscript.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, OptimizationResult::GiveUp);
        // Index-like string keys are left to the indexed path above.
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same identifier twice in a row: worth a cached-id stub.
                    JIT::compilePutByValWithCachedId<OpPutByVal>(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: record the identifier (and keep symbols alive via
                // the cachedSymbol write barrier) and wait for a repeat.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
790
791 void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
792 {
793     VM& vm = exec->vm();
794     NativeCallFrameTracer tracer(&vm, exec);
795     auto scope = DECLARE_THROW_SCOPE(vm);
796
797     JSValue baseValue = JSValue::decode(encodedBaseValue);
798     JSValue subscript = JSValue::decode(encodedSubscript);
799     JSValue value = JSValue::decode(encodedValue);
800     OptimizationResult result = tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
801     RETURN_IF_EXCEPTION(scope, void());
802     if (result == OptimizationResult::GiveUp) {
803         // Don't ever try to optimize.
804         byValInfo->tookSlowPath = true;
805         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationPutByValGeneric);
806     }
807     RELEASE_AND_RETURN(scope, putByVal(exec, baseValue, subscript, value, byValInfo));
808 }
809
// Direct-put counterpart of tryPutByValOptimize (used by put_by_val_direct,
// i.e. own-property definition that bypasses the prototype chain). Same
// protocol: compile an array-mode stub for int32 subscripts, a cached-id stub
// for a repeated string/symbol key, or GiveUp after 10 unpatched executions.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            // Only recompile if the array mode actually changed.
            if (jitArrayModePermitsPutDirect(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(locker, &vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        // toPropertyKey can run arbitrary JS (toString), so check for exceptions.
        const Identifier propertyName = subscript.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, OptimizationResult::GiveUp);
        // Index-like string keys are left to the indexed path above.
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same identifier twice in a row: worth a cached-id stub.
                    JIT::compilePutByValWithCachedId<OpPutByValDirect>(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: record the identifier (and keep symbols alive via
                // the cachedSymbol write barrier) and wait for a repeat.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val_direct may be polymorphic. We count up slowPathCount
        // even if the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
877
878 void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
879 {
880     VM& vm = exec->vm();
881     NativeCallFrameTracer tracer(&vm, exec);
882     auto scope = DECLARE_THROW_SCOPE(vm);
883
884     JSValue baseValue = JSValue::decode(encodedBaseValue);
885     JSValue subscript = JSValue::decode(encodedSubscript);
886     JSValue value = JSValue::decode(encodedValue);
887     RELEASE_ASSERT(baseValue.isObject());
888     JSObject* object = asObject(baseValue);
889     OptimizationResult result = tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
890     RETURN_IF_EXCEPTION(scope, void());
891     if (result == OptimizationResult::GiveUp) {
892         // Don't ever try to optimize.
893         byValInfo->tookSlowPath = true;
894         ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationDirectPutByValGeneric);
895     }
896
897     RELEASE_AND_RETURN(scope, directPutByVal(exec, object, subscript, value, byValInfo));
898 }
899
900 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
901 {
902     VM& vm = exec->vm();
903     NativeCallFrameTracer tracer(&vm, exec);
904     
905     JSValue baseValue = JSValue::decode(encodedBaseValue);
906     JSValue subscript = JSValue::decode(encodedSubscript);
907     JSValue value = JSValue::decode(encodedValue);
908
909     putByVal(exec, baseValue, subscript, value, byValInfo);
910 }
911
912
913 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
914 {
915     VM& vm = exec->vm();
916     NativeCallFrameTracer tracer(&vm, exec);
917     
918     JSValue baseValue = JSValue::decode(encodedBaseValue);
919     JSValue subscript = JSValue::decode(encodedSubscript);
920     JSValue value = JSValue::decode(encodedValue);
921     RELEASE_ASSERT(baseValue.isObject());
922     directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
923 }
924
925 EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
926 {
927     VM* vm = &exec->vm();
928     auto scope = DECLARE_THROW_SCOPE(*vm);
929
930     execCallee->setCodeBlock(0);
931     
932     if (!isHostFunction(execCallee->guaranteedJSValueCallee(), globalFuncEval))
933         return JSValue::encode(JSValue());
934
935     JSValue result = eval(execCallee);
936     RETURN_IF_EXCEPTION(scope, encodedJSValue());
937     
938     return JSValue::encode(result);
939 }
940
// Slow path for a call/construct whose callee is not a JSFunction: runs the
// native (host) implementation, or throws "not a function"/"not a constructor".
// Returns a (target, frame-action) pair for the JIT: on success the target is
// the getHostCallReturnValue thunk; on exception it is the throw-exception stub.
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(*vm, callee, callData);
    
        // A JS callee would have been handled by the caller before reaching here.
        ASSERT(callType != CallType::JS);
    
        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            // Stash the result in the VM; the JIT fetches it via getHostCallReturnValue.
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (UNLIKELY(scope.exception())) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            return encodeResult(
                tagCFunctionPtr<void*, JSEntryPtrTag>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
    
        ASSERT(callType == CallType::None);
        throwException(exec, scope, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);
    
    ConstructData constructData;
    ConstructType constructType = getConstructData(*vm, callee, constructData);
    
    ASSERT(constructType != ConstructType::JS);
    
    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (UNLIKELY(scope.exception())) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Unlike the call path above, the construct path always keeps the frame.
        return encodeResult(tagCFunctionPtr<void*, JSEntryPtrTag>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }
    
    ASSERT(constructType == ConstructType::None);
    throwException(exec, scope, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
1003
// Slow path taken while a call site is still unlinked: resolves the callee,
// compiles it if necessary, links the site (only once it has been seen before,
// so one-shot sites are not linked), and returns the (entry point, frame-action)
// pair for the JIT to jump to.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    // Direct call sites are linked through operationLinkDirectCall instead.
    RELEASE_ASSERT(!callLinkInfo->isDirect());
    
    JSValue calleeAsValue = execCallee->guaranteedJSValueCallee();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        // Not a JSFunction: InternalFunctions go through a shared trampoline;
        // anything else is dispatched (or rejected) by handleHostCall.
        if (auto* internalFunction = jsDynamicCast<InternalFunction*>(*vm, calleeAsValue)) {
            MacroAssemblerCodePtr<JSEntryPtrTag> codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
            RELEASE_ASSERT(!!codePtr);

            // First execution only records the sighting; link on the second.
            if (!callLinkInfo->seenOnce())
                callLinkInfo->setSeen();
            else
                linkFor(execCallee, *callLinkInfo, nullptr, internalFunction, codePtr);

            void* linkedTarget = codePtr.executableAddress();
            return encodeResult(linkedTarget, reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
        RELEASE_AND_RETURN(throwScope, handleHostCall(execCallee, calleeAsValue, callLinkInfo));
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
    CodeBlock* codeBlock = nullptr;
    if (executable->isHostFunction())
        codePtr = executable->entrypointFor(kind, MustCheckArity);
    else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // On any thrown error, return the throw-exception stub as the jump target.
        auto handleThrowException = [&] () {
            void* throwTarget = vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress();
            return encodeResult(throwTarget, reinterpret_cast<void*>(KeepTheFrame));
        };

        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            throwException(exec, throwScope, createNotAConstructorError(exec, callee));
            return handleThrowException();
        }

        // prepareForExecution compiles (if needed) and writes the CodeBlock into
        // the callee frame's code block slot.
        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        Exception* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, *codeBlockSlot);
        EXCEPTION_ASSERT(throwScope.exception() == error);
        if (UNLIKELY(error))
            return handleThrowException();
        codeBlock = *codeBlockSlot;
        // Skip the arity check only when the argument count is already sufficient
        // and this is not a varargs call (whose count is unknown here).
        ArityCheckMode arity;
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }
    // First execution only records the sighting; link on the second.
    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);

    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
1074
// Links a direct (statically-known-callee) call site: compiles the callee if
// needed, then patches the site via linkDirectFor(). Returns void; if
// compilation throws, the site is simply left unlinked.
void JIT_OPERATION operationLinkDirectCall(ExecState* exec, CallLinkInfo* callLinkInfo, JSFunction* callee)
{
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);
    
    RELEASE_ASSERT(callLinkInfo->isDirect());
    
    // This would happen if the executable died during GC but the CodeBlock did not die. That should
    // not happen because the CodeBlock should have a weak reference to any executable it uses for
    // this purpose.
    RELEASE_ASSERT(callLinkInfo->executable());
    
    // Having a CodeBlock indicates that this is linked. We shouldn't be taking this path if it's
    // linked.
    RELEASE_ASSERT(!callLinkInfo->codeBlock());
    
    // We just don't support this yet.
    RELEASE_ASSERT(!callLinkInfo->isVarargs());
    
    ExecutableBase* executable = callLinkInfo->executable();
    RELEASE_ASSERT(callee->executable() == callLinkInfo->executable());

    JSScope* scope = callee->scopeUnchecked();

    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
    CodeBlock* codeBlock = nullptr;
    if (executable->isHostFunction())
        codePtr = executable->entrypointFor(kind, MustCheckArity);
    else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        // Constructability was validated when the direct call site was created.
        RELEASE_ASSERT(isCall(kind) || functionExecutable->constructAbility() != ConstructAbility::CannotConstruct);
        
        // Compile (if needed) and obtain the CodeBlock for this callee.
        Exception* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, codeBlock);
        EXCEPTION_ASSERT_UNUSED(throwScope, throwScope.exception() == error);
        if (UNLIKELY(error))
            return;
        // Pick the arity-checked entry point unless the site always passes enough
        // arguments for this CodeBlock.
        unsigned argumentStackSlots = callLinkInfo->maxNumArguments();
        if (argumentStackSlots < static_cast<size_t>(codeBlock->numParameters()))
            codePtr = functionExecutable->entrypointFor(kind, MustCheckArity);
        else
            codePtr = functionExecutable->entrypointFor(kind, ArityCheckNotRequired);
    }
    
    linkDirectFor(exec, *callLinkInfo, codeBlock, codePtr);
}
1124
// Shared slow path for virtual (unlinked every time) calls: resolves the
// callee, compiles it if it has no JIT code for this specialization kind, and
// returns the arity-checking entry point. The resolved callee cell is passed
// back through calleeAsFunctionCell so operationLinkPolymorphicCall can record
// it; it is null when the callee is not a JSFunction.
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->guaranteedJSValueCallee();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell)) {
        // Not a JSFunction: InternalFunctions go through a shared trampoline;
        // anything else is dispatched (or rejected) by handleHostCall.
        if (jsDynamicCast<InternalFunction*>(*vm, calleeAsValue)) {
            MacroAssemblerCodePtr<JSEntryPtrTag> codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
            ASSERT(!!codePtr);
            return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
        RELEASE_AND_RETURN(throwScope, handleHostCall(execCallee, calleeAsValue, callLinkInfo));
    }
    
    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            throwException(exec, throwScope, createNotAConstructorError(exec, function));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Compile and write the CodeBlock into the callee frame's slot; on error,
        // return the throw-exception stub as the jump target.
        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        Exception* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, function, scope, kind, *codeBlockSlot);
        EXCEPTION_ASSERT(throwScope.exception() == error);
        if (UNLIKELY(error)) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
    }
    // Virtual calls always use the arity-checking entry point, since the
    // argument count is not known statically at the call site.
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
1172
1173 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
1174 {
1175     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
1176     JSCell* calleeAsFunctionCell;
1177     SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
1178
1179     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
1180     
1181     return result;
1182 }
1183
1184 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
1185 {
1186     JSCell* calleeAsFunctionCellIgnored;
1187     return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
1188 }
1189
1190 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1191 {
1192     VM* vm = &exec->vm();
1193     NativeCallFrameTracer tracer(vm, exec);
1194     
1195     return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1196 }
1197
1198 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1199 {
1200     VM* vm = &exec->vm();
1201     NativeCallFrameTracer tracer(vm, exec);
1202
1203     return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1204 }
1205
1206 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1207 {
1208     VM* vm = &exec->vm();
1209     NativeCallFrameTracer tracer(vm, exec);
1210
1211     return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1212 }
1213
1214 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1215 {
1216     VM* vm = &exec->vm();
1217     NativeCallFrameTracer tracer(vm, exec);
1218
1219     return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1220 }
1221
1222 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1223 {
1224     VM* vm = &exec->vm();
1225     NativeCallFrameTracer tracer(vm, exec);
1226
1227     return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1228 }
1229
// Slow path for string equality. On 64-bit the JIT expects an encoded boolean
// JSValue; on 32-bit it expects a raw size_t flag — hence the dual signature.
#if USE(JSVALUE64)
EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    bool result = asString(left)->equal(exec, asString(right));
#if USE(JSVALUE64)
    return JSValue::encode(jsBoolean(result));
#else
    return result;
#endif
}
1246
1247 size_t JIT_OPERATION operationCompareStrictEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1248 {
1249     VM* vm = &exec->vm();
1250     NativeCallFrameTracer tracer(vm, exec);
1251
1252     JSValue src1 = JSValue::decode(encodedOp1);
1253     JSValue src2 = JSValue::decode(encodedOp2);
1254
1255     return JSValue::strictEqual(exec, src1, src2);
1256 }
1257
1258 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
1259 {
1260     VM* vm = &exec->vm();
1261     NativeCallFrameTracer tracer(vm, exec);
1262     return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
1263 }
1264
1265 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
1266 {
1267     VM* vm = &exec->vm();
1268     NativeCallFrameTracer tracer(vm, exec);
1269     JSValue sizeValue = JSValue::decode(size);
1270     return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
1271 }
1272
1273 }
1274
1275 template<typename FunctionType>
1276 static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
1277 {
1278     VM& vm = exec->vm();
1279     ASSERT(functionExecutable->inherits<FunctionExecutable>(vm));
1280     NativeCallFrameTracer tracer(&vm, exec);
1281     if (isInvalidated)
1282         return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1283     return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1284 }
1285
1286 extern "C" {
1287
1288 EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1289 {
1290     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
1291 }
1292
1293 EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1294 {
1295     return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
1296 }
1297
1298 EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1299 {
1300     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
1301 }
1302
1303 EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1304 {
1305     return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
1306 }
1307
1308 EncodedJSValue JIT_OPERATION operationNewAsyncFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1309 {
1310     return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, false);
1311 }
1312
1313 EncodedJSValue JIT_OPERATION operationNewAsyncFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1314 {
1315     return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, true);
1316 }
1317
1318 EncodedJSValue JIT_OPERATION operationNewAsyncGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1319 {
1320     return operationNewFunctionCommon<JSAsyncGeneratorFunction>(exec, scope, functionExecutable, false);
1321 }
1322     
1323 EncodedJSValue JIT_OPERATION operationNewAsyncGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
1324 {
1325     return operationNewFunctionCommon<JSAsyncGeneratorFunction>(exec, scope, functionExecutable, true);
1326 }
1327     
1328 void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
1329 {
1330     VM* vm = &exec->vm();
1331     NativeCallFrameTracer tracer(vm, exec);
1332
1333     JSFunction* func = jsCast<JSFunction*>(funcCell);
1334     JSValue name = JSValue::decode(encodedName);
1335     func->setFunctionName(exec, name);
1336 }
1337
1338 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1339 {
1340     VM* vm = &exec->vm();
1341     NativeCallFrameTracer tracer(vm, exec);
1342
1343     return constructEmptyObject(exec, structure);
1344 }
1345
1346 JSCell* JIT_OPERATION operationNewRegexp(ExecState* exec, JSCell* regexpPtr)
1347 {
1348     SuperSamplerScope superSamplerScope(false);
1349     VM& vm = exec->vm();
1350     NativeCallFrameTracer tracer(&vm, exec);
1351
1352     RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1353     ASSERT(regexp->isValid());
1354     return RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp);
1355 }
1356
// The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
// existing DFG slow path generator machinery when creating the slow path for CheckTraps
// in the DFG. If a DFG slow path generator that supports a void return type is added in the
// future, we can switch to using that then.
UnusedPtr JIT_OPERATION operationHandleTraps(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    // Callers are expected to check needTrapHandling() before taking this slow path.
    ASSERT(vm.needTrapHandling());
    vm.handleTraps(exec);
    return nullptr;
}
1369
1370 void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookType)
1371 {
1372     VM& vm = exec->vm();
1373     NativeCallFrameTracer tracer(&vm, exec);
1374
1375     vm.interpreter->debug(exec, static_cast<DebugHookType>(debugHookType));
1376 }
1377
1378 #if ENABLE(DFG_JIT)
// Refreshes the code block's value-profile predictions and resets its tier-up
// trigger so optimization is reconsidered after another warm-up period.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1384
// Baseline->DFG tier-up slow path. Decides whether to kick off a DFG compile,
// jettison a misbehaving optimized replacement, or OSR-enter already-compiled
// optimized code. Returns encodeResult(targetPC, dataBuffer): a non-null
// targetPC is the address to jump to for OSR entry (with dataBuffer from
// DFG::prepareOSREntry); encodeResult(0, 0) means "keep running baseline".
// bytecodeIndex is non-zero when this was triggered from a loop (loop OSR)
// and zero when triggered from the function prologue.
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, uint32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);
    
    CodeBlock* codeBlock = exec->codeBlock();
    if (UNLIKELY(codeBlock->jitType() != JITCode::BaselineJIT)) {
        dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
        RELEASE_ASSERT_NOT_REACHED();
    }
    
    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (UNLIKELY(Options::verboseOSR())) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    // Not hot enough yet: refresh predictions and come back later.
    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("counter = ", codeBlock->jitExecuteCounter()));
        codeBlock->updateAllPredictions();
        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }
    
    // Don't tier up while the debugger is stepping or has requests pending.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (UNLIKELY(debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests()))) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("debugger is stepping or has requests"));
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should always be inlined"));
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.
        
        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiling"));
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiled and failed"));
            codeBlock->updateAllPredictions();
            if (UNLIKELY(Options::verboseOSR()))
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        CodeBlock* replacement = codeBlock->replacement();
        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Considering OSR ", codeBlock, " -> ", replacement, ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.
        
        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (replacement->shouldReoptimizeFromLoopNow()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize from loop now"));
            if (UNLIKELY(Options::verboseOSR())) {
                dataLog(
                    "Triggering reoptimization of ", codeBlock,
                    "(", replacement, ") (in loop).\n");
            }
            replacement->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        // No replacement and no in-flight compile: consider starting one now.
        if (!codeBlock->shouldOptimizeNow()) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("insufficient profiling"));
            if (UNLIKELY(Options::verboseOSR())) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (UNLIKELY(Options::verboseOSR()))
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Snapshot the values currently live in the frame (arguments, and locals
        // when entering from a loop) so the DFG compile can handle them at the
        // OSR entry point. Callee-save register slots are skipped.
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->numCalleeLocals();
        else
            numVarsWithValues = 0;
        Operands<Optional<JSValue>> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
                continue;
            mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());
        
        if (result != CompilationSuccessful) {
            CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compilation failed"));
            return encodeResult(0, 0);
        }
    }
    
    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(optimizedCodeBlock && JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
    
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        CODEBLOCK_LOG_EVENT(optimizedCodeBlock, "osrEntry", ("at bc#", bytecodeIndex));
        if (UNLIKELY(Options::verboseOSR())) {
            dataLog(
                "Performing OSR ", codeBlock, " -> ", optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        codeBlock->unlinkedCodeBlock()->setDidOptimize(TrueTriState);
        // Retag the OSR entry thunk pointer from its thunk tag to a tag derived
        // from this call frame before handing it back to JIT code.
        void* targetPC = vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress();
        targetPC = retagCodePtr(targetPC, JITThunkPtrTag, bitwise_cast<PtrTag>(exec));
        return encodeResult(targetPC, dataBuffer);
    }

    if (UNLIKELY(Options::verboseOSR())) {
        dataLog(
            "Optimizing ", codeBlock, " -> ", codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize now"));
        if (UNLIKELY(Options::verboseOSR())) {
            dataLog(
                "Triggering reoptimization of ", codeBlock, " -> ",
                codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();
    
    CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("OSR failed"));
    return encodeResult(0, 0);
}
1629
1630 char* JIT_OPERATION operationTryOSREnterAtCatch(ExecState* exec, uint32_t bytecodeIndex)
1631 {
1632     VM& vm = exec->vm();
1633     NativeCallFrameTracer tracer(&vm, exec);
1634
1635     CodeBlock* optimizedReplacement = exec->codeBlock()->replacement();
1636     if (UNLIKELY(!optimizedReplacement))
1637         return nullptr;
1638
1639     switch (optimizedReplacement->jitType()) {
1640     case JITCode::DFGJIT:
1641     case JITCode::FTLJIT: {
1642         MacroAssemblerCodePtr<ExceptionHandlerPtrTag> entry = DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex);
1643         return entry.executableAddress<char*>();
1644     }
1645     default:
1646         break;
1647     }
1648     return nullptr;
1649 }
1650
1651 char* JIT_OPERATION operationTryOSREnterAtCatchAndValueProfile(ExecState* exec, uint32_t bytecodeIndex)
1652 {
1653     VM& vm = exec->vm();
1654     NativeCallFrameTracer tracer(&vm, exec);
1655
1656     CodeBlock* codeBlock = exec->codeBlock();
1657     CodeBlock* optimizedReplacement = codeBlock->replacement();
1658     if (UNLIKELY(!optimizedReplacement))
1659         return nullptr;
1660
1661     switch (optimizedReplacement->jitType()) {
1662     case JITCode::DFGJIT:
1663     case JITCode::FTLJIT: {
1664         MacroAssemblerCodePtr<ExceptionHandlerPtrTag> entry = DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex);
1665         return entry.executableAddress<char*>();
1666     }
1667     default:
1668         break;
1669     }
1670
1671     codeBlock->ensureCatchLivenessIsComputedForBytecodeOffset(bytecodeIndex);
1672     auto bytecode = codeBlock->instructions().at(bytecodeIndex)->as<OpCatch>();
1673     auto& metadata = bytecode.metadata(codeBlock);
1674     metadata.m_buffer->forEach([&] (ValueProfileAndOperand& profile) {
1675         profile.m_profile.m_buckets[0] = JSValue::encode(exec->uncheckedR(profile.m_operand).jsValue());
1676     });
1677
1678     return nullptr;
1679 }
1680
1681 #endif
1682
1683 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1684 {
1685     VM& vm = exec->vm();
1686     NativeCallFrameTracer tracer(&vm, exec);
1687
1688     JSValue arrayValue = JSValue::decode(encodedArrayValue);
1689     ASSERT(isJSArray(arrayValue));
1690     asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1691 }
1692
// Selects which half of an accessor pair putAccessorByVal installs.
enum class AccessorType {
    Getter,
    Setter
};
1697
1698 static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1699 {
1700     VM& vm = exec->vm();
1701     auto scope = DECLARE_THROW_SCOPE(vm);
1702     auto propertyKey = subscript.toPropertyKey(exec);
1703     RETURN_IF_EXCEPTION(scope, void());
1704
1705     scope.release();
1706     if (accessorType == AccessorType::Getter)
1707         base->putGetter(exec, propertyKey, accessor, attribute);
1708     else
1709         base->putSetter(exec, propertyKey, accessor, attribute);
1710 }
1711
1712 void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1713 {
1714     VM& vm = exec->vm();
1715     NativeCallFrameTracer tracer(&vm, exec);
1716
1717     ASSERT(object && object->isObject());
1718     JSObject* baseObj = object->getObject();
1719
1720     ASSERT(getter->isObject());
1721     baseObj->putGetter(exec, uid, getter, options);
1722 }
1723
1724 void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1725 {
1726     VM& vm = exec->vm();
1727     NativeCallFrameTracer tracer(&vm, exec);
1728
1729     ASSERT(object && object->isObject());
1730     JSObject* baseObj = object->getObject();
1731
1732     ASSERT(setter->isObject());
1733     baseObj->putSetter(exec, uid, setter, options);
1734 }
1735
1736 void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1737 {
1738     VM& vm = exec->vm();
1739     NativeCallFrameTracer tracer(&vm, exec);
1740
1741     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1742 }
1743
1744 void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1745 {
1746     VM& vm = exec->vm();
1747     NativeCallFrameTracer tracer(&vm, exec);
1748
1749     putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1750 }
1751
1752 #if USE(JSVALUE64)
1753 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1754 {
1755     VM& vm = exec->vm();
1756     NativeCallFrameTracer tracer(&vm, exec);
1757
1758     ASSERT(object && object->isObject());
1759     JSObject* baseObject = asObject(object);
1760
1761     JSValue getter = JSValue::decode(encodedGetterValue);
1762     JSValue setter = JSValue::decode(encodedSetterValue);
1763     ASSERT(getter.isObject() || setter.isObject());
1764     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject(), getter, setter);
1765     CommonSlowPaths::putDirectAccessorWithReify(vm, exec, baseObject, uid, accessor, attribute);
1766 }
1767
1768 #else
1769 void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getterCell, JSCell* setterCell)
1770 {
1771     VM& vm = exec->vm();
1772     NativeCallFrameTracer tracer(&vm, exec);
1773
1774     ASSERT(object && object->isObject());
1775     JSObject* baseObject = asObject(object);
1776
1777     ASSERT(getterCell || setterCell);
1778     JSObject* getter = getterCell ? getterCell->getObject() : nullptr;
1779     JSObject* setter = setterCell ? setterCell->getObject() : nullptr;
1780     GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject(), getter, setter);
1781     CommonSlowPaths::putDirectAccessorWithReify(vm, exec, baseObject, uid, accessor, attribute);
1782 }
1783 #endif
1784
1785 void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1786 {
1787     VM& vm = exec->vm();
1788     NativeCallFrameTracer tracer(&vm, exec);
1789
1790     JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1791     exec->uncheckedR(scopeReg) = scope->next();
1792 }
1793
1794 int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1795 {
1796     VM& vm = exec->vm();
1797     NativeCallFrameTracer tracer(&vm, exec);
1798
1799     JSValue value = JSValue::decode(encodedValue);
1800     JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1801
1802     ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor->structure(vm)->typeInfo().implementsDefaultHasInstance());
1803
1804     if (constructor->hasInstance(exec, value, hasInstanceValue))
1805         return 1;
1806     return 0;
1807 }
1808
1809 }
1810
// Generic get-by-val slow path shared by the operations below. Tries, in order:
// a fast own-property lookup for string subscripts, indexed access for uint32
// subscripts (repatching the call site to the string-specialized operation when
// the base is a string with an in-bounds index), and finally a fully generic
// get. Records slow-path and out-of-bounds profiling into byValInfo along the
// way. May throw (tracked via `scope`).
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec);
            RETURN_IF_EXCEPTION(scope, JSValue());
            if (existingAtomicString) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // Only count this as the slow path if the site cached a
                    // different identifier than the one we just looked up.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomicString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isUInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue)) {
            if (asString(baseValue)->canGetIndex(i)) {
                // String with an in-bounds index: relink this site to the
                // string-specialized operation for future calls.
                ctiPatchCallByReturnAddress(returnAddress, operationGetByValString);
                RELEASE_AND_RETURN(scope, asString(baseValue)->getIndex(exec, i));
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            bool skipMarkingOutOfBounds = false;

            if (object->indexingType() == ArrayWithContiguous && i < object->butterfly()->publicLength()) {
                // FIXME: expand this to ArrayStorage, Int32, and maybe Double:
                // https://bugs.webkit.org/show_bug.cgi?id=182940
                auto* globalObject = object->globalObject(vm);
                skipMarkingOutOfBounds = globalObject->isOriginalArrayStructure(object->structure(vm)) && globalObject->arrayPrototypeChainIsSane();
            }

            if (!skipMarkingOutOfBounds && !CommonSlowPaths::canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        RELEASE_AND_RETURN(scope, baseValue.get(exec, i));
    }

    // Fully generic path: coerce to a property key and do an ordinary get.
    baseValue.requireObjectCoercible(exec);
    RETURN_IF_EXCEPTION(scope, JSValue());
    auto property = subscript.toPropertyKey(exec);
    RETURN_IF_EXCEPTION(scope, JSValue());

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    RELEASE_AND_RETURN(scope, baseValue.get(exec, property));
}
1879
// Decides whether the get_by_val site described by byValInfo should be
// repatched with a specialized stub. Returns Optimized when a stub was
// compiled, SeenOnce when a cached-id candidate was recorded for a future
// visit, GiveUp when the site should permanently fall back to the generic
// operation, and NotOptimized otherwise. May throw while converting the
// subscript to a property key.
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(locker, &vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, OptimizationResult::GiveUp);
        // Index-like string keys go through the indexed paths instead; only
        // cache non-index identifiers and symbols here.
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same identifier twice in a row: compile a stub specialized to it.
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First visit: remember the identifier and check again next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1953
1954 extern "C" {
1955
1956 EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1957 {
1958     VM& vm = exec->vm();
1959     NativeCallFrameTracer tracer(&vm, exec);
1960     JSValue baseValue = JSValue::decode(encodedBase);
1961     JSValue subscript = JSValue::decode(encodedSubscript);
1962
1963     JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1964     return JSValue::encode(result);
1965 }
1966
1967 EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1968 {
1969     VM& vm = exec->vm();
1970     NativeCallFrameTracer tracer(&vm, exec);
1971     auto scope = DECLARE_THROW_SCOPE(vm);
1972
1973     JSValue baseValue = JSValue::decode(encodedBase);
1974     JSValue subscript = JSValue::decode(encodedSubscript);
1975     ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
1976     OptimizationResult result = tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress);
1977     RETURN_IF_EXCEPTION(scope, { });
1978     if (result == OptimizationResult::GiveUp) {
1979         // Don't ever try to optimize.
1980         byValInfo->tookSlowPath = true;
1981         ctiPatchCallByReturnAddress(returnAddress, operationGetByValGeneric);
1982     }
1983
1984     RELEASE_AND_RETURN(scope, JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress)));
1985 }
1986
// Slow path for has_indexed_property that may repatch its own call site:
// either with a stub specialized to the observed array mode, or (after
// repeated failures / intercepting objects) with the generic operation below.
// In all cases it then answers the membership query for this call, returning
// an encoded boolean.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);
    
    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }
    
    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationHasIndexedPropertyGeneric);
        }
    }

    // Answer the actual query for this invocation.
    uint32_t index = subscript.asUInt32();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!CommonSlowPaths::canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
}
2037     
2038 EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
2039 {
2040     VM& vm = exec->vm();
2041     NativeCallFrameTracer tracer(&vm, exec);
2042     JSValue baseValue = JSValue::decode(encodedBase);
2043     JSValue subscript = JSValue::decode(encodedSubscript);
2044     
2045     ASSERT(baseValue.isObject());
2046     ASSERT(subscript.isUInt32());
2047
2048     JSObject* object = asObject(baseValue);
2049     uint32_t index = subscript.asUInt32();
2050     if (object->canGetIndexQuickly(index))
2051         return JSValue::encode(JSValue(JSValue::JSTrue));
2052
2053     if (!CommonSlowPaths::canAccessArgumentIndexQuickly(*object, index)) {
2054         // FIXME: This will make us think that in-bounds typed array accesses are actually
2055         // out-of-bounds.
2056         // https://bugs.webkit.org/show_bug.cgi?id=149886
2057         byValInfo->arrayProfile->setOutOfBounds();
2058     }
2059     return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, subscript.asUInt32(), PropertySlot::InternalMethodType::GetOwnProperty)));
2060 }
2061     
// Slow path for get_by_val specialized for string bases: serves in-bounds
// character accesses directly, falls back to the generic get, and
// un-specializes the call site if the base turns out not to be a string.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);
    
    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        // In-range character index on a string: return the character directly.
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            RELEASE_AND_RETURN(scope, JSValue::encode(asString(baseValue)->getIndex(exec, i)));

        result = baseValue.get(exec, i);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        if (!isJSString(baseValue)) {
            // The base is not a string, so this specialized stub is wrong for
            // this site; repatch to the generic or optimizing by-val path.
            ASSERT(exec->bytecodeOffset());
            auto getByValFunction = byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize;
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), getByValFunction);
        }
    } else {
        // Non-uint32 subscript: run the full ToPropertyKey + get protocol,
        // each step of which can throw.
        baseValue.requireObjectCoercible(exec);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        auto property = subscript.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        scope.release();
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
2094
2095 EncodedJSValue JIT_OPERATION operationDeleteByIdJSResult(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
2096 {
2097     return JSValue::encode(jsBoolean(operationDeleteById(exec, base, uid)));
2098 }
2099
// Implements `delete base.uid`. Returns whether the deletion succeeded; in
// strict mode a failed delete throws a TypeError instead.
size_t JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    // ToObject can throw (e.g. for null/undefined bases).
    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    RETURN_IF_EXCEPTION(scope, false);
    if (!baseObj)
        return false;
    bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, Identifier::fromUid(&vm, uid));
    RETURN_IF_EXCEPTION(scope, false);
    // Strict-mode semantics: an unsuccessful delete is a TypeError.
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        throwTypeError(exec, scope, UnableToDeletePropertyError);
    return couldDelete;
}
2116
2117 EncodedJSValue JIT_OPERATION operationDeleteByValJSResult(ExecState* exec, EncodedJSValue base,  EncodedJSValue key)
2118 {
2119     return JSValue::encode(jsBoolean(operationDeleteByVal(exec, base, key)));
2120 }
2121
// Implements `delete base[key]`. Integer-like keys use the by-index delete
// hook; all other keys are converted to property keys first. In strict mode a
// failed delete throws a TypeError.
size_t JIT_OPERATION operationDeleteByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedKey)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    // ToObject can throw (e.g. for null/undefined bases).
    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    RETURN_IF_EXCEPTION(scope, false);
    JSValue key = JSValue::decode(encodedKey);
    if (!baseObj)
        return false;

    bool couldDelete;
    uint32_t index;
    if (key.getUInt32(index))
        couldDelete = baseObj->methodTable(vm)->deletePropertyByIndex(baseObj, exec, index);
    else {
        // ToPropertyKey can run arbitrary JS (e.g. toString), so it may throw.
        Identifier property = key.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, false);
        couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, property);
    }
    RETURN_IF_EXCEPTION(scope, false);
    // Strict-mode semantics: an unsuccessful delete is a TypeError.
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        throwTypeError(exec, scope, UnableToDeletePropertyError);
    return couldDelete;
}
2148
2149 JSCell* JIT_OPERATION operationPushWithScope(ExecState* exec, JSCell* currentScopeCell, EncodedJSValue objectValue)
2150 {
2151     VM& vm = exec->vm();
2152     NativeCallFrameTracer tracer(&vm, exec);
2153     auto scope = DECLARE_THROW_SCOPE(vm);
2154
2155     JSObject* object = JSValue::decode(objectValue).toObject(exec);
2156     RETURN_IF_EXCEPTION(scope, nullptr);
2157
2158     JSScope* currentScope = jsCast<JSScope*>(currentScopeCell);
2159
2160     return JSWithScope::create(vm, exec->lexicalGlobalObject(), currentScope, object);
2161 }
2162
2163 JSCell* JIT_OPERATION operationPushWithScopeObject(ExecState* exec, JSCell* currentScopeCell, JSObject* object)
2164 {
2165     VM& vm = exec->vm();
2166     NativeCallFrameTracer tracer(&vm, exec);
2167     JSScope* currentScope = jsCast<JSScope*>(currentScopeCell);
2168     return JSWithScope::create(vm, exec->lexicalGlobalObject(), currentScope, object);
2169 }
2170
2171 EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
2172 {
2173     VM& vm = exec->vm();
2174     NativeCallFrameTracer tracer(&vm, exec);
2175     JSValue value = JSValue::decode(encodedValue);
2176     JSValue proto = JSValue::decode(encodedProto);
2177     
2178     bool result = JSObject::defaultHasInstance(exec, value, proto);
2179     return JSValue::encode(jsBoolean(result));
2180 }
2181
2182 EncodedJSValue JIT_OPERATION operationInstanceOfGeneric(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
2183 {
2184     VM& vm = exec->vm();
2185     NativeCallFrameTracer tracer(&vm, exec);
2186     JSValue value = JSValue::decode(encodedValue);
2187     JSValue proto = JSValue::decode(encodedProto);
2188     
2189     stubInfo->tookSlowPath = true;
2190     
2191     bool result = JSObject::defaultHasInstance(exec, value, proto);
2192     return JSValue::encode(jsBoolean(result));
2193 }
2194
// instanceof slow path that, besides computing the result, tries to cache it
// via the structure stub info so later executions stay inline.
EncodedJSValue JIT_OPERATION operationInstanceOfOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue value = JSValue::decode(encodedValue);
    JSValue proto = JSValue::decode(encodedProto);
    
    // defaultHasInstance can throw (e.g. non-object prototype).
    bool result = JSObject::defaultHasInstance(exec, value, proto);
    RETURN_IF_EXCEPTION(scope, JSValue::encode(jsUndefined()));
    
    // Only repatch while the stub still considers this site worth caching.
    if (stubInfo->considerCaching(exec->codeBlock(), value.structureOrNull()))
        repatchInstanceOf(exec, value, proto, *stubInfo, result);
    
    return JSValue::encode(jsBoolean(result));
}
2211
2212 int32_t JIT_OPERATION operationSizeFrameForForwardArguments(ExecState* exec, EncodedJSValue, int32_t numUsedStackSlots, int32_t)
2213 {
2214     VM& vm = exec->vm();
2215     NativeCallFrameTracer tracer(&vm, exec);
2216     return sizeFrameForForwardArguments(exec, vm, numUsedStackSlots);
2217 }
2218
2219 int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
2220 {
2221     VM& vm = exec->vm();
2222     NativeCallFrameTracer tracer(&vm, exec);
2223     JSValue arguments = JSValue::decode(encodedArguments);
2224     return sizeFrameForVarargs(exec, vm, arguments, numUsedStackSlots, firstVarArgOffset);
2225 }
2226
2227 CallFrame* JIT_OPERATION operationSetupForwardArgumentsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue, int32_t, int32_t length)
2228 {
2229     VM& vm = exec->vm();
2230     NativeCallFrameTracer tracer(&vm, exec);
2231     setupForwardArgumentsFrame(exec, newCallFrame, length);
2232     return newCallFrame;
2233 }
2234
2235 CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
2236 {
2237     VM& vm = exec->vm();
2238     NativeCallFrameTracer tracer(&vm, exec);
2239     JSValue arguments = JSValue::decode(encodedArguments);
2240     setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
2241     return newCallFrame;
2242 }
2243
2244 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2245 {
2246     VM& vm = exec->vm();
2247     NativeCallFrameTracer tracer(&vm, exec);
2248     JSValue key = JSValue::decode(encodedKey);
2249     CodeBlock* codeBlock = exec->codeBlock();
2250
2251     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
2252     void* result = jumpTable.ctiDefault.executableAddress();
2253
2254     if (key.isString()) {
2255         StringImpl* value = asString(key)->value(exec).impl();
2256         if (value->length() == 1)
2257             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
2258     }
2259
2260     assertIsTaggedWith(result, JSSwitchPtrTag);
2261     return reinterpret_cast<char*>(result);
2262 }
2263
2264 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2265 {
2266     VM& vm = exec->vm();
2267     NativeCallFrameTracer tracer(&vm, exec);
2268     JSValue key = JSValue::decode(encodedKey);
2269     CodeBlock* codeBlock = exec->codeBlock();
2270
2271     SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
2272     void* result;
2273     if (key.isInt32())
2274         result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
2275     else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
2276         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
2277     else
2278         result = jumpTable.ctiDefault.executableAddress();
2279     assertIsTaggedWith(result, JSSwitchPtrTag);
2280     return reinterpret_cast<char*>(result);
2281 }
2282
2283 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2284 {
2285     VM& vm = exec->vm();
2286     NativeCallFrameTracer tracer(&vm, exec);
2287     JSValue key = JSValue::decode(encodedKey);
2288     CodeBlock* codeBlock = exec->codeBlock();
2289
2290     void* result;
2291     StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
2292
2293     if (key.isString()) {
2294         StringImpl* value = asString(key)->value(exec).impl();
2295         result = jumpTable.ctiForValue(value).executableAddress();
2296     } else
2297         result = jumpTable.ctiDefault.executableAddress();
2298
2299     assertIsTaggedWith(result, JSSwitchPtrTag);
2300     return reinterpret_cast<char*>(result);
2301 }
2302
// Slow path for get_from_scope: looks up `ident` on the resolved scope
// object, applying a TDZ check for global lexical bindings and throwing a
// reference error for missing bindings when the resolve mode demands it.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, const Instruction* pc)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto throwScope = DECLARE_THROW_SCOPE(vm);

    CodeBlock* codeBlock = exec->codeBlock();

    auto bytecode = pc->as<OpGetFromScope>();
    const Identifier& ident = codeBlock->identifier(bytecode.m_var);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(bytecode.m_scope.offset()).jsValue());
    GetPutInfo& getPutInfo = bytecode.metadata(codeBlock).m_getPutInfo;

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    RELEASE_AND_RETURN(throwScope, JSValue::encode(scope->getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (!found) {
            // Unresolvable reference: only an error when the bytecode asks for it.
            if (getPutInfo.resolveMode() == ThrowIfNotFound)
                throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
            return jsUndefined();
        }

        JSValue result = JSValue();
        if (scope->isGlobalLexicalEnvironment()) {
            // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
            result = slot.getValue(exec, ident);
            if (result == jsTDZValue()) {
                throwException(exec, throwScope, createTDZError(exec));
                return jsUndefined();
            }
        }

        // Try to cache global lookups so future executions stay inline.
        CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, bytecode, scope, slot, ident);

        // `result` is only populated on the global-lexical path above.
        if (!result)
            return slot.getValue(exec, ident);
        return result;
    })));
}
2343
// Slow path for put_to_scope: stores `value` into the resolved scope,
// handling direct closure-variable stores, the TDZ check for global lexical
// bindings, and reference errors for missing bindings under ThrowIfNotFound.
void JIT_OPERATION operationPutToScope(ExecState* exec, const Instruction* pc)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto throwScope = DECLARE_THROW_SCOPE(vm);

    CodeBlock* codeBlock = exec->codeBlock();
    auto bytecode = pc->as<OpPutToScope>();
    auto& metadata = bytecode.metadata(codeBlock);

    const Identifier& ident = codeBlock->identifier(bytecode.m_var);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(bytecode.m_scope.offset()).jsValue());
    JSValue value = exec->r(bytecode.m_value.offset()).jsValue();
    GetPutInfo& getPutInfo = metadata.m_getPutInfo;

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        // Direct store into a lexical environment slot; touch the variable's
        // watchpoint set, if any, so observers of this binding are notified.
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(metadata.m_operand)).set(vm, environment, value);
        if (WatchpointSet* set = metadata.m_watchpointSet)
            set->touch(vm, "Executed op_put_scope<LocalClosureVar>");
        return;
    }

    // hasProperty can run arbitrary JS (proxies/getters on the scope chain).
    bool hasProperty = scope->hasProperty(exec, ident);
    EXCEPTION_ASSERT(!throwScope.exception() || !hasProperty);
    if (hasProperty
        && scope->isGlobalLexicalEnvironment()
        && !isInitialization(getPutInfo.initializationMode())) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            throwException(exec, throwScope, createTDZError(exec));
            return;
        }
    }

    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, isInitialization(getPutInfo.initializationMode()));
    scope->methodTable(vm)->put(scope, exec, ident, value, slot);
    
    RETURN_IF_EXCEPTION(throwScope, void());

    // Try to cache global stores so future executions stay inline.
    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, bytecode, scope, slot, ident);
}
2396
2397 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
2398 {
2399     VM* vm = &exec->vm();
2400     NativeCallFrameTracer tracer(vm, exec);
2401     auto scope = DECLARE_THROW_SCOPE(*vm);
2402
2403     JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
2404     throwException(exec, scope, exceptionValue);
2405
2406     // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
2407     genericUnwind(vm, exec);
2408 }
2409
// Gives an object with no out-of-line property storage its initial
// out-of-line capacity and returns the new butterfly to the JIT.
char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState* exec, JSObject* object)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(!object->structure(vm)->outOfLineCapacity());
    Butterfly* result = object->allocateMoreOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
    // The structure ID is nuked while the new butterfly is installed;
    // presumably the calling JIT code stores the final structure afterwards —
    // see nukeStructureAndSetButterfly.
    object->nukeStructureAndSetButterfly(vm, object->structureID(), result);
    return reinterpret_cast<char*>(result);
}
2420
// Grows an object's existing out-of-line property storage to `newSize` slots
// and returns the new butterfly to the JIT.
char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* exec, JSObject* object, size_t newSize)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    Butterfly* result = object->allocateMoreOutOfLineStorage(vm, object->structure(vm)->outOfLineCapacity(), newSize);
    // The structure ID is nuked while the new butterfly is installed;
    // presumably the calling JIT code stores the final structure afterwards —
    // see nukeStructureAndSetButterfly.
    object->nukeStructureAndSetButterfly(vm, object->structureID(), result);
    return reinterpret_cast<char*>(result);
}
2430
2431 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2432 {
2433     VM* vm = &exec->vm();
2434     NativeCallFrameTracer tracer(vm, exec);
2435     vm->heap.writeBarrier(cell);
2436 }
2437
2438 void JIT_OPERATION operationWriteBarrierSlowPath(ExecState* exec, JSCell* cell)
2439 {
2440     VM* vm = &exec->vm();
2441     NativeCallFrameTracer tracer(vm, exec);
2442     vm->heap.writeBarrierSlowPath(cell);
2443 }
2444
// Finds the handler for the current exception; the continuation (machine PC
// and catch frame) is recorded on the VM for the JIT to jump to.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    // genericUnwind must always have produced a target PC.
    ASSERT(vm->targetMachinePCForThrow);
}
2451
// Variant of lookupExceptionHandler used for stack-overflow frames (asserted
// below), where the current frame itself could not be fully set up.
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    ASSERT(exec->isStackOverflowFrame());
    // The pending exception must actually be a stack-overflow error.
    ASSERT(jsCast<ErrorInstance*>(vm->exceptionForInspection()->value().asCell())->isStackOverflowError());
    lookupExceptionHandler(vm, exec);
}
2458
2459 void JIT_OPERATION operationVMHandleException(ExecState* exec)
2460 {
2461     VM* vm = &exec->vm();
2462     NativeCallFrameTracer tracer(vm, exec);
2463     genericUnwind(vm, exec);
2464 }
2465
2466 // This function "should" just take the ExecState*, but doing so would make it more difficult
2467 // to call from exception check sites. So, unlike all of our other functions, we allow
2468 // ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
2469 // particularly safe here since this is never called on the critical path - it's only for
2470 // testing.
// Testing hook: injects synthetic exceptions at exception-check sites.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    UNUSED_PARAM(scope);
#if COMPILER(GCC_COMPATIBLE)
    // The return address identifies the check site being fuzzed;
    // __builtin_return_address is only available on GCC-compatible compilers.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, scope, "JITOperations", returnPC);
#endif // COMPILER(GCC_COMPATIBLE)
}
2482
2483 ALWAYS_INLINE static EncodedJSValue unprofiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2484 {
2485     VM* vm = &exec->vm();
2486     NativeCallFrameTracer tracer(vm, exec);
2487     
2488     JSValue op1 = JSValue::decode(encodedOp1);
2489     JSValue op2 = JSValue::decode(encodedOp2);
2490     
2491     return JSValue::encode(jsAdd(exec, op1, op2));
2492 }
2493
2494 ALWAYS_INLINE static EncodedJSValue profiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile)
2495 {
2496     VM* vm = &exec->vm();
2497     NativeCallFrameTracer tracer(vm, exec);
2498     
2499     JSValue op1 = JSValue::decode(encodedOp1);
2500     JSValue op2 = JSValue::decode(encodedOp2);
2501
2502     arithProfile.observeLHSAndRHS(op1, op2);
2503     JSValue result = jsAdd(exec, op1, op2);
2504     arithProfile.observeResult(result);
2505
2506     return JSValue::encode(result);
2507 }
2508
// Generic `+` slow path with no profiling attached.
EncodedJSValue JIT_OPERATION operationValueAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    return unprofiledAdd(exec, encodedOp1, encodedOp2);
}
2513
2514 EncodedJSValue JIT_OPERATION operationValueAddProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2515 {
2516     ASSERT(arithProfile);
2517     return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
2518 }
2519
// Profiled add slow path that also asks the math IC to generate an
// out-of-line fast path; that generated code falls back to
// operationValueAddProfiledNoOptimize so we don't regenerate on every miss.
EncodedJSValue JIT_OPERATION operationValueAddProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    ArithProfile* arithProfile = addIC->arithProfile();
    ASSERT(arithProfile);
    // Observe operands before generating code so the IC sees current types.
    arithProfile->observeLHSAndRHS(op1, op2);
    auto nonOptimizeVariant = operationValueAddProfiledNoOptimize;
    addIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif
    
    JSValue result = jsAdd(exec, op1, op2);
    arithProfile->observeResult(result);

    return JSValue::encode(result);
}
2543
2544 EncodedJSValue JIT_OPERATION operationValueAddProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
2545 {
2546     VM* vm = &exec->vm();
2547     NativeCallFrameTracer tracer(vm, exec);
2548
2549     ArithProfile* arithProfile = addIC->arithProfile();
2550     ASSERT(arithProfile);
2551     return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
2552 }
2553
// Unprofiled add slow path that also asks the math IC to generate an
// out-of-line fast path; that code falls back to operationValueAddNoOptimize.
EncodedJSValue JIT_OPERATION operationValueAddOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    auto nonOptimizeVariant = operationValueAddNoOptimize;
    // Unlike the Profiled variant, the profile may be absent here.
    if (ArithProfile* arithProfile = addIC->arithProfile())
        arithProfile->observeLHSAndRHS(op1, op2);
    addIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return JSValue::encode(jsAdd(exec, op1, op2));
}
2573
2574 EncodedJSValue JIT_OPERATION operationValueAddNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC*)
2575 {
2576     VM* vm = &exec->vm();
2577     NativeCallFrameTracer tracer(vm, exec);
2578     
2579     JSValue op1 = JSValue::decode(encodedOp1);
2580     JSValue op2 = JSValue::decode(encodedOp2);
2581     
2582     JSValue result = jsAdd(exec, op1, op2);
2583
2584     return JSValue::encode(result);
2585 }
2586
2587 ALWAYS_INLINE static EncodedJSValue unprofiledMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2588 {
2589     JSValue op1 = JSValue::decode(encodedOp1);
2590     JSValue op2 = JSValue::decode(encodedOp2);
2591
2592     return JSValue::encode(jsMul(exec, op1, op2));
2593 }
2594
// Shared implementation for the profiled multiply slow paths: records operand
// types (optionally) and the result type. Callers are expected to have
// installed a NativeCallFrameTracer — none is installed here.
ALWAYS_INLINE static EncodedJSValue profiledMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    // Skipped when the caller already observed the operands (e.g. before
    // generating IC code).
    if (shouldObserveLHSAndRHSTypes)
        arithProfile.observeLHSAndRHS(op1, op2);

    JSValue result = jsMul(exec, op1, op2);
    // Don't record a result type if the multiply threw.
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2610
2611 EncodedJSValue JIT_OPERATION operationValueMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2612 {
2613     VM* vm = &exec->vm();
2614     NativeCallFrameTracer tracer(vm, exec);
2615
2616     return unprofiledMul(exec, encodedOp1, encodedOp2);
2617 }
2618
2619 EncodedJSValue JIT_OPERATION operationValueMulNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC*)
2620 {
2621     VM* vm = &exec->vm();
2622     NativeCallFrameTracer tracer(vm, exec);
2623
2624     return unprofiledMul(exec, encodedOp1, encodedOp2);
2625 }
2626
// Unprofiled multiply slow path that also asks the math IC to generate an
// out-of-line fast path; that code falls back to operationValueMulNoOptimize.
EncodedJSValue JIT_OPERATION operationValueMulOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    auto nonOptimizeVariant = operationValueMulNoOptimize;
    // The profile is optional on this (unprofiled) path.
    if (ArithProfile* arithProfile = mulIC->arithProfile())
        arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    mulIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return unprofiledMul(exec, encodedOp1, encodedOp2);
}
2643
2644 EncodedJSValue JIT_OPERATION operationValueMulProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2645 {
2646     VM* vm = &exec->vm();
2647     NativeCallFrameTracer tracer(vm, exec);
2648
2649     ASSERT(arithProfile);
2650     return profiledMul(exec, encodedOp1, encodedOp2, *arithProfile);
2651 }
2652
// Profiled multiply slow path that also generates the out-of-line IC code;
// that code falls back to operationValueMulProfiledNoOptimize.
EncodedJSValue JIT_OPERATION operationValueMulProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = mulIC->arithProfile();
    ASSERT(arithProfile);
    // Observe operands before generating code so the IC sees current types.
    arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    auto nonOptimizeVariant = operationValueMulProfiledNoOptimize;
    mulIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    // Operands were already observed above, so tell profiledMul to skip that.
    return profiledMul(exec, encodedOp1, encodedOp2, *arithProfile, false);
}
2670
2671 EncodedJSValue JIT_OPERATION operationValueMulProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
2672 {
2673     VM* vm = &exec->vm();
2674     NativeCallFrameTracer tracer(vm, exec);
2675
2676     ArithProfile* arithProfile = mulIC->arithProfile();
2677     ASSERT(arithProfile);
2678     return profiledMul(exec, encodedOp1, encodedOp2, *arithProfile);
2679 }
2680
// Unary minus slow path without profiling: handles BigInt negation and the
// generic ToNumber path. Note this helper installs its own frame tracer,
// unlike unprofiledMul/unprofiledSub.
ALWAYS_INLINE static EncodedJSValue unprofiledNegate(ExecState* exec, EncodedJSValue encodedOperand)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);
    
    JSValue operand = JSValue::decode(encodedOperand);
    
    // ToPrimitive with a number hint; can run arbitrary JS and throw.
    JSValue primValue = operand.toPrimitive(exec, PreferNumber);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    // BigInts negate directly, without a number conversion.
    if (primValue.isBigInt())
        return JSValue::encode(JSBigInt::unaryMinus(vm, asBigInt(primValue)));
    
    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    return JSValue::encode(jsNumber(-number));
}
2699
// Unary minus slow path with profiling: records the operand type and the
// result type in the given ArithProfile.
ALWAYS_INLINE static EncodedJSValue profiledNegate(ExecState* exec, EncodedJSValue encodedOperand, ArithProfile& arithProfile)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue operand = JSValue::decode(encodedOperand);
    arithProfile.observeLHS(operand);
    
    // NOTE(review): unlike unprofiledNegate, no PreferNumber hint is passed
    // here — confirm the difference is intentional.
    JSValue primValue = operand.toPrimitive(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    if (primValue.isBigInt()) {
        // BigInt negation yields a BigInt; record it in the profile too.
        JSBigInt* result = JSBigInt::unaryMinus(vm, asBigInt(primValue));
        arithProfile.observeResult(result);

        return JSValue::encode(result);
    }

    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    JSValue result = jsNumber(-number);
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2725
// Unprofiled unary minus slow path.
EncodedJSValue JIT_OPERATION operationArithNegate(ExecState* exec, EncodedJSValue operand)
{
    return unprofiledNegate(exec, operand);
}
2730
2731 EncodedJSValue JIT_OPERATION operationArithNegateProfiled(ExecState* exec, EncodedJSValue operand, ArithProfile* arithProfile)
2732 {
2733     ASSERT(arithProfile);
2734     return profiledNegate(exec, operand, *arithProfile);
2735 }
2736
// Profiled negate slow path that also generates out-of-line IC code; that
// code falls back to operationArithNegateProfiled.
EncodedJSValue JIT_OPERATION operationArithNegateProfiledOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);
    
    JSValue operand = JSValue::decode(encodedOperand);

    ArithProfile* arithProfile = negIC->arithProfile();
    ASSERT(arithProfile);
    // Observe the operand before generating code so the IC sees current types.
    arithProfile->observeLHS(operand);
    negIC->generateOutOfLine(exec->codeBlock(), operationArithNegateProfiled);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif
    
    // NOTE(review): no PreferNumber hint here, unlike unprofiledNegate —
    // confirm the difference is intentional.
    JSValue primValue = operand.toPrimitive(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    if (primValue.isBigInt()) {
        JSBigInt* result = JSBigInt::unaryMinus(vm, asBigInt(primValue));
        arithProfile->observeResult(result);
        return JSValue::encode(result);
    }

    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    JSValue result = jsNumber(-number);
    arithProfile->observeResult(result);
    return JSValue::encode(result);
}
2769
// Unprofiled negate slow path that also generates out-of-line IC code; that
// code falls back to operationArithNegate.
EncodedJSValue JIT_OPERATION operationArithNegateOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue operand = JSValue::decode(encodedOperand);

    // The profile is optional on this (unprofiled) path.
    if (ArithProfile* arithProfile = negIC->arithProfile())
        arithProfile->observeLHS(operand);
    negIC->generateOutOfLine(exec->codeBlock(), operationArithNegate);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    // NOTE(review): no PreferNumber hint here, unlike unprofiledNegate —
    // confirm the difference is intentional.
    JSValue primValue = operand.toPrimitive(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    
    if (primValue.isBigInt())
        return JSValue::encode(JSBigInt::unaryMinus(vm, asBigInt(primValue)));

    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    return JSValue::encode(jsNumber(-number));
}
2796
2797 ALWAYS_INLINE static EncodedJSValue unprofiledSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2798 {
2799     JSValue op1 = JSValue::decode(encodedOp1);
2800     JSValue op2 = JSValue::decode(encodedOp2);
2801     
2802     return JSValue::encode(jsSub(exec, op1, op2));
2803 }
2804
2805 ALWAYS_INLINE static EncodedJSValue profiledSub(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
2806 {
2807     auto scope = DECLARE_THROW_SCOPE(vm);
2808
2809     JSValue op1 = JSValue::decode(encodedOp1);
2810     JSValue op2 = JSValue::decode(encodedOp2);
2811
2812     if (shouldObserveLHSAndRHSTypes)
2813         arithProfile.observeLHSAndRHS(op1, op2);
2814
2815     JSValue result = jsSub(exec, op1, op2);
2816     RETURN_IF_EXCEPTION(scope, encodedJSValue());
2817     arithProfile.observeResult(result);
2818     return JSValue::encode(result);
2819 }
2820
2821 EncodedJSValue JIT_OPERATION operationValueSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2822 {
2823     VM* vm = &exec->vm();
2824     NativeCallFrameTracer tracer(vm, exec);
2825     return unprofiledSub(exec, encodedOp1, encodedOp2);
2826 }
2827
2828 EncodedJSValue JIT_OPERATION operationValueSubProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2829 {
2830     ASSERT(arithProfile);
2831
2832     VM* vm = &exec->vm();
2833     NativeCallFrameTracer tracer(vm, exec);
2834
2835     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2836 }
2837
2838 EncodedJSValue JIT_OPERATION operationValueSubOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2839 {
2840     VM* vm = &exec->vm();
2841     NativeCallFrameTracer tracer(vm, exec);
2842
2843     auto nonOptimizeVariant = operationValueSubNoOptimize;
2844     if (ArithProfile* arithProfile = subIC->arithProfile())
2845         arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2846     subIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);
2847
2848 #if ENABLE(MATH_IC_STATS)
2849     exec->codeBlock()->dumpMathICStats();
2850 #endif
2851
2852     return unprofiledSub(exec, encodedOp1, encodedOp2);
2853 }
2854
2855 EncodedJSValue JIT_OPERATION operationValueSubNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC*)
2856 {
2857     VM* vm = &exec->vm();
2858     NativeCallFrameTracer tracer(vm, exec);
2859
2860     return unprofiledSub(exec, encodedOp1, encodedOp2);
2861 }
2862
2863 EncodedJSValue JIT_OPERATION operationValueSubProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2864 {
2865     VM* vm = &exec->vm();
2866     NativeCallFrameTracer tracer(vm, exec);
2867
2868     ArithProfile* arithProfile = subIC->arithProfile();
2869     ASSERT(arithProfile);
2870     arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
2871     auto nonOptimizeVariant = operationValueSubProfiledNoOptimize;
2872     subIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);
2873
2874 #if ENABLE(MATH_IC_STATS)
2875     exec->codeBlock()->dumpMathICStats();
2876 #endif
2877
2878     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile, false);
2879 }
2880
2881 EncodedJSValue JIT_OPERATION operationValueSubProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2882 {
2883     VM* vm = &exec->vm();
2884     NativeCallFrameTracer tracer(vm, exec);
2885
2886     ArithProfile* arithProfile = subIC->arithProfile();
2887     ASSERT(arithProfile);
2888     return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2889 }
2890
2891 void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2892 {
2893     VM& vm = exec->vm();
2894     NativeCallFrameTracer tracer(&vm, exec);
2895     vm.typeProfilerLog()->processLogEntries(vm, "Log Full, called from inside baseline JIT"_s);
2896 }
2897
2898 void JIT_OPERATION operationProcessShadowChickenLog(ExecState* exec)
2899 {
2900     VM& vm = exec->vm();
2901     NativeCallFrameTracer tracer(&vm, exec);
2902     RELEASE_ASSERT(vm.shadowChicken());
2903     vm.shadowChicken()->update(vm, exec);
2904 }
2905
2906 int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
2907 {
2908     VM& vm = exec->vm();
2909     NativeCallFrameTracer tracer(&vm, exec);
2910     auto scope = DECLARE_THROW_SCOPE(vm);
2911     RELEASE_ASSERT(!!scope.exception());
2912
2913     if (isTerminatedExecutionException(vm, scope.exception())) {
2914         genericUnwind(&vm, exec);
2915         return 1;
2916     }
2917     return 0;
2918 }
2919
2920 } // extern "C"
2921
2922 } // namespace JSC
2923
2924 #endif // ENABLE(JIT)